Repository: pentaho/big-data-plugin Branch: master Commit: abd061467a83 Files: 805 Total size: 5.8 MB Directory structure: gitextract_fsrec6j5/ ├── .gitattributes ├── .github/ │ └── CODEOWNERS ├── .gitignore ├── LICENSE.txt ├── README.markdown ├── api/ │ ├── pom.xml │ └── runtimeTest/ │ ├── pom.xml │ └── src/ │ ├── main/ │ │ ├── java/ │ │ │ └── org/ │ │ │ └── pentaho/ │ │ │ └── runtime/ │ │ │ └── test/ │ │ │ ├── RuntimeTest.java │ │ │ ├── RuntimeTestProgressCallback.java │ │ │ ├── RuntimeTestStatus.java │ │ │ ├── RuntimeTester.java │ │ │ ├── action/ │ │ │ │ ├── RuntimeTestAction.java │ │ │ │ ├── RuntimeTestActionHandler.java │ │ │ │ ├── RuntimeTestActionPayload.java │ │ │ │ ├── RuntimeTestActionService.java │ │ │ │ └── impl/ │ │ │ │ ├── HelpUrlPayload.java │ │ │ │ ├── LoggingRuntimeTestActionHandlerImpl.java │ │ │ │ ├── RuntimeTestActionImpl.java │ │ │ │ └── RuntimeTestActionServiceImpl.java │ │ │ ├── i18n/ │ │ │ │ ├── MessageGetter.java │ │ │ │ ├── MessageGetterFactory.java │ │ │ │ └── impl/ │ │ │ │ ├── BaseMessagesMessageGetterFactoryImpl.java │ │ │ │ └── BaseMessagesMessageGetterImpl.java │ │ │ ├── impl/ │ │ │ │ ├── RuntimeTestComparator.java │ │ │ │ ├── RuntimeTestRunner.java │ │ │ │ ├── RuntimeTestStatusImpl.java │ │ │ │ └── RuntimeTesterImpl.java │ │ │ ├── module/ │ │ │ │ ├── RuntimeTestModuleResults.java │ │ │ │ └── impl/ │ │ │ │ └── RuntimeTestModuleResultsImpl.java │ │ │ ├── network/ │ │ │ │ ├── ConnectivityTest.java │ │ │ │ ├── ConnectivityTestFactory.java │ │ │ │ └── impl/ │ │ │ │ ├── ConnectivityTestFactoryImpl.java │ │ │ │ ├── ConnectivityTestImpl.java │ │ │ │ └── GatewayConnectivityTestImpl.java │ │ │ ├── result/ │ │ │ │ ├── RuntimeTestEntrySeverity.java │ │ │ │ ├── RuntimeTestResult.java │ │ │ │ ├── RuntimeTestResultEntry.java │ │ │ │ ├── RuntimeTestResultSummary.java │ │ │ │ └── org/ │ │ │ │ └── pentaho/ │ │ │ │ └── runtime/ │ │ │ │ └── test/ │ │ │ │ └── result/ │ │ │ │ └── impl/ │ │ │ │ └── RuntimeTestResultSummaryImpl.java │ │ │ └── test/ │ │ │ └── impl/ │ │ │ ├── BaseRuntimeTest.java │ │ │ ├── RuntimeTestDelegateWithMoreDependencies.java │ │ │ ├── RuntimeTestResultEntryImpl.java │ │ │ └── RuntimeTestResultImpl.java │ │ └── resources/ │ │ ├── OSGI-INF/ │ │ │ └── blueprint/ │ │ │ └── blueprint.xml │ │ └── org/ │ │ └── pentaho/ │ │ └── runtime/ │ │ └── test/ │ │ ├── action/ │ │ │ └── impl/ │ │ │ └── messages/ │ │ │ └── messages_en_US.properties │ │ ├── impl/ │ │ │ └── messages/ │ │ │ └── messages_en_US.properties │ │ └── network/ │ │ └── impl/ │ │ └── messages/ │ │ └── messages_en_US.properties │ └── test/ │ └── java/ │ └── org/ │ └── pentaho/ │ └── runtime/ │ └── test/ │ ├── RuntimeTestEntryUtil.java │ ├── TestMessageGetter.java │ ├── TestMessageGetterFactory.java │ ├── action/ │ │ └── impl/ │ │ ├── HelpUrlPayloadTest.java │ │ ├── LoggingRuntimeTestActionHandlerImplTest.java │ │ ├── RuntimeTestActionImplTest.java │ │ └── RuntimeTestActionServiceImplTest.java │ ├── i18n/ │ │ └── impl/ │ │ ├── BaseMessagesMessageGetterFactoryImplTest.java │ │ └── BaseMessagesMessageGetterImplTest.java │ ├── impl/ │ │ ├── RuntimeTestComparatorTest.java │ │ ├── RuntimeTestRunnerTest.java │ │ ├── RuntimeTestStatusImplTest.java │ │ └── RuntimeTesterImplTest.java │ ├── module/ │ │ └── impl/ │ │ └── RuntimeTestModuleResultsImplTest.java │ ├── network/ │ │ └── impl/ │ │ ├── ConnectivityTestImplTest.java │ │ └── GatewayConnectivityTestImplTest.java │ ├── result/ │ │ └── RuntimeTestEntrySeverityTest.java │ └── test/ │ └── impl/ │ ├── BaseRuntimeTestTest.java │ ├── 
RuntimeTestDelegateWithMoreDependenciesTest.java │ ├── RuntimeTestResultEntryImplTest.java │ └── RuntimeTestResultImplTest.java ├── assemblies/ │ ├── pentaho-big-data-plugin/ │ │ ├── pom.xml │ │ └── src/ │ │ └── main/ │ │ └── assembly/ │ │ ├── descriptors/ │ │ │ └── plugin.xml │ │ └── resources/ │ │ ├── bigdata-logging.properties │ │ ├── classpath.properties │ │ ├── hadoop-configurations/ │ │ │ └── .kettle-ignore │ │ ├── plugin.properties │ │ └── plugins/ │ │ └── .gitignore │ ├── pmr-libraries/ │ │ ├── pom.xml │ │ └── src/ │ │ └── main/ │ │ ├── descriptors/ │ │ │ └── assembly.xml │ │ └── resources/ │ │ ├── classes/ │ │ │ ├── kettle-lifecycle-listeners.xml │ │ │ ├── kettle-password-encoder-plugins.xml │ │ │ ├── kettle-registry-extensions.xml │ │ │ ├── log4j2.xml │ │ │ ├── org/ │ │ │ │ └── apache/ │ │ │ │ └── commons/ │ │ │ │ └── vfs2/ │ │ │ │ └── impl/ │ │ │ │ └── providers.xml │ │ │ └── pmr.properties │ │ └── simple-jndi/ │ │ └── jdbc.properties │ ├── pom.xml │ └── samples/ │ ├── pom.xml │ └── src/ │ └── main/ │ ├── assembly/ │ │ └── descriptors/ │ │ └── samples.xml │ └── resources/ │ ├── .kettle-ignore │ └── jobs/ │ └── hadoop/ │ ├── Hadoop Job Executor 2 adv.kjb │ ├── Hadoop Job Executor adv.kjb │ ├── Hadoop Job Executor simple.kjb │ ├── Pentaho MapReduce - weblogs.kjb │ ├── Pentaho MapReduce - wordcount.kjb │ ├── Pig Script Executor tutorial local.kjb │ ├── Pig Script Executor tutorial.kjb │ ├── emr_job.kjb │ ├── excite-small.log │ ├── excite.log.bz2 │ ├── files/ │ │ ├── 2008.log │ │ ├── 2009.log │ │ ├── 2010.log │ │ └── readme.txt │ ├── pentaho-mapreduce-sample-src/ │ │ ├── README.TXT │ │ └── src/ │ │ └── org/ │ │ └── pentaho/ │ │ └── hadoop/ │ │ └── sample/ │ │ └── wordcount/ │ │ ├── WordCount.java │ │ ├── WordCountMapper.java │ │ └── WordCountReducer.java │ ├── pentaho-mapreduce-sample.jar │ ├── pentaho-mapreduce2-sample-src/ │ │ ├── README.TXT │ │ └── src/ │ │ └── org/ │ │ └── pentaho/ │ │ └── hadoop/ │ │ └── sample/ │ │ └── wordcount/ │ │ └── WordCount2.java │ ├── pentaho-mapreduce2-sample.jar │ ├── script1-hadoop-mod.pig │ ├── script1-local-mod.pig │ ├── tutorial.jar │ ├── weblogs-mapper.ktr │ ├── weblogs-reducer.ktr │ ├── wordcount-mapper.ktr │ ├── wordcount-reducer.ktr │ └── wordcount.jar ├── authentication-mapper/ │ ├── api/ │ │ ├── pom.xml │ │ └── src/ │ │ └── main/ │ │ └── java/ │ │ └── org/ │ │ └── pentaho/ │ │ └── authentication/ │ │ └── mapper/ │ │ └── api/ │ │ ├── AuthenticationMappingManager.java │ │ ├── AuthenticationMappingService.java │ │ └── MappingException.java │ ├── impl/ │ │ ├── pom.xml │ │ └── src/ │ │ ├── main/ │ │ │ └── java/ │ │ │ └── org/ │ │ │ └── pentaho/ │ │ │ └── authentication/ │ │ │ └── mapper/ │ │ │ └── impl/ │ │ │ └── AuthenticationMappingManagerImpl.java │ │ └── test/ │ │ ├── java/ │ │ │ └── org/ │ │ │ └── pentaho/ │ │ │ └── authentication/ │ │ │ └── mapper/ │ │ │ └── impl/ │ │ │ └── AuthenticationMappingManagerImplTest.java │ │ └── resources/ │ │ ├── invalid_mapping.json │ │ └── mapping.json │ └── pom.xml ├── dev-doc/ │ ├── multishim/ │ │ ├── MultiShimHBase.sd │ │ ├── README.md │ │ └── SingleShimHBase.sd │ ├── shim-bridge-classloading.graphml │ ├── shim-bridging-classloading.md │ └── shimprovements.md ├── impl/ │ ├── cluster/ │ │ ├── pom.xml │ │ └── src/ │ │ ├── it/ │ │ │ └── resources/ │ │ │ └── core-site.xml │ │ ├── main/ │ │ │ ├── java/ │ │ │ │ └── org/ │ │ │ │ └── pentaho/ │ │ │ │ └── big/ │ │ │ │ └── data/ │ │ │ │ └── impl/ │ │ │ │ └── cluster/ │ │ │ │ ├── NamedClusterImpl.java │ │ │ │ └── NamedClusterManager.java │ │ │ └── resources/ │ │ │ 
└── org/ │ │ │ └── pentaho/ │ │ │ └── big/ │ │ │ └── data/ │ │ │ └── impl/ │ │ │ └── cluster/ │ │ │ └── messages/ │ │ │ └── messages_en_US.properties │ │ └── test/ │ │ ├── java/ │ │ │ └── org/ │ │ │ └── pentaho/ │ │ │ └── big/ │ │ │ └── data/ │ │ │ └── impl/ │ │ │ └── cluster/ │ │ │ ├── NamedClusterImplTest.java │ │ │ ├── NamedClusterManagerTest.java │ │ │ └── NamedClusterMetastoreIT.java │ │ └── resources/ │ │ ├── core-site.xml │ │ └── plugin.properties │ ├── clusterTests/ │ │ ├── pom.xml │ │ └── src/ │ │ ├── main/ │ │ │ ├── java/ │ │ │ │ └── org/ │ │ │ │ └── pentaho/ │ │ │ │ └── big/ │ │ │ │ └── data/ │ │ │ │ └── impl/ │ │ │ │ └── cluster/ │ │ │ │ └── tests/ │ │ │ │ ├── ClusterRuntimeTestEntry.java │ │ │ │ ├── Constants.java │ │ │ │ ├── hdfs/ │ │ │ │ │ ├── GatewayListHomeDirectoryTest.java │ │ │ │ │ ├── GatewayListRootDirectoryTest.java │ │ │ │ │ ├── GatewayPingFileSystemEntryPoint.java │ │ │ │ │ ├── GatewayWriteToAndDeleteFromUsersHomeFolderTest.java │ │ │ │ │ ├── ListDirectoryTest.java │ │ │ │ │ ├── ListHomeDirectoryTest.java │ │ │ │ │ ├── ListRootDirectoryTest.java │ │ │ │ │ ├── PingFileSystemEntryPointTest.java │ │ │ │ │ └── WriteToAndDeleteFromUsersHomeFolderTest.java │ │ │ │ ├── kafka/ │ │ │ │ │ └── KafkaConnectTest.java │ │ │ │ ├── mr/ │ │ │ │ │ ├── GatewayPingJobTrackerTest.java │ │ │ │ │ └── PingJobTrackerTest.java │ │ │ │ ├── oozie/ │ │ │ │ │ ├── GatewayPingOozieHostTest.java │ │ │ │ │ └── PingOozieHostTest.java │ │ │ │ └── zookeeper/ │ │ │ │ ├── GatewayPingZookeeperEnsembleTest.java │ │ │ │ └── PingZookeeperEnsembleTest.java │ │ │ └── resources/ │ │ │ ├── OSGI-INF/ │ │ │ │ └── blueprint/ │ │ │ │ └── blueprint.xml │ │ │ └── org/ │ │ │ └── pentaho/ │ │ │ └── big/ │ │ │ └── data/ │ │ │ └── impl/ │ │ │ └── cluster/ │ │ │ └── tests/ │ │ │ ├── hdfs/ │ │ │ │ └── messages/ │ │ │ │ └── messages_en_US.properties │ │ │ ├── kafka/ │ │ │ │ └── messages/ │ │ │ │ └── messages_en_US.properties │ │ │ ├── messages/ │ │ │ │ └── messages_en_US.properties │ │ │ ├── mr/ │ │ │ │ └── messages/ │ │ │ │ └── messages_en_US.properties │ │ │ ├── oozie/ │ │ │ │ └── messages/ │ │ │ │ └── messages_en_US.properties │ │ │ └── zookeeper/ │ │ │ └── messages/ │ │ │ └── messages_en_US.properties │ │ └── test/ │ │ └── java/ │ │ └── org/ │ │ └── pentaho/ │ │ └── big/ │ │ └── data/ │ │ └── impl/ │ │ └── cluster/ │ │ └── tests/ │ │ ├── hdfs/ │ │ │ ├── ListDirectoryTestTest.java │ │ │ ├── ListHomeDirectoryTestTest.java │ │ │ ├── ListRootDirectoryTestTest.java │ │ │ ├── PingFileSystemEntryPointTestTest.java │ │ │ └── WriteToAndDeleteFromUsersHomeFolderTestTest.java │ │ ├── kafka/ │ │ │ └── KafkaConnectTestTest.java │ │ ├── mr/ │ │ │ └── PingJobTrackerTestTest.java │ │ ├── oozie/ │ │ │ └── PingOozieHostTestTest.java │ │ └── zookeeper/ │ │ └── PingZookeeperEnsembleTestTest.java │ ├── pom.xml │ ├── shim/ │ │ ├── jaas/ │ │ │ ├── pom.xml │ │ │ └── src/ │ │ │ ├── main/ │ │ │ │ ├── java/ │ │ │ │ │ └── org/ │ │ │ │ │ └── pentaho/ │ │ │ │ │ └── big/ │ │ │ │ │ └── data/ │ │ │ │ │ └── impl/ │ │ │ │ │ └── shim/ │ │ │ │ │ └── jaas/ │ │ │ │ │ ├── JaasConfigServiceFactory.java │ │ │ │ │ └── JaasConfigServiceImpl.java │ │ │ │ └── resources/ │ │ │ │ ├── OSGI-INF/ │ │ │ │ │ └── blueprint/ │ │ │ │ │ └── blueprint.xml │ │ │ │ └── org/ │ │ │ │ └── pentaho/ │ │ │ │ └── big/ │ │ │ │ └── data/ │ │ │ │ └── impl/ │ │ │ │ └── shim/ │ │ │ │ └── jaas/ │ │ │ │ └── messages.properties │ │ │ └── test/ │ │ │ └── java/ │ │ │ └── org/ │ │ │ └── pentaho/ │ │ │ └── big/ │ │ │ └── data/ │ │ │ └── impl/ │ │ │ └── shim/ │ │ │ └── jaas/ │ │ │ ├── 
JaasConfigServiceFactoryTest.java │ │ │ └── JaasConfigServiceImplTest.java │ │ ├── pig/ │ │ │ └── pdi-testName │ │ ├── pom.xml │ │ └── shimTests/ │ │ ├── pom.xml │ │ └── src/ │ │ ├── main/ │ │ │ ├── java/ │ │ │ │ └── org/ │ │ │ │ └── pentaho/ │ │ │ │ └── big/ │ │ │ │ └── data/ │ │ │ │ └── impl/ │ │ │ │ └── shim/ │ │ │ │ └── tests/ │ │ │ │ ├── TestShimConfig.java │ │ │ │ └── TestShimLoad.java │ │ │ └── resources/ │ │ │ ├── OSGI-INF/ │ │ │ │ └── blueprint/ │ │ │ │ └── blueprint.xml │ │ │ └── org/ │ │ │ └── pentaho/ │ │ │ └── big/ │ │ │ └── data/ │ │ │ └── impl/ │ │ │ └── shim/ │ │ │ └── tests/ │ │ │ └── messages/ │ │ │ └── messages_en_US.properties │ │ └── test/ │ │ └── java/ │ │ └── org/ │ │ └── pentaho/ │ │ └── big/ │ │ └── data/ │ │ └── impl/ │ │ └── shim/ │ │ └── tests/ │ │ └── TestShimLoadTest.java │ └── vfs-hdfs/ │ ├── pom.xml │ └── src/ │ ├── main/ │ │ ├── java/ │ │ │ └── org/ │ │ │ └── pentaho/ │ │ │ └── big/ │ │ │ └── data/ │ │ │ └── impl/ │ │ │ └── vfs/ │ │ │ └── hdfs/ │ │ │ ├── AzureHdInsightsFileNameParser.java │ │ │ ├── HDFSFileNameParser.java │ │ │ ├── HDFSFileObject.java │ │ │ ├── HDFSFileProvider.java │ │ │ ├── HDFSFileSystem.java │ │ │ ├── MapRFileNameParser.java │ │ │ └── nc/ │ │ │ ├── NamedClusterConfigBuilder.java │ │ │ ├── NamedClusterFileObject.java │ │ │ ├── NamedClusterFileSystem.java │ │ │ └── NamedClusterProvider.java │ │ └── resources/ │ │ └── OSGI-INF/ │ │ └── blueprint/ │ │ └── blueprint.xml │ └── test/ │ └── java/ │ └── org/ │ └── pentaho/ │ └── big/ │ └── data/ │ └── impl/ │ └── vfs/ │ └── hdfs/ │ ├── AzureFileNameParserTest.java │ ├── HDFSFileNameParserTest.java │ ├── HDFSFileObjectTest.java │ ├── HDFSFileProviderTest.java │ ├── HDFSFileSystemTest.java │ ├── MapRFileNameParserTest.java │ └── nc/ │ ├── NamedClusterConfigBuilderTest.java │ └── NamedClusterProviderTest.java ├── kettle-plugins/ │ ├── browse/ │ │ ├── pom.xml │ │ └── src/ │ │ └── main/ │ │ ├── java/ │ │ │ └── org/ │ │ │ └── pentaho/ │ │ │ └── big/ │ │ │ └── data/ │ │ │ └── impl/ │ │ │ └── browse/ │ │ │ ├── NamedClusterProvider.java │ │ │ └── model/ │ │ │ ├── NamedClusterDirectory.java │ │ │ ├── NamedClusterFile.java │ │ │ └── NamedClusterTree.java │ │ └── resources/ │ │ └── OSGI-INF/ │ │ └── blueprint/ │ │ └── blueprint.xml │ ├── common/ │ │ ├── job/ │ │ │ ├── pom.xml │ │ │ └── src/ │ │ │ ├── main/ │ │ │ │ ├── java/ │ │ │ │ │ └── org/ │ │ │ │ │ └── pentaho/ │ │ │ │ │ └── big/ │ │ │ │ │ └── data/ │ │ │ │ │ └── kettle/ │ │ │ │ │ └── plugins/ │ │ │ │ │ └── job/ │ │ │ │ │ ├── AbstractJobEntry.java │ │ │ │ │ ├── AbstractJobEntryController.java │ │ │ │ │ ├── BlockableJobConfig.java │ │ │ │ │ ├── JobEntryMode.java │ │ │ │ │ ├── JobEntrySerializationHelper.java │ │ │ │ │ ├── JobEntryUtils.java │ │ │ │ │ ├── Password.java │ │ │ │ │ └── PropertyEntry.java │ │ │ │ └── resources/ │ │ │ │ └── org/ │ │ │ │ └── pentaho/ │ │ │ │ └── big/ │ │ │ │ └── data/ │ │ │ │ └── kettle/ │ │ │ │ └── plugins/ │ │ │ │ └── job/ │ │ │ │ └── messages/ │ │ │ │ └── messages_en_US.properties │ │ │ └── test/ │ │ │ └── java/ │ │ │ └── org/ │ │ │ └── pentaho/ │ │ │ └── big/ │ │ │ └── data/ │ │ │ └── kettle/ │ │ │ └── plugins/ │ │ │ └── job/ │ │ │ ├── AbstractJobEntryTest.java │ │ │ ├── BlockableJobConfigTest.java │ │ │ └── JobEntryUtilsTest.java │ │ ├── pom.xml │ │ └── ui/ │ │ ├── pom.xml │ │ └── src/ │ │ ├── main/ │ │ │ ├── java/ │ │ │ │ └── org/ │ │ │ │ └── pentaho/ │ │ │ │ └── big/ │ │ │ │ └── data/ │ │ │ │ └── plugins/ │ │ │ │ └── common/ │ │ │ │ └── ui/ │ │ │ │ ├── ClusterTestDialog.java │ │ │ │ ├── ClusterTestResultsDialog.java │ │ 
│ │ ├── CommonDialogFactory.java │ │ │ │ ├── HadoopClusterDelegateImpl.java │ │ │ │ ├── NamedClusterComposite.java │ │ │ │ ├── NamedClusterDialogImpl.java │ │ │ │ ├── NamedClusterWidgetImpl.java │ │ │ │ ├── StateChangeListener.java │ │ │ │ ├── TestResultComposite.java │ │ │ │ └── VfsFileChooserHelper.java │ │ │ └── resources/ │ │ │ ├── apachesampleconfig.properties │ │ │ ├── apachevanillasampleconfig.properties │ │ │ ├── cdpdc71sampleconfig.properties │ │ │ ├── dataproc1421sampleconfig.properties │ │ │ ├── dataproc23sampleconfig.properties │ │ │ ├── emr521sampleconfig.properties │ │ │ ├── emr770sampleconfig.properties │ │ │ ├── hdi40sampleconfig.properties │ │ │ └── org/ │ │ │ └── pentaho/ │ │ │ └── big/ │ │ │ └── data/ │ │ │ └── plugins/ │ │ │ └── common/ │ │ │ └── ui/ │ │ │ └── messages/ │ │ │ └── messages_en_US.properties │ │ └── test/ │ │ └── java/ │ │ └── org/ │ │ └── pentaho/ │ │ └── big/ │ │ └── data/ │ │ └── plugins/ │ │ └── common/ │ │ └── ui/ │ │ ├── HadoopClusterDelegateImplTest.java │ │ └── TestClusterTestDialog.java │ ├── formats/ │ │ ├── assemblies/ │ │ │ ├── plugin/ │ │ │ │ ├── pom.xml │ │ │ │ └── src/ │ │ │ │ ├── assembly/ │ │ │ │ │ └── assembly.xml │ │ │ │ └── main/ │ │ │ │ └── resources/ │ │ │ │ └── version.xml │ │ │ └── pom.xml │ │ ├── core/ │ │ │ ├── pom.xml │ │ │ └── src/ │ │ │ ├── main/ │ │ │ │ ├── java/ │ │ │ │ │ └── org/ │ │ │ │ │ └── pentaho/ │ │ │ │ │ └── big/ │ │ │ │ │ └── data/ │ │ │ │ │ └── kettle/ │ │ │ │ │ └── plugins/ │ │ │ │ │ └── formats/ │ │ │ │ │ └── impl/ │ │ │ │ │ ├── NamedClusterResolver.java │ │ │ │ │ ├── NullableValuesEnum.java │ │ │ │ │ ├── orc/ │ │ │ │ │ │ ├── BaseOrcStepDialog.java │ │ │ │ │ │ ├── input/ │ │ │ │ │ │ │ ├── OrcInput.java │ │ │ │ │ │ │ ├── OrcInputData.java │ │ │ │ │ │ │ ├── OrcInputDialog.java │ │ │ │ │ │ │ └── OrcInputMeta.java │ │ │ │ │ │ └── output/ │ │ │ │ │ │ ├── OrcOutput.java │ │ │ │ │ │ ├── OrcOutputData.java │ │ │ │ │ │ ├── OrcOutputDialog.java │ │ │ │ │ │ └── OrcOutputMeta.java │ │ │ │ │ ├── output/ │ │ │ │ │ │ └── PvfsFileAliaser.java │ │ │ │ │ └── parquet/ │ │ │ │ │ ├── BaseParquetStepDialog.java │ │ │ │ │ ├── input/ │ │ │ │ │ │ ├── ParquetInput.java │ │ │ │ │ │ ├── ParquetInputData.java │ │ │ │ │ │ ├── ParquetInputDialog.java │ │ │ │ │ │ ├── ParquetInputMeta.java │ │ │ │ │ │ └── VFSScheme.java │ │ │ │ │ └── output/ │ │ │ │ │ ├── ParquetOutput.java │ │ │ │ │ ├── ParquetOutputData.java │ │ │ │ │ ├── ParquetOutputDialog.java │ │ │ │ │ └── ParquetOutputMeta.java │ │ │ │ └── resources/ │ │ │ │ ├── OSGI-INF/ │ │ │ │ │ └── blueprint/ │ │ │ │ │ └── blueprint.xml │ │ │ │ └── org/ │ │ │ │ └── pentaho/ │ │ │ │ └── big/ │ │ │ │ └── data/ │ │ │ │ └── kettle/ │ │ │ │ └── plugins/ │ │ │ │ └── formats/ │ │ │ │ └── impl/ │ │ │ │ ├── orc/ │ │ │ │ │ ├── input/ │ │ │ │ │ │ └── messages/ │ │ │ │ │ │ └── messages_en_US.properties │ │ │ │ │ ├── messages/ │ │ │ │ │ │ └── messages_en_US.properties │ │ │ │ │ └── output/ │ │ │ │ │ └── messages/ │ │ │ │ │ └── messages_en_US.properties │ │ │ │ └── parquet/ │ │ │ │ ├── input/ │ │ │ │ │ └── messages/ │ │ │ │ │ └── messages_en_US.properties │ │ │ │ ├── messages/ │ │ │ │ │ └── messages_en_US.properties │ │ │ │ └── output/ │ │ │ │ └── messages/ │ │ │ │ └── messages_en_US.properties │ │ │ └── test/ │ │ │ └── java/ │ │ │ └── org/ │ │ │ └── pentaho/ │ │ │ └── big/ │ │ │ └── data/ │ │ │ └── kettle/ │ │ │ └── plugins/ │ │ │ └── formats/ │ │ │ └── impl/ │ │ │ ├── NamedClusterResolverTest.java │ │ │ ├── orc/ │ │ │ │ ├── input/ │ │ │ │ │ ├── OrcInputMetaInjectionTest.java │ │ │ │ │ └── OrcInputTest.java │ │ │ 
│ └── output/ │ │ │ │ ├── OrcOutputMetaInjectionTest.java │ │ │ │ └── OrcOutputTest.java │ │ │ ├── output/ │ │ │ │ └── PvfsFileAliaserTest.java │ │ │ └── parquet/ │ │ │ ├── input/ │ │ │ │ ├── ParquetInputMetaInjectionTest.java │ │ │ │ └── ParquetInputTest.java │ │ │ └── output/ │ │ │ ├── ParquetOutputMetaInjectionTest.java │ │ │ └── ParquetOutputTest.java │ │ └── pom.xml │ ├── formats-meta/ │ │ ├── pom.xml │ │ └── src/ │ │ ├── main/ │ │ │ ├── java/ │ │ │ │ └── org/ │ │ │ │ └── pentaho/ │ │ │ │ └── big/ │ │ │ │ └── data/ │ │ │ │ └── kettle/ │ │ │ │ └── plugins/ │ │ │ │ └── formats/ │ │ │ │ ├── BaseFormatInputField.java │ │ │ │ ├── BaseFormatOutputField.java │ │ │ │ ├── FormatInputFile.java │ │ │ │ ├── FormatInputOutputField.java │ │ │ │ ├── orc/ │ │ │ │ │ ├── OrcFormatInputOutputField.java │ │ │ │ │ ├── OrcInputField.java │ │ │ │ │ ├── OrcTypeConverter.java │ │ │ │ │ ├── input/ │ │ │ │ │ │ └── OrcInputMetaBase.java │ │ │ │ │ └── output/ │ │ │ │ │ ├── OrcOutputField.java │ │ │ │ │ └── OrcOutputMetaBase.java │ │ │ │ └── parquet/ │ │ │ │ ├── ParquetTypeConverter.java │ │ │ │ ├── input/ │ │ │ │ │ ├── ParquetInputField.java │ │ │ │ │ └── ParquetInputMetaBase.java │ │ │ │ └── output/ │ │ │ │ ├── ParquetOutputField.java │ │ │ │ └── ParquetOutputMetaBase.java │ │ │ └── resources/ │ │ │ └── org/ │ │ │ └── pentaho/ │ │ │ └── big/ │ │ │ └── data/ │ │ │ └── kettle/ │ │ │ └── plugins/ │ │ │ └── formats/ │ │ │ └── parquet/ │ │ │ └── output/ │ │ │ └── messages/ │ │ │ └── messages_en_US.properties │ │ └── test/ │ │ ├── java/ │ │ │ └── org/ │ │ │ └── pentaho/ │ │ │ └── big/ │ │ │ └── data/ │ │ │ └── kettle/ │ │ │ └── plugins/ │ │ │ └── formats/ │ │ │ ├── orc/ │ │ │ │ ├── OrcInputFieldTest.java │ │ │ │ ├── input/ │ │ │ │ │ └── OrcInputMetaBaseTest.java │ │ │ │ └── output/ │ │ │ │ ├── OrcOutputFieldTest.java │ │ │ │ └── OrcOutputMetabaseTest.java │ │ │ └── parquet/ │ │ │ ├── input/ │ │ │ │ └── ParquetInputMetaBaseTest.java │ │ │ └── output/ │ │ │ └── ParquetOutputMetaBaseTest.java │ │ └── resources/ │ │ └── org/ │ │ └── pentaho/ │ │ └── big/ │ │ └── data/ │ │ └── kettle/ │ │ └── plugins/ │ │ └── formats/ │ │ └── orc/ │ │ └── input/ │ │ └── OrcInput.xml │ ├── guiTestActionHandlers/ │ │ ├── pom.xml │ │ └── src/ │ │ └── main/ │ │ ├── java/ │ │ │ └── org/ │ │ │ └── pentaho/ │ │ │ └── big/ │ │ │ └── data/ │ │ │ └── plugins/ │ │ │ └── gui/ │ │ │ └── test/ │ │ │ └── actionHandlers/ │ │ │ └── ShowHelpDialogActionHandler.java │ │ └── resources/ │ │ └── OSGI-INF/ │ │ └── blueprint/ │ │ └── blueprint.xml │ ├── hadoop-cluster/ │ │ ├── pom.xml │ │ └── ui/ │ │ ├── pom.xml │ │ └── src/ │ │ ├── main/ │ │ │ ├── java/ │ │ │ │ └── org/ │ │ │ │ └── pentaho/ │ │ │ │ └── big/ │ │ │ │ └── data/ │ │ │ │ └── kettle/ │ │ │ │ └── plugins/ │ │ │ │ └── hadoopcluster/ │ │ │ │ └── ui/ │ │ │ │ ├── dialog/ │ │ │ │ │ ├── HadoopClusterDelegate.java │ │ │ │ │ ├── HadoopClusterDialog.java │ │ │ │ │ └── wizard/ │ │ │ │ │ ├── NamedClusterDialog.java │ │ │ │ │ ├── pages/ │ │ │ │ │ │ ├── ClusterSettingsPage.java │ │ │ │ │ │ ├── KerberosSettingsPage.java │ │ │ │ │ │ ├── KnoxSettingsPage.java │ │ │ │ │ │ ├── ReportPage.java │ │ │ │ │ │ ├── SecuritySettingsPage.java │ │ │ │ │ │ └── TestResultsPage.java │ │ │ │ │ └── util/ │ │ │ │ │ ├── BadSiteFilesException.java │ │ │ │ │ ├── CustomWizardDialog.java │ │ │ │ │ └── NamedClusterHelper.java │ │ │ │ ├── endpoints/ │ │ │ │ │ ├── CachedFileItemStream.java │ │ │ │ │ ├── Category.java │ │ │ │ │ ├── HadoopClusterManager.java │ │ │ │ │ ├── Test.java │ │ │ │ │ └── TestCategory.java │ │ │ │ ├── lifecycle/ │ │ │ │ │ 
└── HadoopClusterLifecycleListener.java │ │ │ │ ├── model/ │ │ │ │ │ └── ThinNameClusterModel.java │ │ │ │ └── tree/ │ │ │ │ ├── HadoopClusterPopupMenuExtension.java │ │ │ │ ├── ThinHadoopClusterEditExtension.java │ │ │ │ ├── ThinHadoopClusterFolderProvider.java │ │ │ │ └── ThinHadoopClusterTreeDelegateExtension.java │ │ │ └── resources/ │ │ │ ├── kettle-password-encoder-plugins.xml │ │ │ └── org/ │ │ │ └── pentaho/ │ │ │ └── big/ │ │ │ └── data/ │ │ │ └── kettle/ │ │ │ └── plugins/ │ │ │ └── hadoopcluster/ │ │ │ └── ui/ │ │ │ ├── dialog/ │ │ │ │ ├── messages/ │ │ │ │ │ ├── messages.properties │ │ │ │ │ └── messages_en_US.properties │ │ │ │ └── wizard/ │ │ │ │ └── pages/ │ │ │ │ └── messages/ │ │ │ │ ├── messages.properties │ │ │ │ └── messages_en_US.properties │ │ │ ├── messages/ │ │ │ │ └── messages_en_US.properties │ │ │ └── tree/ │ │ │ └── messages/ │ │ │ └── messages_en_US.properties │ │ └── test/ │ │ ├── java/ │ │ │ └── org/ │ │ │ └── pentaho/ │ │ │ └── big/ │ │ │ └── data/ │ │ │ └── kettle/ │ │ │ └── plugins/ │ │ │ └── hadoopcluster/ │ │ │ └── ui/ │ │ │ └── endpoints/ │ │ │ └── HadoopClusterManagerTest.java │ │ └── resources/ │ │ ├── bad/ │ │ │ └── core-site.xml │ │ ├── dataproc/ │ │ │ ├── core-site.xml │ │ │ ├── hdfs-site.xml │ │ │ ├── hive-site.xml │ │ │ ├── mapred-site.xml │ │ │ └── yarn-site.xml │ │ ├── driver-source/ │ │ │ └── driver.kar │ │ ├── keytab/ │ │ │ └── test.keytab │ │ ├── missing-info/ │ │ │ ├── core-site.xml │ │ │ ├── hive-site.xml │ │ │ ├── oozie-default.xml │ │ │ └── yarn-site.xml │ │ ├── secured/ │ │ │ ├── core-site.xml │ │ │ ├── hive-site.xml │ │ │ └── yarn-site.xml │ │ └── unsecured/ │ │ ├── core-site.xml │ │ ├── hive-site.xml │ │ ├── oozie-default.xml │ │ └── yarn-site.xml │ ├── hbase/ │ │ ├── assemblies/ │ │ │ ├── plugin/ │ │ │ │ ├── pom.xml │ │ │ │ └── src/ │ │ │ │ ├── assembly/ │ │ │ │ │ └── assembly.xml │ │ │ │ └── main/ │ │ │ │ └── resources/ │ │ │ │ └── version.xml │ │ │ └── pom.xml │ │ ├── core/ │ │ │ ├── pom.xml │ │ │ └── src/ │ │ │ ├── main/ │ │ │ │ ├── java/ │ │ │ │ │ └── org/ │ │ │ │ │ └── pentaho/ │ │ │ │ │ └── big/ │ │ │ │ │ └── data/ │ │ │ │ │ └── kettle/ │ │ │ │ │ └── plugins/ │ │ │ │ │ └── hbase/ │ │ │ │ │ ├── FilterDefinition.java │ │ │ │ │ ├── HBaseConnectionException.java │ │ │ │ │ ├── HbaseUtil.java │ │ │ │ │ ├── MappingDefinition.java │ │ │ │ │ ├── NamedClusterLoadSaveUtil.java │ │ │ │ │ ├── ServiceStatus.java │ │ │ │ │ ├── input/ │ │ │ │ │ │ ├── HBaseInput.java │ │ │ │ │ │ ├── HBaseInputData.java │ │ │ │ │ │ ├── HBaseInputDialog.java │ │ │ │ │ │ ├── HBaseInputMeta.java │ │ │ │ │ │ ├── Messages.java │ │ │ │ │ │ └── OutputFieldDefinition.java │ │ │ │ │ ├── mapping/ │ │ │ │ │ │ ├── ConfigurationProducer.java │ │ │ │ │ │ ├── FieldProducer.java │ │ │ │ │ │ ├── HBaseRowToKettleTuple.java │ │ │ │ │ │ ├── MappingAdmin.java │ │ │ │ │ │ ├── MappingEditor.java │ │ │ │ │ │ └── MappingUtils.java │ │ │ │ │ ├── output/ │ │ │ │ │ │ ├── HBaseOutput.java │ │ │ │ │ │ ├── HBaseOutputData.java │ │ │ │ │ │ ├── HBaseOutputDialog.java │ │ │ │ │ │ ├── HBaseOutputMeta.java │ │ │ │ │ │ ├── KettleRowToHBaseTuple.java │ │ │ │ │ │ └── Messages.java │ │ │ │ │ └── rowdecoder/ │ │ │ │ │ ├── HBaseRowDecoder.java │ │ │ │ │ ├── HBaseRowDecoderData.java │ │ │ │ │ ├── HBaseRowDecoderDialog.java │ │ │ │ │ └── HBaseRowDecoderMeta.java │ │ │ │ └── resources/ │ │ │ │ ├── OSGI-INF/ │ │ │ │ │ └── blueprint/ │ │ │ │ │ └── blueprint.xml │ │ │ │ └── org/ │ │ │ │ └── pentaho/ │ │ │ │ └── big/ │ │ │ │ └── data/ │ │ │ │ └── kettle/ │ │ │ │ └── plugins/ │ │ │ │ └── hbase/ │ │ │ │ ├── 
input/ │ │ │ │ │ └── messages/ │ │ │ │ │ └── messages_en_US.properties │ │ │ │ ├── mapping/ │ │ │ │ │ └── messages/ │ │ │ │ │ └── messages_en_US.properties │ │ │ │ ├── output/ │ │ │ │ │ └── messages/ │ │ │ │ │ └── messages_en_US.properties │ │ │ │ └── rowdecoder/ │ │ │ │ └── messages/ │ │ │ │ └── messages_en_US.properties │ │ │ └── test/ │ │ │ ├── java/ │ │ │ │ └── org/ │ │ │ │ └── pentaho/ │ │ │ │ └── big/ │ │ │ │ └── data/ │ │ │ │ └── kettle/ │ │ │ │ └── plugins/ │ │ │ │ └── hbase/ │ │ │ │ ├── HbaseUtilTest.java │ │ │ │ ├── LogInjector.java │ │ │ │ ├── NamedClusterLoadSaveUtilTest.java │ │ │ │ ├── input/ │ │ │ │ │ ├── HBaseInputMetaInjectionTest.java │ │ │ │ │ └── HBaseInputMetaTest.java │ │ │ │ ├── mapping/ │ │ │ │ │ ├── MappingAdminTest.java │ │ │ │ │ ├── MappingUtilsTest.java │ │ │ │ │ └── MockHBaseByteConverterUsingJavaByteBuffer.java │ │ │ │ ├── output/ │ │ │ │ │ ├── HBaseOutputMetaInjectionTest.java │ │ │ │ │ ├── HBaseOutputMetaTest.java │ │ │ │ │ └── KettleRowToHBaseTupleTest.java │ │ │ │ └── rowdecoder/ │ │ │ │ ├── HBaseRowDecoderMetaInjectionTest.java │ │ │ │ └── HBaseRowDecoderMetaTest.java │ │ │ └── resources/ │ │ │ └── StubMapping.xml │ │ └── pom.xml │ ├── hbase-meta/ │ │ ├── pom.xml │ │ └── src/ │ │ ├── main/ │ │ │ └── java/ │ │ │ └── org/ │ │ │ └── pentaho/ │ │ │ └── big/ │ │ │ └── data/ │ │ │ └── kettle/ │ │ │ └── plugins/ │ │ │ └── hbase/ │ │ │ └── meta/ │ │ │ ├── AELHBaseMappingImpl.java │ │ │ └── AELHBaseValueMetaImpl.java │ │ └── test/ │ │ ├── java/ │ │ │ └── org/ │ │ │ └── pentaho/ │ │ │ └── big/ │ │ │ └── data/ │ │ │ └── kettle/ │ │ │ └── plugins/ │ │ │ └── hbase/ │ │ │ └── meta/ │ │ │ ├── AELHBaseMappingTest.java │ │ │ └── AELHBaseValueMetaTest.java │ │ └── resources/ │ │ └── StubMapping.xml │ ├── hdfs/ │ │ ├── assemblies/ │ │ │ ├── plugin/ │ │ │ │ ├── pom.xml │ │ │ │ └── src/ │ │ │ │ ├── assembly/ │ │ │ │ │ └── assembly.xml │ │ │ │ └── main/ │ │ │ │ └── resources/ │ │ │ │ └── version.xml │ │ │ └── pom.xml │ │ ├── core/ │ │ │ ├── pom.xml │ │ │ └── src/ │ │ │ ├── main/ │ │ │ │ ├── java/ │ │ │ │ │ └── org/ │ │ │ │ │ └── pentaho/ │ │ │ │ │ └── big/ │ │ │ │ │ └── data/ │ │ │ │ │ └── kettle/ │ │ │ │ │ └── plugins/ │ │ │ │ │ └── hdfs/ │ │ │ │ │ ├── HdfsLifecycleListener.java │ │ │ │ │ ├── job/ │ │ │ │ │ │ ├── JobEntryHadoopCopyFiles.java │ │ │ │ │ │ └── JobEntryHadoopCopyFilesDialog.java │ │ │ │ │ ├── trans/ │ │ │ │ │ │ ├── HadoopFileInputDialog.java │ │ │ │ │ │ ├── HadoopFileInputMeta.java │ │ │ │ │ │ ├── HadoopFileMeta.java │ │ │ │ │ │ ├── HadoopFileOutputDialog.java │ │ │ │ │ │ ├── HadoopFileOutputMeta.java │ │ │ │ │ │ ├── HadoopInputFileSelectionAdapter.java │ │ │ │ │ │ └── analyzer/ │ │ │ │ │ │ ├── HadoopBaseStepAnalyzer.java │ │ │ │ │ │ ├── HadoopFileInputExternalResourceConsumer.java │ │ │ │ │ │ ├── HadoopFileInputStepAnalyzer.java │ │ │ │ │ │ ├── HadoopFileOutputExternalResourceConsumer.java │ │ │ │ │ │ └── HadoopFileOutputStepAnalyzer.java │ │ │ │ │ └── vfs/ │ │ │ │ │ ├── HadoopVfsConnection.java │ │ │ │ │ ├── HadoopVfsFileChooserDialog.java │ │ │ │ │ ├── MapRFSFileChooserDialog.java │ │ │ │ │ ├── NamedClusterVfsFileChooserDialog.java │ │ │ │ │ └── Schemes.java │ │ │ │ └── resources/ │ │ │ │ ├── OSGI-INF/ │ │ │ │ │ └── blueprint/ │ │ │ │ │ └── blueprint.xml │ │ │ │ ├── graph.properties │ │ │ │ └── org/ │ │ │ │ └── pentaho/ │ │ │ │ └── big/ │ │ │ │ └── data/ │ │ │ │ └── kettle/ │ │ │ │ └── plugins/ │ │ │ │ └── hdfs/ │ │ │ │ ├── job/ │ │ │ │ │ └── messages/ │ │ │ │ │ ├── messages_en_US.properties │ │ │ │ │ └── messages_ko_KR.properties │ │ │ │ ├── trans/ │ │ │ │ │ 
└── messages/ │ │ │ │ │ ├── messages_en_US.properties │ │ │ │ │ └── messages_ko_KR.properties │ │ │ │ └── vfs/ │ │ │ │ └── messages/ │ │ │ │ ├── messages_en_US.properties │ │ │ │ └── messages_ko_KR.properties │ │ │ └── test/ │ │ │ ├── java/ │ │ │ │ └── org/ │ │ │ │ └── pentaho/ │ │ │ │ └── big/ │ │ │ │ └── data/ │ │ │ │ └── kettle/ │ │ │ │ └── plugins/ │ │ │ │ └── hdfs/ │ │ │ │ ├── job/ │ │ │ │ │ ├── JobEntryHadoopCopyFilesLoadSaveTest.java │ │ │ │ │ └── JobEntryHadoopCopyFilesTest.java │ │ │ │ ├── trans/ │ │ │ │ │ ├── HadoopFileInputDialogTest.java │ │ │ │ │ ├── HadoopFileInputMetaTest.java │ │ │ │ │ ├── HadoopFileOutputDialogTest.java │ │ │ │ │ ├── HadoopFileOutputMetaTest.java │ │ │ │ │ └── analyzer/ │ │ │ │ │ ├── HadoopBaseStepAnalyzerTest.java │ │ │ │ │ ├── HadoopFileInputStepAnalyzerTest.java │ │ │ │ │ └── HadoopFileOutputStepAnalyzerTest.java │ │ │ │ └── vfs/ │ │ │ │ ├── HadoopVfsConnectionTest.java │ │ │ │ └── HadoopVfsFileChooserDialogTest.java │ │ │ └── resources/ │ │ │ ├── graph.properties │ │ │ ├── sample-hadoop-file-input-step.xml │ │ │ └── sample-hadoop-file-output-step.xml │ │ └── pom.xml │ ├── hive/ │ │ ├── assemblies/ │ │ │ ├── plugin/ │ │ │ │ ├── pom.xml │ │ │ │ └── src/ │ │ │ │ ├── assembly/ │ │ │ │ │ └── assembly.xml │ │ │ │ └── main/ │ │ │ │ └── resources/ │ │ │ │ └── version.xml │ │ │ └── pom.xml │ │ ├── core/ │ │ │ ├── pom.xml │ │ │ └── src/ │ │ │ ├── main/ │ │ │ │ └── java/ │ │ │ │ └── org/ │ │ │ │ ├── apache/ │ │ │ │ │ ├── hadoop/ │ │ │ │ │ │ └── hive/ │ │ │ │ │ │ └── jdbc/ │ │ │ │ │ │ └── HiveDriver.java │ │ │ │ │ └── hive/ │ │ │ │ │ └── jdbc/ │ │ │ │ │ ├── HiveDriver.java │ │ │ │ │ ├── HiveSimbaDriver.java │ │ │ │ │ ├── ImpalaDriver.java │ │ │ │ │ ├── ImpalaSimbaDriver.java │ │ │ │ │ └── SparkSqlSimbaDriver.java │ │ │ │ └── pentaho/ │ │ │ │ └── big/ │ │ │ │ └── data/ │ │ │ │ └── kettle/ │ │ │ │ └── plugins/ │ │ │ │ └── hive/ │ │ │ │ ├── Activator.java │ │ │ │ ├── BaseSimbaDatabaseMeta.java │ │ │ │ ├── DatabaseMetaWithVersion.java │ │ │ │ ├── DummyDriver.java │ │ │ │ ├── Hive2DatabaseDialect.java │ │ │ │ ├── Hive2DatabaseMeta.java │ │ │ │ ├── Hive2SimbaDatabaseDialect.java │ │ │ │ ├── Hive2SimbaDatabaseMeta.java │ │ │ │ ├── HiveDatabaseDialect.java │ │ │ │ ├── HiveDatabaseMeta.java │ │ │ │ ├── HiveWarehouseDatabaseMeta.java │ │ │ │ ├── ImpalaDatabaseDialect.java │ │ │ │ ├── ImpalaDatabaseMeta.java │ │ │ │ ├── ImpalaSimbaDatabaseDialect.java │ │ │ │ ├── ImpalaSimbaDatabaseMeta.java │ │ │ │ ├── SimbaUrl.java │ │ │ │ ├── SparkSimbaDatabaseDialect.java │ │ │ │ └── SparkSimbaDatabaseMeta.java │ │ │ └── test/ │ │ │ └── java/ │ │ │ └── org/ │ │ │ ├── apache/ │ │ │ │ ├── hadoop/ │ │ │ │ │ └── hive/ │ │ │ │ │ └── jdbc/ │ │ │ │ │ └── HiveDriverTest.java │ │ │ │ └── hive/ │ │ │ │ └── jdbc/ │ │ │ │ ├── HiveDriverTest.java │ │ │ │ ├── HiveSimbaDriverTest.java │ │ │ │ ├── ImpalaDriverTest.java │ │ │ │ └── ImpalaSimbaDriverTest.java │ │ │ └── pentaho/ │ │ │ └── big/ │ │ │ └── data/ │ │ │ └── kettle/ │ │ │ └── plugins/ │ │ │ └── hive/ │ │ │ ├── BaseSimbaDatabaseMetaTest.java │ │ │ ├── Hive2DatabaseDialectTest.java │ │ │ ├── Hive2DatabaseMetaTest.java │ │ │ ├── Hive2SimbaDatabaseDialectTest.java │ │ │ ├── Hive2SimbaDatabaseMetaTest.java │ │ │ ├── HiveDatabaseDialectTest.java │ │ │ ├── HiveDatabaseMetaTest.java │ │ │ ├── ImpalaDatabaseDialectTest.java │ │ │ ├── ImpalaDatabaseMetaTest.java │ │ │ ├── ImpalaSimbaDatabaseDialectTest.java │ │ │ ├── ImpalaSimbaDatabaseMetaTest.java │ │ │ ├── SimbaUrlTest.java │ │ │ ├── SparkSimbaDatabaseDialectTest.java │ │ │ └── 
SparkSimbaDatabaseMetaTest.java │ │ └── pom.xml │ ├── mapreduce/ │ │ ├── assemblies/ │ │ │ ├── plugin/ │ │ │ │ ├── pom.xml │ │ │ │ └── src/ │ │ │ │ ├── assembly/ │ │ │ │ │ └── assembly.xml │ │ │ │ └── main/ │ │ │ │ └── resources/ │ │ │ │ └── version.xml │ │ │ └── pom.xml │ │ ├── core/ │ │ │ ├── pom.xml │ │ │ └── src/ │ │ │ ├── main/ │ │ │ │ ├── java/ │ │ │ │ │ └── org/ │ │ │ │ │ └── pentaho/ │ │ │ │ │ └── big/ │ │ │ │ │ └── data/ │ │ │ │ │ └── kettle/ │ │ │ │ │ └── plugins/ │ │ │ │ │ └── mapreduce/ │ │ │ │ │ ├── DialogClassUtil.java │ │ │ │ │ ├── entry/ │ │ │ │ │ │ ├── NamedClusterLoadSaveUtil.java │ │ │ │ │ │ ├── UserDefinedItem.java │ │ │ │ │ │ ├── hadoop/ │ │ │ │ │ │ │ └── JobEntryHadoopJobExecutor.java │ │ │ │ │ │ └── pmr/ │ │ │ │ │ │ └── JobEntryHadoopTransJobExecutor.java │ │ │ │ │ ├── step/ │ │ │ │ │ │ ├── enter/ │ │ │ │ │ │ │ └── HadoopEnterMeta.java │ │ │ │ │ │ └── exit/ │ │ │ │ │ │ ├── HadoopExit.java │ │ │ │ │ │ ├── HadoopExitData.java │ │ │ │ │ │ └── HadoopExitMeta.java │ │ │ │ │ └── ui/ │ │ │ │ │ ├── entry/ │ │ │ │ │ │ ├── hadoop/ │ │ │ │ │ │ │ ├── JobEntryHadoopJobExecutorController.java │ │ │ │ │ │ │ └── JobEntryHadoopJobExecutorDialog.java │ │ │ │ │ │ └── pmr/ │ │ │ │ │ │ ├── JobEntryHadoopTransJobExecutorController.java │ │ │ │ │ │ └── JobEntryHadoopTransJobExecutorDialog.java │ │ │ │ │ └── step/ │ │ │ │ │ ├── enter/ │ │ │ │ │ │ ├── HadoopEnterDialog.java │ │ │ │ │ │ └── HadoopEnterMetaMapper.java │ │ │ │ │ └── exit/ │ │ │ │ │ ├── HadoopExitDialog.java │ │ │ │ │ └── HadoopExitMetaMapper.java │ │ │ │ └── resources/ │ │ │ │ ├── OSGI-INF/ │ │ │ │ │ └── blueprint/ │ │ │ │ │ └── blueprint.xml │ │ │ │ └── org/ │ │ │ │ └── pentaho/ │ │ │ │ └── big/ │ │ │ │ └── data/ │ │ │ │ └── kettle/ │ │ │ │ └── plugins/ │ │ │ │ └── mapreduce/ │ │ │ │ ├── entry/ │ │ │ │ │ ├── hadoop/ │ │ │ │ │ │ └── messages/ │ │ │ │ │ │ ├── messages_en_US.properties │ │ │ │ │ │ └── messages_ko_KR.properties │ │ │ │ │ └── pmr/ │ │ │ │ │ └── messages/ │ │ │ │ │ └── messages_en_US.properties │ │ │ │ ├── step/ │ │ │ │ │ ├── enter/ │ │ │ │ │ │ └── messages/ │ │ │ │ │ │ └── messages_en_US.properties │ │ │ │ │ └── exit/ │ │ │ │ │ └── messages/ │ │ │ │ │ └── messages_en_US.properties │ │ │ │ └── ui/ │ │ │ │ ├── entry/ │ │ │ │ │ ├── JobEntryHadoopJobExecutorDialog.xul │ │ │ │ │ └── JobEntryHadoopTransJobExecutorDialog.xul │ │ │ │ └── step/ │ │ │ │ ├── enter/ │ │ │ │ │ └── dialog.xul │ │ │ │ └── exit/ │ │ │ │ └── dialog.xul │ │ │ └── test/ │ │ │ ├── java/ │ │ │ │ └── org/ │ │ │ │ └── pentaho/ │ │ │ │ └── big/ │ │ │ │ └── data/ │ │ │ │ └── kettle/ │ │ │ │ └── plugins/ │ │ │ │ └── mapreduce/ │ │ │ │ ├── DialogClassUtilTest.java │ │ │ │ ├── JobEntryHadoopTransJobExecutorTest.java │ │ │ │ ├── entry/ │ │ │ │ │ ├── NamedClusterLoadSaveUtilTest.java │ │ │ │ │ ├── UserDefinedItemTest.java │ │ │ │ │ ├── hadoop/ │ │ │ │ │ │ └── JobEntryHadoopJobExecutorTest.java │ │ │ │ │ └── pmr/ │ │ │ │ │ └── JobEntryHadoopTransJobExecutorTest.java │ │ │ │ ├── step/ │ │ │ │ │ ├── HadoopExitMetaTest.java │ │ │ │ │ ├── enter/ │ │ │ │ │ │ ├── HadoopEnterMetaInjectionTest.java │ │ │ │ │ │ └── HadoopEnterMetaTest.java │ │ │ │ │ └── exit/ │ │ │ │ │ ├── HadoopExitDataTest.java │ │ │ │ │ ├── HadoopExitMetaInjectionTest.java │ │ │ │ │ ├── HadoopExitMetaTest.java │ │ │ │ │ └── HadoopExitTest.java │ │ │ │ └── ui/ │ │ │ │ └── entry/ │ │ │ │ └── pmr/ │ │ │ │ └── JobEntryHadoopTransJobExecutorControllerTest.java │ │ │ └── resources/ │ │ │ └── testTrans.ktr │ │ └── pom.xml │ ├── oozie/ │ │ ├── assemblies/ │ │ │ ├── plugin/ │ │ │ │ ├── pom.xml │ │ │ │ └── src/ 
│ │ │ │ ├── assembly/ │ │ │ │ │ └── assembly.xml │ │ │ │ └── main/ │ │ │ │ └── resources/ │ │ │ │ └── version.xml │ │ │ └── pom.xml │ │ ├── core/ │ │ │ ├── pom.xml │ │ │ └── src/ │ │ │ ├── main/ │ │ │ │ ├── java/ │ │ │ │ │ └── org/ │ │ │ │ │ └── pentaho/ │ │ │ │ │ └── big/ │ │ │ │ │ └── data/ │ │ │ │ │ └── kettle/ │ │ │ │ │ └── plugins/ │ │ │ │ │ └── oozie/ │ │ │ │ │ ├── OozieJobExecutorConfig.java │ │ │ │ │ ├── OozieJobExecutorJobEntry.java │ │ │ │ │ ├── OozieJobExecutorJobEntryController.java │ │ │ │ │ └── OozieJobExecutorJobEntryDialog.java │ │ │ │ └── resources/ │ │ │ │ └── org/ │ │ │ │ └── pentaho/ │ │ │ │ └── big/ │ │ │ │ └── data/ │ │ │ │ └── kettle/ │ │ │ │ └── plugins/ │ │ │ │ └── oozie/ │ │ │ │ ├── messages/ │ │ │ │ │ └── messages_en_US.properties │ │ │ │ └── xul/ │ │ │ │ ├── OozieJobExecutor.xul │ │ │ │ └── button-bar.xul │ │ │ └── test/ │ │ │ ├── java/ │ │ │ │ └── org/ │ │ │ │ └── pentaho/ │ │ │ │ └── big/ │ │ │ │ └── data/ │ │ │ │ └── kettle/ │ │ │ │ └── plugins/ │ │ │ │ └── oozie/ │ │ │ │ ├── OozieJobExecutorConfigTest.java │ │ │ │ ├── OozieJobExecutorControllerTest.java │ │ │ │ └── OozieJobExecutorJobEntryTest.java │ │ │ └── resources/ │ │ │ ├── badJob.properties │ │ │ └── job.properties │ │ └── pom.xml │ ├── pig/ │ │ ├── assemblies/ │ │ │ ├── plugin/ │ │ │ │ ├── pom.xml │ │ │ │ └── src/ │ │ │ │ ├── assembly/ │ │ │ │ │ └── assembly.xml │ │ │ │ └── main/ │ │ │ │ └── resources/ │ │ │ │ └── version.xml │ │ │ └── pom.xml │ │ ├── core/ │ │ │ ├── pom.xml │ │ │ └── src/ │ │ │ ├── main/ │ │ │ │ ├── java/ │ │ │ │ │ └── org/ │ │ │ │ │ └── pentaho/ │ │ │ │ │ └── big/ │ │ │ │ │ └── data/ │ │ │ │ │ └── kettle/ │ │ │ │ │ └── plugins/ │ │ │ │ │ └── pig/ │ │ │ │ │ ├── JobEntryPigScriptExecutor.java │ │ │ │ │ └── JobEntryPigScriptExecutorDialog.java │ │ │ │ └── resources/ │ │ │ │ └── org/ │ │ │ │ └── pentaho/ │ │ │ │ └── big/ │ │ │ │ └── data/ │ │ │ │ └── kettle/ │ │ │ │ └── plugins/ │ │ │ │ └── pig/ │ │ │ │ └── messages/ │ │ │ │ └── messages_en_US.properties │ │ │ └── test/ │ │ │ ├── java/ │ │ │ │ └── org/ │ │ │ │ └── pentaho/ │ │ │ │ └── big/ │ │ │ │ └── data/ │ │ │ │ └── kettle/ │ │ │ │ └── plugins/ │ │ │ │ └── pig/ │ │ │ │ ├── JobEntryPigScriptExecutorTest.java │ │ │ │ └── PigNamedClusterValidator.java │ │ │ └── resources/ │ │ │ └── org/ │ │ │ └── pentaho/ │ │ │ └── big/ │ │ │ └── data/ │ │ │ └── kettle/ │ │ │ └── plugins/ │ │ │ └── pig/ │ │ │ └── pig.script │ │ └── pom.xml │ ├── pom.xml │ ├── spark/ │ │ ├── README.md │ │ ├── assemblies/ │ │ │ ├── plugin/ │ │ │ │ ├── pom.xml │ │ │ │ └── src/ │ │ │ │ ├── assembly/ │ │ │ │ │ └── assembly.xml │ │ │ │ └── main/ │ │ │ │ └── resources/ │ │ │ │ └── version.xml │ │ │ └── pom.xml │ │ ├── core/ │ │ │ ├── pom.xml │ │ │ └── src/ │ │ │ ├── main/ │ │ │ │ ├── java/ │ │ │ │ │ └── org/ │ │ │ │ │ └── pentaho/ │ │ │ │ │ └── di/ │ │ │ │ │ ├── job/ │ │ │ │ │ │ └── entries/ │ │ │ │ │ │ └── spark/ │ │ │ │ │ │ ├── JobEntrySparkSubmit.java │ │ │ │ │ │ ├── JobEntrySparkSubmitAnalyzer.java │ │ │ │ │ │ ├── PatternMatchingStreamLogger.java │ │ │ │ │ │ └── WinProcess.java │ │ │ │ │ └── ui/ │ │ │ │ │ └── job/ │ │ │ │ │ └── entries/ │ │ │ │ │ └── spark/ │ │ │ │ │ └── JobEntrySparkSubmitDialog.java │ │ │ │ └── resources/ │ │ │ │ └── org/ │ │ │ │ └── pentaho/ │ │ │ │ └── di/ │ │ │ │ └── job/ │ │ │ │ └── entries/ │ │ │ │ └── spark/ │ │ │ │ └── messages/ │ │ │ │ └── messages_en_US.properties │ │ │ └── test/ │ │ │ ├── java/ │ │ │ │ └── org/ │ │ │ │ └── pentaho/ │ │ │ │ └── di/ │ │ │ │ └── job/ │ │ │ │ └── entries/ │ │ │ │ └── spark/ │ │ │ │ ├── 
JobEntrySparkSubmitLoadSaveTest.java │ │ │ │ ├── JobEntrySparkSubmitTest.java │ │ │ │ ├── PatternMatchingStreamLoggerTest.java │ │ │ │ └── WinProcessTest.java │ │ │ └── resources/ │ │ │ ├── ChildProcessTester.java │ │ │ └── process.cmd │ │ └── pom.xml │ └── sqoop/ │ ├── assemblies/ │ │ ├── plugin/ │ │ │ ├── pom.xml │ │ │ └── src/ │ │ │ ├── assembly/ │ │ │ │ └── assembly.xml │ │ │ └── main/ │ │ │ └── resources/ │ │ │ └── version.xml │ │ └── pom.xml │ ├── core/ │ │ ├── pom.xml │ │ └── src/ │ │ ├── main/ │ │ │ ├── java/ │ │ │ │ └── org/ │ │ │ │ └── pentaho/ │ │ │ │ └── big/ │ │ │ │ └── data/ │ │ │ │ └── kettle/ │ │ │ │ └── plugins/ │ │ │ │ └── sqoop/ │ │ │ │ ├── AbstractSqoopJobEntry.java │ │ │ │ ├── ArgumentWrapper.java │ │ │ │ ├── CommandLineArgument.java │ │ │ │ ├── DatabaseItem.java │ │ │ │ ├── LoggingProxy.java │ │ │ │ ├── SqoopConfig.java │ │ │ │ ├── SqoopExportConfig.java │ │ │ │ ├── SqoopExportJobEntry.java │ │ │ │ ├── SqoopExportJobEntryDialog.java │ │ │ │ ├── SqoopImportConfig.java │ │ │ │ ├── SqoopImportJobEntry.java │ │ │ │ ├── SqoopImportJobEntryDialog.java │ │ │ │ ├── SqoopLog4jFilter.java │ │ │ │ ├── SqoopUtils.java │ │ │ │ └── ui/ │ │ │ │ ├── AbstractSqoopJobEntryController.java │ │ │ │ ├── AbstractSqoopJobEntryDialog.java │ │ │ │ ├── SqoopExportJobEntryController.java │ │ │ │ └── SqoopImportJobEntryController.java │ │ │ └── resources/ │ │ │ └── org/ │ │ │ └── pentaho/ │ │ │ └── big/ │ │ │ └── data/ │ │ │ └── kettle/ │ │ │ └── plugins/ │ │ │ └── sqoop/ │ │ │ ├── messages/ │ │ │ │ └── messages_en_US.properties │ │ │ └── xul/ │ │ │ ├── SqoopExportJobEntry.xul │ │ │ ├── SqoopImportJobEntry.xul │ │ │ ├── advanced-mode.xul │ │ │ └── button-bar.xul │ │ └── test/ │ │ └── java/ │ │ └── org/ │ │ └── pentaho/ │ │ └── big/ │ │ └── data/ │ │ └── kettle/ │ │ └── plugins/ │ │ └── sqoop/ │ │ ├── AbstractSqoopJobEntryTest.java │ │ ├── PersistentPropertyChangeListener.java │ │ ├── PropertyFiringObjectTest.java │ │ ├── SqoopConfigTest.java │ │ ├── SqoopLog4jFilterTest.java │ │ └── util/ │ │ └── MockitoAutoBean.java │ └── pom.xml ├── legacy/ │ ├── pom.xml │ └── src/ │ ├── main/ │ │ ├── java/ │ │ │ └── org/ │ │ │ └── pentaho/ │ │ │ ├── di/ │ │ │ │ ├── core/ │ │ │ │ │ └── hadoop/ │ │ │ │ │ ├── HadoopConfigurationInfo.java │ │ │ │ │ ├── HadoopConfigurationPrompter.java │ │ │ │ │ ├── HadoopSpoonPlugin.java │ │ │ │ │ ├── NoShimSpecifiedException.java │ │ │ │ │ └── SpoonExtensionPoint.java │ │ │ │ ├── trans/ │ │ │ │ │ └── steps/ │ │ │ │ │ ├── avroinput/ │ │ │ │ │ │ ├── AvroInput.java │ │ │ │ │ │ ├── AvroInputData.java │ │ │ │ │ │ ├── AvroInputDialog.java │ │ │ │ │ │ └── AvroInputMeta.java │ │ │ │ │ └── couchdbinput/ │ │ │ │ │ ├── CouchDbInput.java │ │ │ │ │ ├── CouchDbInputData.java │ │ │ │ │ └── CouchDbInputMeta.java │ │ │ │ └── ui/ │ │ │ │ ├── core/ │ │ │ │ │ └── namedcluster/ │ │ │ │ │ ├── HadoopClusterDelegate.java │ │ │ │ │ ├── NamedClusterDialog.java │ │ │ │ │ ├── NamedClusterUIFactory.java │ │ │ │ │ ├── NamedClusterUIHelper.java │ │ │ │ │ └── NamedClusterWidget.java │ │ │ │ ├── job/ │ │ │ │ │ └── entries/ │ │ │ │ │ └── hadoopjobexecutor/ │ │ │ │ │ └── UserDefinedItem.java │ │ │ │ ├── repository/ │ │ │ │ │ └── repositoryexplorer/ │ │ │ │ │ ├── controllers/ │ │ │ │ │ │ └── NamedClustersController.java │ │ │ │ │ └── model/ │ │ │ │ │ ├── UINamedCluster.java │ │ │ │ │ ├── UINamedClusterObjectRegistry.java │ │ │ │ │ └── UINamedClusters.java │ │ │ │ ├── trans/ │ │ │ │ │ └── steps/ │ │ │ │ │ └── couchdbinput/ │ │ │ │ │ └── CouchDbInputDialog.java │ │ │ │ └── vfs/ │ │ │ │ └── VfsFileChooserHelper.java │ 
│ │ └── hadoop/ │ │ │ ├── PluginPropertiesUtil.java │ │ │ └── PropertiesConfigurationProperties.java │ │ └── resources/ │ │ ├── META-INF/ │ │ │ └── version.properties │ │ └── org/ │ │ └── pentaho/ │ │ ├── di/ │ │ │ ├── core/ │ │ │ │ └── hadoop/ │ │ │ │ ├── explorer-layout-overlay.xul │ │ │ │ └── messages/ │ │ │ │ └── messages_en_US.properties │ │ │ ├── trans/ │ │ │ │ └── steps/ │ │ │ │ ├── avroinput/ │ │ │ │ │ └── messages/ │ │ │ │ │ └── messages_en_US.properties │ │ │ │ └── couchdbinput/ │ │ │ │ └── messages/ │ │ │ │ └── messages_en_US.properties │ │ │ └── ui/ │ │ │ ├── core/ │ │ │ │ └── namedcluster/ │ │ │ │ └── dialog/ │ │ │ │ └── messages/ │ │ │ │ └── messages_en_US.properties │ │ │ └── hadoop/ │ │ │ └── configuration/ │ │ │ ├── messages/ │ │ │ │ └── messages_en_US.properties │ │ │ ├── no-configs.xul │ │ │ ├── restart-prompt.xul │ │ │ ├── select-config.xul │ │ │ └── toolbar-overlay.xul │ │ └── hadoop/ │ │ └── messages/ │ │ └── messages_en_US.properties │ └── test/ │ ├── java/ │ │ └── org/ │ │ └── pentaho/ │ │ ├── database/ │ │ │ └── TestSelectCount.java │ │ ├── di/ │ │ │ ├── core/ │ │ │ │ └── hadoop/ │ │ │ │ └── HadoopConfigurationInfoTest.java │ │ │ ├── trans/ │ │ │ │ └── steps/ │ │ │ │ ├── avroinput/ │ │ │ │ │ ├── AvroInputDataTest.java │ │ │ │ │ ├── AvroInputMetaAvroFieldTest.java │ │ │ │ │ ├── AvroInputMetaLookupFieldTest.java │ │ │ │ │ ├── AvroInputMetaTest.java │ │ │ │ │ └── AvroInputTest.java │ │ │ │ └── couchdbinput/ │ │ │ │ ├── CouchDbInputMetaTest.java │ │ │ │ └── CouchDbInputTest.java │ │ │ └── ui/ │ │ │ ├── core/ │ │ │ │ └── namedcluster/ │ │ │ │ └── NamedClusterUIHelperTest.java │ │ │ └── vfs/ │ │ │ └── VfsFileChooserHelperTest.java │ │ ├── hadoop/ │ │ │ ├── PluginPropertiesUtilTest.java │ │ │ └── PropertiesConfigurationPropertiesTest.java │ │ ├── util/ │ │ │ └── FileUtil.java │ │ └── weblogs/ │ │ └── WebLogs.java │ └── resources/ │ ├── hadoop-configurations/ │ │ └── .gitignore │ ├── master.log │ ├── plugin.properties │ ├── s3OutputMetaTest.ktr │ ├── test-settings.properties │ ├── test-version.properties │ └── test.ktr ├── legacy-amazon/ │ ├── assemblies/ │ │ ├── plugin/ │ │ │ ├── pom.xml │ │ │ └── src/ │ │ │ ├── assembly/ │ │ │ │ └── assembly.xml │ │ │ └── main/ │ │ │ └── resources/ │ │ │ └── version.xml │ │ └── pom.xml │ ├── core/ │ │ ├── pom.xml │ │ └── src/ │ │ ├── main/ │ │ │ ├── java/ │ │ │ │ └── org/ │ │ │ │ └── pentaho/ │ │ │ │ └── amazon/ │ │ │ │ ├── AbstractAmazonJobEntry.java │ │ │ │ ├── AbstractAmazonJobEntryDialog.java │ │ │ │ ├── AbstractAmazonJobExecutor.java │ │ │ │ ├── AbstractAmazonJobExecutorController.java │ │ │ │ ├── AmazonEmrReleases.java │ │ │ │ ├── AmazonRegion.java │ │ │ │ ├── InstanceType.java │ │ │ │ ├── client/ │ │ │ │ │ ├── AbstractClientFactory.java │ │ │ │ │ ├── AmazonClientCredentials.java │ │ │ │ │ ├── ClientFactoriesManager.java │ │ │ │ │ ├── ClientType.java │ │ │ │ │ ├── api/ │ │ │ │ │ │ ├── AimClient.java │ │ │ │ │ │ ├── Ec2Client.java │ │ │ │ │ │ ├── EmrClient.java │ │ │ │ │ │ ├── PricingClient.java │ │ │ │ │ │ └── S3Client.java │ │ │ │ │ └── impl/ │ │ │ │ │ ├── AimClientFactory.java │ │ │ │ │ ├── AimClientImpl.java │ │ │ │ │ ├── Ec2ClientFactory.java │ │ │ │ │ ├── Ec2ClientImpl.java │ │ │ │ │ ├── EmrClientFactory.java │ │ │ │ │ ├── EmrClientImpl.java │ │ │ │ │ ├── PricingClientFactory.java │ │ │ │ │ ├── PricingClientImpl.java │ │ │ │ │ ├── S3ClientFactory.java │ │ │ │ │ └── S3ClientImpl.java │ │ │ │ ├── emr/ │ │ │ │ │ ├── job/ │ │ │ │ │ │ └── AmazonElasticMapReduceJobExecutor.java │ │ │ │ │ └── ui/ │ │ │ │ │ ├── 
AmazonElasticMapReduceJobExecutorController.java │ │ │ │ │ └── AmazonElasticMapReduceJobExecutorDialog.java │ │ │ │ ├── hive/ │ │ │ │ │ ├── job/ │ │ │ │ │ │ └── AmazonHiveJobExecutor.java │ │ │ │ │ └── ui/ │ │ │ │ │ ├── AmazonHiveJobExecutorController.java │ │ │ │ │ └── AmazonHiveJobExecutorDialog.java │ │ │ │ └── s3/ │ │ │ │ ├── S3VfsFileChooserHelper.java │ │ │ │ └── VfsFileChooserHelper.java │ │ │ └── resources/ │ │ │ ├── META-INF/ │ │ │ │ └── version.properties │ │ │ └── org/ │ │ │ └── pentaho/ │ │ │ └── amazon/ │ │ │ ├── emr/ │ │ │ │ ├── job/ │ │ │ │ │ └── messages/ │ │ │ │ │ └── messages_en_US.properties │ │ │ │ └── ui/ │ │ │ │ └── AmazonElasticMapReduceJobExecutorDialog.xul │ │ │ ├── hive/ │ │ │ │ ├── job/ │ │ │ │ │ └── messages/ │ │ │ │ │ └── messages_en_US.properties │ │ │ │ └── ui/ │ │ │ │ └── AmazonHiveJobExecutorDialog.xul │ │ │ └── messages/ │ │ │ └── messages_en_US.properties │ │ └── test/ │ │ ├── java/ │ │ │ └── org/ │ │ │ └── pentaho/ │ │ │ └── amazon/ │ │ │ ├── AbstractAmazonJobExecutorControllerTest.java │ │ │ ├── AbstractAmazonJobExecutorTest.java │ │ │ ├── AmazonRegionTest.java │ │ │ ├── InstanceTypeTest.java │ │ │ ├── PersistentPropertyChangeListener.java │ │ │ ├── PropertyFiringObjectTest.java │ │ │ ├── client/ │ │ │ │ ├── AmazonClientCredentialsTest.java │ │ │ │ ├── ClientFactoriesManagerTest.java │ │ │ │ └── impl/ │ │ │ │ ├── AimClientImplTest.java │ │ │ │ ├── Ec2ClientFactoryTest.java │ │ │ │ ├── Ec2ClientImplTest.java │ │ │ │ ├── EmrClientImplTest.java │ │ │ │ ├── PricingClientImplTest.java │ │ │ │ └── S3ClientImplTest.java │ │ │ ├── emr/ │ │ │ │ └── job/ │ │ │ │ └── AmazonElasticMapReduceJobExecutorLoadSaveTest.java │ │ │ └── hive/ │ │ │ └── job/ │ │ │ └── AmazonHiveJobExecutorLoadSaveTest.java │ │ └── resources/ │ │ └── master.log │ └── pom.xml ├── legacy-core/ │ ├── pom.xml │ └── src/ │ └── main/ │ └── java/ │ └── org/ │ └── pentaho/ │ └── big/ │ └── data/ │ └── api/ │ └── services/ │ └── BigDataServicesHelper.java ├── pom.xml └── services-bootstrap/ ├── pom.xml └── src/ ├── main/ │ └── java/ │ └── org/ │ └── pentaho/ │ └── big/ │ └── data/ │ ├── api/ │ │ └── services/ │ │ └── impl/ │ │ └── BigDataServicesProxyImpl.java │ ├── hadoop/ │ │ └── bootstrap/ │ │ ├── HadoopConfigurationBootstrap.java │ │ └── HadoopConfigurationListener.java │ └── services/ │ └── bootstrap/ │ ├── BigDataCEServiceInitializerImpl.java │ ├── BigDataLogConfig.java │ └── BigDataPluginLifecycleListener.java └── test/ └── java/ └── org/ └── pentaho/ └── big/ └── data/ └── services/ └── bootstrap/ ├── BigDataCEServiceInitializerImplTest.java └── BigDataLogConfigTest.java ================================================ FILE CONTENTS ================================================ ================================================ FILE: .gitattributes ================================================ # Auto detect text files and perform LF normalization * text=auto ================================================ FILE: .github/CODEOWNERS ================================================ * @pentaho/sp-branch-write ================================================ FILE: .gitignore ================================================ bin/ dist/ lib/ lib-provided/ stage-pmr/ test-lib/ eclipse-bin/ override.properties .settings/ .classpath .project /dev-lib /pdi-null /legacy/pdi-null /pdi-bin *.iml .idea/ .vscode/ target/ rebel.xml .DS_Store ================================================ FILE: LICENSE.txt ================================================ Pentaho Developer Edition 10.3 Copyright 2024 
Hitachi Vantara, LLC; licensed under the Business Source License 1.1 (BSL). This project may include third party components that are individually licensed per the terms indicated by their respective copyright owners included in text file or in the source code. License text copyright (c) 2020 MariaDB Corporation Ab, All Rights Reserved. "Business Source License" is a trademark of MariaDB Corporation Ab. Parameters Licensor: Hitachi Vantara, LLC. Licensed Work: Pentaho Developer Edition 10.3. The Licensed Work is (c) 2024 Hitachi Vantara, LLC. Additional Use Grant: None Change Date: Four years from the date the Licensed Work is published. Change License: Apache 2.0 For information about alternative licensing arrangements for the Licensed Work, please contact support@pentaho.com. Notice Business Source License 1.1 Terms The Licensor hereby grants you the right to copy, modify, create derivative works, redistribute, and make non-production use of the Licensed Work. The Licensor may make an Additional Use Grant, above, permitting limited production use. Effective on the Change Date, or the fourth anniversary of the first publicly available distribution of a specific version of the Licensed Work under this License, whichever comes first, the Licensor hereby grants you rights under the terms of the Change License, and the rights granted in the paragraph above terminate. If your use of the Licensed Work does not comply with the requirements currently in effect as described in this License, you must purchase a commercial license from the Licensor, its affiliated entities, or authorized resellers, or you must refrain from using the Licensed Work. All copies of the original and modified Licensed Work, and derivative works of the Licensed Work, are subject to this License. This License applies separately for each version of the Licensed Work and the Change Date may vary for each version of the Licensed Work released by Licensor. You must conspicuously display this License on each original or modified copy of the Licensed Work. If you receive the Licensed Work in original or modified form from a third party, the terms and conditions set forth in this License apply to your use of that work. Any use of the Licensed Work in violation of this License will automatically terminate your rights under this License for the current and all other versions of the Licensed Work. This License does not grant you any right in any trademark or logo of Licensor or its affiliates (provided that you may use a trademark or logo of Licensor as expressly required by this License). TO THE EXTENT PERMITTED BY APPLICABLE LAW, THE LICENSED WORK IS PROVIDED ON AN "AS IS" BASIS. LICENSOR HEREBY DISCLAIMS ALL WARRANTIES AND CONDITIONS, EXPRESS OR IMPLIED, INCLUDING (WITHOUT LIMITATION) WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, NON-INFRINGEMENT, AND TITLE. ================================================ FILE: README.markdown ================================================ Pentaho Big Data Plugin ======================= The Pentaho Big Data Plugin Project provides support for an ever-expanding Big Data community within the Pentaho ecosystem. It is a plugin for the Pentaho Kettle engine which can be used within Pentaho Data Integration (Kettle), Pentaho Reporting, and the Pentaho BI Platform. Building -------- It's a maven build, so `mvn clean install` is a typical default for a local build. Pre-requisites --------------- JDK 11 in your path. Maven 3.3.9 in your path. 
This [settings.xml](https://raw.githubusercontent.com/pentaho/maven-parent-poms/master/maven-support-files/settings.xml) file (see the next section).

How to use the custom settings.xml
---------------
Option 1: Copy this file into your `~/.m2` folder and name it "settings.xml". Warning: If you do this, it will become your default settings.xml for all Maven builds.

Option 2: Copy this file into some other folder, such as the project folder of the project you want to build, and use the Maven `-s` option to build with this settings.xml file. Example: `mvn -s public-settings.xml install`.

The Pentaho profile defaults to pulling all artifacts through the Pentaho public repository. If you want to try resolving Maven plugin dependencies through the Maven Central repository instead of the Pentaho public repository, activate the "central" profile like this:

`mvn -s public-settings.xml -P central install`

If your build fails to resolve the jacoco-maven-plugin version 0.7.7-SNAPSHOT
---------------
The 0.7.7-SNAPSHOT property version for the jacoco-maven-plugin is defined in several releases of the Pentaho parent poms, but it is only available in the Pentaho artifact repositories. If you are trying to resolve through Maven Central or other public repositories, you should override it with a released version like this:

`mvn -s public-settings.xml -P central install -Djacoco-maven-plugin.version=0.7.7.201606060606`

Further Reading
---------------
Additional documentation is available on the Community wiki: [Big Data Plugin for Java Developers](https://pentaho-community.atlassian.net/wiki/display/BAD/Getting+Started+for+Java+Developers)

License
-------
Licensed under the Apache License, Version 2.0. See LICENSE.txt for more information.

================================================
FILE: api/pom.xml
================================================
4.0.0 pentaho pentaho-big-data-parent 11.1.0.0-SNAPSHOT pentaho-big-data-api 11.1.0.0-SNAPSHOT pom runtimeTest

================================================
FILE: api/runtimeTest/pom.xml
================================================
4.0.0 pentaho pentaho-big-data-api 11.1.0.0-SNAPSHOT pentaho-big-data-api-runtimeTest 11.1.0.0-SNAPSHOT jar Pentaho Community Edition Project: ${project.artifactId} a Pentaho open source project http://www.pentaho.com site 5.17.0 org.slf4j slf4j-api pentaho-kettle kettle-core ${pdi.version} provided junit junit ${dependency.junit.revision} test org.mockito mockito-core ${mockito-core.version} test org.apache.httpcomponents httpclient compile org.apache.httpcomponents httpcore pentaho pentaho-big-data-impl-cluster ${pdi.version} org.pentaho shim-api-core ${pdi.version} org.apache.logging.log4j log4j-1.2-api ${log4j.version} maven-jar-plugin 2.5 test-jar

================================================
FILE: api/runtimeTest/src/main/java/org/pentaho/runtime/test/RuntimeTest.java
================================================
/*! ******************************************************************************
 *
 * Pentaho
 *
 * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com
 *
 * Use of this software is governed by the Business Source License included
 * in the LICENSE.TXT file.
 *
 * Change Date: 2029-07-20
 ******************************************************************************/

package org.pentaho.runtime.test;

import org.pentaho.runtime.test.result.RuntimeTestResultSummary;

import java.util.Set;

/**
 * Created by bryan on 8/11/15.
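 *
 * <p>Illustrative sketch (hypothetical usage): implementations of this interface are registered
 * with a {@code RuntimeTester} and exercised through it, with progress reported via a
 * {@code RuntimeTestProgressCallback}. The {@code runtimeTester}, {@code myRuntimeTest} and
 * {@code namedCluster} references are assumed to exist.</p>
 *
 * <pre>{@code
 * runtimeTester.addRuntimeTest( myRuntimeTest );
 * runtimeTester.runtimeTest( namedCluster, status -> System.out.println(
 *     status.getTestsDone() + " done, " + status.getTestsRunning() + " running, "
 *         + status.getTestsOutstanding() + " outstanding" ) );
 * }</pre>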
*/ public interface RuntimeTest { boolean accepts( Object objectUnderTest ); String getModule(); String getId(); String getName(); boolean isConfigInitTest(); Set getDependencies(); RuntimeTestResultSummary runTest( Object objectUnderTest ); } ================================================ FILE: api/runtimeTest/src/main/java/org/pentaho/runtime/test/RuntimeTestProgressCallback.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.runtime.test; /** * Created by bryan on 8/11/15. */ public interface RuntimeTestProgressCallback { void onProgress( RuntimeTestStatus runtimeTestStatus ); } ================================================ FILE: api/runtimeTest/src/main/java/org/pentaho/runtime/test/RuntimeTestStatus.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.runtime.test; import org.pentaho.runtime.test.module.RuntimeTestModuleResults; import java.util.List; /** * Created by bryan on 8/18/15. */ public interface RuntimeTestStatus { List getModuleResults(); int getTestsDone(); int getTestsRunning(); int getTestsOutstanding(); boolean isDone(); } ================================================ FILE: api/runtimeTest/src/main/java/org/pentaho/runtime/test/RuntimeTester.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.runtime.test; /** * Created by bryan on 8/11/15. */ public interface RuntimeTester { void runtimeTest( Object objectUnderTest, RuntimeTestProgressCallback runtimeTestProgressCallback ); void addRuntimeTest( RuntimeTest test ); } ================================================ FILE: api/runtimeTest/src/main/java/org/pentaho/runtime/test/action/RuntimeTestAction.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.runtime.test.action; import org.pentaho.runtime.test.result.RuntimeTestEntrySeverity; /** * Created by bryan on 9/8/15. 
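 *
 * <p>Illustrative sketch (hypothetical {@code action} reference): how a caller might read an
 * action before deciding how to present it, using only the getters declared below.</p>
 *
 * <pre>{@code
 * String label = action.getName() + ": " + action.getDescription();
 * RuntimeTestEntrySeverity severity = action.getSeverity(); // e.g. to choose an icon or log level
 * String fallbackText = action.getPayload().getMessage();   // logged when no handler accepts the action
 * }</pre>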
*/ public interface RuntimeTestAction { String getName(); String getDescription(); RuntimeTestEntrySeverity getSeverity(); RuntimeTestActionPayload getPayload(); } ================================================ FILE: api/runtimeTest/src/main/java/org/pentaho/runtime/test/action/RuntimeTestActionHandler.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.runtime.test.action; /** * Created by bryan on 9/8/15. */ public interface RuntimeTestActionHandler { boolean canHandle( RuntimeTestAction runtimeTestAction ); void handle( RuntimeTestAction runtimeTestAction ); } ================================================ FILE: api/runtimeTest/src/main/java/org/pentaho/runtime/test/action/RuntimeTestActionPayload.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.runtime.test.action; /** * Created by bryan on 9/9/15. */ public interface RuntimeTestActionPayload { /** * This will be called and logged when the Action isn't handled by any registered handlers * * @return the message associated with the payload */ String getMessage(); } ================================================ FILE: api/runtimeTest/src/main/java/org/pentaho/runtime/test/action/RuntimeTestActionService.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.runtime.test.action; /** * Created by bryan on 9/8/15. */ public interface RuntimeTestActionService { void handle( RuntimeTestAction runtimeTestAction ); } ================================================ FILE: api/runtimeTest/src/main/java/org/pentaho/runtime/test/action/impl/HelpUrlPayload.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.runtime.test.action.impl; import org.pentaho.runtime.test.action.RuntimeTestActionPayload; import org.pentaho.runtime.test.i18n.MessageGetter; import org.pentaho.runtime.test.i18n.MessageGetterFactory; /** * Created by bryan on 9/9/15. 
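 * <p>
 * Payload that points the user at a help page for a test entry; {@link #getMessage()} resolves an
 * i18n message containing the URL so that the default logging handler can still record something
 * useful when no UI handler consumes the action.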
*/ public class HelpUrlPayload implements RuntimeTestActionPayload { public static final String HELP_URL_PAYLOAD_MESSAGE = "HelpUrlPayload.Message"; private final MessageGetter messageGetter; private final String title; private final String header; private final String url; public HelpUrlPayload( MessageGetterFactory messageGetterFactory, String title, String header, String url ) { this.messageGetter = messageGetterFactory.create( getClass() ); this.title = title; this.header = header; this.url = url; } @Override public String getMessage() { return messageGetter.getMessage( HELP_URL_PAYLOAD_MESSAGE, url ); } public String getTitle() { return title; } public String getHeader() { return header; } public String getUrl() { return url; } } ================================================ FILE: api/runtimeTest/src/main/java/org/pentaho/runtime/test/action/impl/LoggingRuntimeTestActionHandlerImpl.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.runtime.test.action.impl; import org.pentaho.runtime.test.action.RuntimeTestAction; import org.pentaho.runtime.test.action.RuntimeTestActionHandler; import org.pentaho.runtime.test.i18n.MessageGetter; import org.pentaho.runtime.test.i18n.MessageGetterFactory; import org.pentaho.runtime.test.i18n.impl.BaseMessagesMessageGetterFactoryImpl; import org.pentaho.runtime.test.result.RuntimeTestEntrySeverity; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; /** * Created by bryan on 9/8/15. 
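 * <p>
 * Catch-all handler used as the default fallback: {@link #canHandle(RuntimeTestAction)} always
 * returns {@code true}, and {@link #handle(RuntimeTestAction)} logs the action's name, description
 * and payload at a level derived from its severity (DEBUG as debug, SKIPPED and WARNING as warn,
 * ERROR and FATAL as error, everything else as info; a missing severity is logged as a warning).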
*/ public class LoggingRuntimeTestActionHandlerImpl implements RuntimeTestActionHandler { public static final String LOGGING_RUNTIME_TEST_ACTION_HANDLER_IMPL = "LoggingRuntimeTestActionHandlerImpl.Action"; public static final String LOGGING_RUNTIME_TEST_ACTION_HANDLER_IMPL_MISSING_SEVERITY = "LoggingRuntimeTestActionHandlerImpl.MissingSeverity"; private final Logger logger; private final MessageGetter messageGetter; private static LoggingRuntimeTestActionHandlerImpl instance = new LoggingRuntimeTestActionHandlerImpl( BaseMessagesMessageGetterFactoryImpl.getInstance() ); public static LoggingRuntimeTestActionHandlerImpl getInstance() { return instance; } public LoggingRuntimeTestActionHandlerImpl( MessageGetterFactory messageGetterFactory ) { this( messageGetterFactory, LogManager.getLogger( LoggingRuntimeTestActionHandlerImpl.class ) ); } public LoggingRuntimeTestActionHandlerImpl( MessageGetterFactory messageGetterFactory, Logger logger ) { this.messageGetter = messageGetterFactory.create( LoggingRuntimeTestActionHandlerImpl.class ); this.logger = logger; } @Override public boolean canHandle( RuntimeTestAction runtimeTestAction ) { return true; } private String getMessage( RuntimeTestAction runtimeTestAction ) { return messageGetter .getMessage( LOGGING_RUNTIME_TEST_ACTION_HANDLER_IMPL, runtimeTestAction.getName(), runtimeTestAction.getDescription(), String.valueOf( runtimeTestAction.getPayload() ) ); } @Override public void handle( RuntimeTestAction runtimeTestAction ) { RuntimeTestEntrySeverity severity = runtimeTestAction.getSeverity(); if ( severity == null ) { logger.warn( messageGetter .getMessage( LOGGING_RUNTIME_TEST_ACTION_HANDLER_IMPL_MISSING_SEVERITY, runtimeTestAction.getName(), runtimeTestAction.getDescription(), String.valueOf( runtimeTestAction.getPayload() ) ) ); return; } switch( severity ) { case DEBUG: logger.debug( getMessage( runtimeTestAction ) ); break; case SKIPPED: case WARNING: logger.warn( getMessage( runtimeTestAction ) ); break; case ERROR: case FATAL: logger.error( getMessage( runtimeTestAction ) ); break; default: logger.info( getMessage( runtimeTestAction ) ); break; } } } ================================================ FILE: api/runtimeTest/src/main/java/org/pentaho/runtime/test/action/impl/RuntimeTestActionImpl.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.runtime.test.action.impl; import org.pentaho.runtime.test.action.RuntimeTestAction; import org.pentaho.runtime.test.action.RuntimeTestActionPayload; import org.pentaho.runtime.test.result.RuntimeTestEntrySeverity; /** * Created by bryan on 9/9/15. 
*/ public class RuntimeTestActionImpl implements RuntimeTestAction { private final String name; private final String description; private final RuntimeTestEntrySeverity severity; private final RuntimeTestActionPayload payload; public RuntimeTestActionImpl( String name, String description, RuntimeTestEntrySeverity severity, RuntimeTestActionPayload payload ) { this.name = name; this.description = description; this.severity = severity; this.payload = payload; } @Override public String getName() { return name; } @Override public String getDescription() { return description; } @Override public RuntimeTestEntrySeverity getSeverity() { return severity; } @Override public RuntimeTestActionPayload getPayload() { return payload; } } ================================================ FILE: api/runtimeTest/src/main/java/org/pentaho/runtime/test/action/impl/RuntimeTestActionServiceImpl.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.runtime.test.action.impl; import org.pentaho.runtime.test.action.RuntimeTestAction; import org.pentaho.runtime.test.action.RuntimeTestActionHandler; import org.pentaho.runtime.test.action.RuntimeTestActionService; import org.pentaho.runtime.test.i18n.impl.BaseMessagesMessageGetterFactoryImpl; import java.util.ArrayList; import java.util.List; /** * Created by bryan on 9/8/15. */ public class RuntimeTestActionServiceImpl implements RuntimeTestActionService { private final List runtimeTestActionHandlers; private final RuntimeTestActionHandler defaultHandler; private static RuntimeTestActionServiceImpl instance; /** * Creates the RuntimeTestActionService * * @param runtimeTestActionHandlers list of handlers * @param defaultHandler fallback handler (MUST BE ABLE TO HANDLE ANY PAYLOAD) */ public RuntimeTestActionServiceImpl( List runtimeTestActionHandlers, RuntimeTestActionHandler defaultHandler ) { this.runtimeTestActionHandlers = runtimeTestActionHandlers; this.defaultHandler = defaultHandler; } public static RuntimeTestActionServiceImpl getInstance() { if ( instance == null ){ LoggingRuntimeTestActionHandlerImpl loggingRuntimeTestActionHandler = LoggingRuntimeTestActionHandlerImpl.getInstance(); List runtimeTestActionHandlers = new ArrayList<>(); runtimeTestActionHandlers.add( loggingRuntimeTestActionHandler ); instance = new RuntimeTestActionServiceImpl( runtimeTestActionHandlers, loggingRuntimeTestActionHandler ); } return instance; } @Override public void handle( RuntimeTestAction runtimeTestAction ) { for ( RuntimeTestActionHandler runtimeTestActionHandler : runtimeTestActionHandlers ) { if ( runtimeTestActionHandler.canHandle( runtimeTestAction ) ) { runtimeTestActionHandler.handle( runtimeTestAction ); return; } } defaultHandler.handle( runtimeTestAction ); } } ================================================ FILE: api/runtimeTest/src/main/java/org/pentaho/runtime/test/i18n/MessageGetter.java ================================================ /*! 
****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.runtime.test.i18n; /** * Created by bryan on 8/21/15. */ public interface MessageGetter { String getMessage( String key, String... parameters ); } ================================================ FILE: api/runtimeTest/src/main/java/org/pentaho/runtime/test/i18n/MessageGetterFactory.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.runtime.test.i18n; /** * Created by bryan on 8/21/15. */ public interface MessageGetterFactory { MessageGetter create( Class PKG ); } ================================================ FILE: api/runtimeTest/src/main/java/org/pentaho/runtime/test/i18n/impl/BaseMessagesMessageGetterFactoryImpl.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.runtime.test.i18n.impl; import org.pentaho.runtime.test.i18n.MessageGetter; import org.pentaho.runtime.test.i18n.MessageGetterFactory; /** * Created by bryan on 8/21/15. */ public class BaseMessagesMessageGetterFactoryImpl implements MessageGetterFactory { private static BaseMessagesMessageGetterFactoryImpl instance = new BaseMessagesMessageGetterFactoryImpl(); @Override public MessageGetter create( Class PKG ) { return new BaseMessagesMessageGetterImpl( PKG ); } public static BaseMessagesMessageGetterFactoryImpl getInstance() { return instance; } } ================================================ FILE: api/runtimeTest/src/main/java/org/pentaho/runtime/test/i18n/impl/BaseMessagesMessageGetterImpl.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.runtime.test.i18n.impl; import org.pentaho.di.i18n.BaseMessages; import org.pentaho.runtime.test.i18n.MessageGetter; /** * Created by bryan on 8/21/15. */ public class BaseMessagesMessageGetterImpl implements MessageGetter { private final Class PKG; public BaseMessagesMessageGetterImpl( Class PKG ) { this.PKG = PKG; } @Override public String getMessage( String key, String... 
parameters ) { if ( parameters != null && parameters.length > 0 ) { return BaseMessages.getString( PKG, key, parameters ); } else { return BaseMessages.getString( PKG, key ); } } } ================================================ FILE: api/runtimeTest/src/main/java/org/pentaho/runtime/test/impl/RuntimeTestComparator.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.runtime.test.impl; import org.pentaho.runtime.test.RuntimeTest; import java.util.Comparator; import java.util.Map; /** * Created by bryan on 8/18/15. */ public class RuntimeTestComparator implements Comparator { private final Map orderedModules; public RuntimeTestComparator( Map orderedModules ) { this.orderedModules = orderedModules; } private Integer nullSafeCompare( Object first, Object second ) { if ( first == null ) { if ( second == null ) { return null; } else { return 1; } } if ( second == null ) { return -1; } if ( first.equals( second ) ) { return 0; } return null; } private int compareModuleNames( String o1Module, String o2Module ) { Integer result = nullSafeCompare( o1Module, o2Module ); if ( result != null ) { return result; } Integer o1OrderNum = orderedModules.get( o1Module ); Integer o2OrderNum = orderedModules.get( o2Module ); result = nullSafeCompare( o1OrderNum, o2OrderNum ); if ( result != null ) { return result; } return o1Module.compareTo( o2Module ); } @Override public int compare( RuntimeTest o1, RuntimeTest o2 ) { Integer result = compareModuleNames( o1.getModule(), o2.getModule() ); if ( result != 0 ) { return result; } String o1Id = o1.getId(); String o2Id = o2.getId(); result = nullSafeCompare( o1Id, o2Id ); if ( result == null ) { result = o1Id.compareTo( o2Id ); } return result; } } ================================================ FILE: api/runtimeTest/src/main/java/org/pentaho/runtime/test/impl/RuntimeTestRunner.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. 
* * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.runtime.test.impl; import org.pentaho.di.i18n.BaseMessages; import org.pentaho.runtime.test.RuntimeTest; import org.pentaho.runtime.test.RuntimeTestProgressCallback; import org.pentaho.runtime.test.module.RuntimeTestModuleResults; import org.pentaho.runtime.test.module.impl.RuntimeTestModuleResultsImpl; import org.pentaho.runtime.test.result.RuntimeTestEntrySeverity; import org.pentaho.runtime.test.result.RuntimeTestResult; import org.pentaho.runtime.test.result.RuntimeTestResultSummary; import org.pentaho.runtime.test.result.org.pentaho.runtime.test.result.impl.RuntimeTestResultSummaryImpl; import org.pentaho.runtime.test.test.impl.RuntimeTestDelegateWithMoreDependencies; import org.pentaho.runtime.test.test.impl.RuntimeTestResultEntryImpl; import org.pentaho.runtime.test.test.impl.RuntimeTestResultImpl; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import java.util.concurrent.ExecutorService; /** * Created by bryan on 8/11/15. */ public class RuntimeTestRunner { private static final Class PKG = RuntimeTestRunner.class; private final Set remainingTests; private final Object objectUnderTest; private final RuntimeTestProgressCallback runtimeTestProgressCallback; private final ExecutorService executorService; private final Set satisfiedDependencies; private final Set failedDependencies; private final List runtimeModuleList; private final Map> stringRuntimeTestModuleToTestIdMap; private final Map runtimeTestResultMap; private final Set outstandingTestIds; private final Set runningTestIds; private final int numberOfTests; @SuppressWarnings( "unchecked" ) public RuntimeTestRunner( Collection runtimeTests, Object objectUnderTest, RuntimeTestProgressCallback runtimeTestProgressCallback, ExecutorService executorService ) { this.objectUnderTest = objectUnderTest; runtimeModuleList = new ArrayList<>(); stringRuntimeTestModuleToTestIdMap = new HashMap<>(); runtimeTestResultMap = new HashMap<>(); outstandingTestIds = new HashSet<>(); runningTestIds = new HashSet<>(); Set initTests = new HashSet<>(); Set initTestIds = new HashSet<>(); Set nonInitTests = new HashSet<>(); int numberOfTests = 0; for ( RuntimeTest runtimeTest : runtimeTests ) { if ( runtimeTest.accepts( objectUnderTest ) ) { numberOfTests++; String runtimeTestModule = runtimeTest.getModule(); List runtimeIdsForModule = stringRuntimeTestModuleToTestIdMap.get( runtimeTestModule ); if ( runtimeIdsForModule == null ) { runtimeModuleList.add( runtimeTestModule ); runtimeIdsForModule = new ArrayList<>(); stringRuntimeTestModuleToTestIdMap.put( runtimeTestModule, runtimeIdsForModule ); } String runtimeTestId = runtimeTest.getId(); runtimeIdsForModule.add( runtimeTestId ); if ( runtimeTest.isConfigInitTest() ) { initTests.add( runtimeTest ); initTestIds.add( runtimeTestId ); } else { nonInitTests.add( runtimeTest ); } } } this.numberOfTests = numberOfTests; this.remainingTests = new HashSet<>( initTests ); for ( RuntimeTest nonInitTest : nonInitTests ) { remainingTests.add( new RuntimeTestDelegateWithMoreDependencies( nonInitTest, initTestIds ) ); } for ( RuntimeTest remainingTest : remainingTests ) { String remainingTestId = remainingTest.getId(); runtimeTestResultMap .put( remainingTestId, new RuntimeTestResultImpl( remainingTest, false, new 
RuntimeTestResultSummaryImpl(), 0L ) ); outstandingTestIds.add( remainingTestId ); } this.satisfiedDependencies = new HashSet<>(); this.failedDependencies = new HashSet<>(); this.runtimeTestProgressCallback = runtimeTestProgressCallback; this.executorService = executorService; } private void markSkipped( RuntimeTest runtimeTest ) { Set relevantFailed = new HashSet<>( failedDependencies ); relevantFailed.retainAll( runtimeTest.getDependencies() ); // Get one of the dependencies' names for display String failedDependencyName = "a prerequisite"; if ( !relevantFailed.isEmpty() ) { String failedDependencyId = relevantFailed.iterator().next(); RuntimeTestResult runtimeTestResult = runtimeTestResultMap.get( failedDependencyId ); if ( runtimeTestResult != null ) { failedDependencyName = runtimeTestResult.getRuntimeTest().getName(); } } // We had a dependency fail so we need to skip String runtimeTestId = runtimeTest.getId(); failedDependencies.add( runtimeTestId ); outstandingTestIds.remove( runtimeTestId ); runningTestIds.remove( runtimeTestId ); runtimeTestResultMap.put( runtimeTestId, new RuntimeTestResultImpl( runtimeTest, true, new RuntimeTestResultSummaryImpl( new RuntimeTestResultEntryImpl( RuntimeTestEntrySeverity.SKIPPED, BaseMessages.getString( PKG, "RuntimeTestRunner.Skipped.Desc", failedDependencyName ), BaseMessages.getString( PKG, "RuntimeTestRunner.Skipped.Message", runtimeTest.getName(), relevantFailed ), (Throwable) null ) ), 0L ) ); } private void callbackState() { callbackState( false ); } private void callbackState( boolean done ) { if ( runtimeTestProgressCallback != null ) { List moduleResults = new ArrayList<>( runtimeModuleList.size() ); for ( String runtimeModule : runtimeModuleList ) { List runtimeTestResults = new ArrayList<>(); Set runningTests = new HashSet<>(); HashSet outstandingTests = new HashSet<>(); for ( String testId : stringRuntimeTestModuleToTestIdMap.get( runtimeModule ) ) { RuntimeTestResult runtimeTestResult = runtimeTestResultMap.get( testId ); runtimeTestResults.add( runtimeTestResult ); if ( runningTestIds.contains( testId ) ) { runningTests.add( runtimeTestResult.getRuntimeTest() ); } else if ( outstandingTestIds.contains( testId ) ) { outstandingTests.add( runtimeTestResult.getRuntimeTest() ); } } moduleResults .add( new RuntimeTestModuleResultsImpl( runtimeModule, runtimeTestResults, runningTests, outstandingTests ) ); } int testsRunning = runningTestIds.size(); int testsOutstanding = outstandingTestIds.size(); int testsDone = numberOfTests - testsOutstanding - testsRunning; runtimeTestProgressCallback.onProgress( new RuntimeTestStatusImpl( Collections.unmodifiableList( moduleResults ), testsDone, testsRunning, testsOutstanding, done ) ); } } private void runTest( RuntimeTest runtimeTest ) { String eligibleTestId = runtimeTest.getId(); RuntimeTestResultSummary runtimeTestResultSummary; long before = System.currentTimeMillis(); RuntimeTestEntrySeverity overallSeverity; try { runtimeTestResultSummary = runtimeTest.runTest( objectUnderTest ); overallSeverity = runtimeTestResultSummary.getOverallStatusEntry().getSeverity(); } catch ( Throwable e ) { overallSeverity = RuntimeTestEntrySeverity.FATAL; runtimeTestResultSummary = new RuntimeTestResultSummaryImpl( new RuntimeTestResultEntryImpl( RuntimeTestEntrySeverity.FATAL, BaseMessages.getString( PKG, "RuntimeTestRunner.Error.Desc", runtimeTest.getName() ), e.getMessage(), e ) ); } long after = System.currentTimeMillis(); RuntimeTestResult runtimeTestResult = new RuntimeTestResultImpl( runtimeTest, true, 
runtimeTestResultSummary, after - before ); synchronized ( this ) { if ( overallSeverity == RuntimeTestEntrySeverity.ERROR || overallSeverity == RuntimeTestEntrySeverity.FATAL ) { failedDependencies.add( eligibleTestId ); } else { satisfiedDependencies.add( eligibleTestId ); } runtimeTestResultMap.put( eligibleTestId, runtimeTestResult ); runningTestIds.remove( eligibleTestId ); callbackState(); notifyAll(); } } public synchronized void runTests() { callbackState(); while ( remainingTests.size() > 0 || runningTestIds.size() > 0 ) { Set eligibleTests = new HashSet<>(); Set skippingTests = new HashSet<>(); Set possibleToSatisfyIds = new HashSet<>( satisfiedDependencies ); for ( RuntimeTest remainingTest : remainingTests ) { possibleToSatisfyIds.add( remainingTest.getId() ); } possibleToSatisfyIds.addAll( outstandingTestIds ); possibleToSatisfyIds.addAll( runningTestIds ); for ( RuntimeTest remainingTest : remainingTests ) { Set remainingTestDependencies = remainingTest.getDependencies(); if ( satisfiedDependencies.containsAll( remainingTestDependencies ) ) { eligibleTests.add( remainingTest ); } else if ( !Collections.disjoint( remainingTestDependencies, failedDependencies ) || !possibleToSatisfyIds .containsAll( remainingTestDependencies ) ) { skippingTests.add( remainingTest ); markSkipped( remainingTest ); } } remainingTests.removeAll( eligibleTests ); remainingTests.removeAll( skippingTests ); for ( RuntimeTest eligibleTest : eligibleTests ) { String eligibleTestId = eligibleTest.getId(); outstandingTestIds.remove( eligibleTestId ); runningTestIds.add( eligibleTestId ); } final int wasRunning = runningTestIds.size(); for ( final RuntimeTest eligibleTest : eligibleTests ) { executorService.submit( new Runnable() { @Override public void run() { runTest( eligibleTest ); } } ); } // If we skipped test(s) state has changed and we should rerun immediately, otherwise we can wait until one // finishes if ( skippingTests.size() == 0 ) { if ( wasRunning > 0 ) { while ( wasRunning == runningTestIds.size() ) { try { // Wait until a test finishes wait(); } catch ( InterruptedException e ) { // Ignore } } } } else { callbackState(); } } callbackState( true ); } public static class Factory { public RuntimeTestRunner create( Collection runtimeTests, Object objectUnderTest, RuntimeTestProgressCallback runtimeTestProgressCallback, ExecutorService executorService ) { return new RuntimeTestRunner( runtimeTests, objectUnderTest, runtimeTestProgressCallback, executorService ); } } } ================================================ FILE: api/runtimeTest/src/main/java/org/pentaho/runtime/test/impl/RuntimeTestStatusImpl.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.runtime.test.impl; import org.pentaho.runtime.test.RuntimeTestStatus; import org.pentaho.runtime.test.module.RuntimeTestModuleResults; import java.util.List; /** * Created by bryan on 8/18/15. 
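 * <p>
 * Snapshot of overall progress, built by {@code RuntimeTestRunner} and handed to the
 * {@link org.pentaho.runtime.test.RuntimeTestProgressCallback}: per-module results plus counts of
 * tests that are done, running and outstanding, and a flag marking the final callback.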
*/ public class RuntimeTestStatusImpl implements RuntimeTestStatus { private final List runtimeTestModuleResults; private final int testsDone; private final int testsRunning; private final int testsOutstanding; private final boolean done; public RuntimeTestStatusImpl( List runtimeTestModuleResults, int testsDone, int testsRunning, int testsOutstanding, boolean done ) { this.runtimeTestModuleResults = runtimeTestModuleResults; this.testsDone = testsDone; this.testsRunning = testsRunning; this.testsOutstanding = testsOutstanding; this.done = done; } @Override public List getModuleResults() { return runtimeTestModuleResults; } @Override public int getTestsDone() { return testsDone; } @Override public int getTestsRunning() { return testsRunning; } @Override public int getTestsOutstanding() { return testsOutstanding; } @Override public boolean isDone() { return done; } //OperatorWrap isn't helpful for autogenerated methods //CHECKSTYLE:OperatorWrap:OFF @Override public String toString() { return "RuntimeTestStatusImpl{" + "runtimeTestModuleResults=" + runtimeTestModuleResults + ", testsDone=" + testsDone + ", testsRunning=" + testsRunning + ", testsOutstanding=" + testsOutstanding + ", done=" + done + '}'; } //CHECKSTYLE:OperatorWrap:ON } ================================================ FILE: api/runtimeTest/src/main/java/org/pentaho/runtime/test/impl/RuntimeTesterImpl.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.runtime.test.impl; import org.pentaho.runtime.test.RuntimeTest; import org.pentaho.runtime.test.RuntimeTestProgressCallback; import org.pentaho.runtime.test.RuntimeTester; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; /** * Created by bryan on 8/12/15. 
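 * <p>
 * Sorts the registered tests by the configured module order and runs them asynchronously through a
 * {@link RuntimeTestRunner}, reporting progress to the supplied callback. Illustrative sketch
 * ({@code myTest} and {@code namedCluster} are hypothetical placeholders, not part of this module):
 * <pre>{@code
 * RuntimeTester tester = RuntimeTesterImpl.getInstance();
 * tester.addRuntimeTest( myTest );              // register a RuntimeTest implementation
 * tester.runtimeTest( namedCluster, status -> { // object under test + progress callback
 *   if ( status.isDone() ) {
 *     System.out.println( "tests done: " + status.getTestsDone() );
 *   }
 * } );
 * }</pre>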
*/ public class RuntimeTesterImpl implements RuntimeTester { private final List runtimeTests; private final ExecutorService executorService; private final RuntimeTestRunner.Factory runtimeTestRunnerFactory; private RuntimeTestComparator runtimeTestComparator; private static RuntimeTesterImpl instance; public RuntimeTesterImpl( List runtimeTests, ExecutorService executorService, String orderedModulesString ) { this( runtimeTests, executorService, orderedModulesString, new RuntimeTestRunner.Factory() ); } public static RuntimeTester getInstance(){ if ( instance == null ) { List runtimeTests = new ArrayList<>(); instance = new RuntimeTesterImpl( runtimeTests, Executors.newCachedThreadPool(), "Hadoop Configuration,Hadoop File System,Map Reduce,Oozie,Zookeeper" ); } return instance; } public RuntimeTesterImpl( List runtimeTests, ExecutorService executorService, String orderedModulesString, RuntimeTestRunner.Factory runtimeTestRunnerFactory ) { this.runtimeTests = runtimeTests; this.executorService = executorService; this.runtimeTestRunnerFactory = runtimeTestRunnerFactory; HashMap orderedModules = new HashMap<>(); String[] split = orderedModulesString.split( "," ); for ( int module = 0; module < split.length; module++ ) { orderedModules.put( split[ module ].trim(), module ); } runtimeTestComparator = new RuntimeTestComparator( orderedModules ); } @Override public void runtimeTest( final Object objectUnderTest, final RuntimeTestProgressCallback runtimeTestProgressCallback ) { final List runtimeTests = new ArrayList<>( this.runtimeTests ); Collections.sort( runtimeTests, runtimeTestComparator ); executorService.submit( new Runnable() { @Override public void run() { runtimeTestRunnerFactory.create( runtimeTests, objectUnderTest, runtimeTestProgressCallback, executorService ) .runTests(); } } ); } public void addRuntimeTest( RuntimeTest test ) { this.runtimeTests.add( test ); } } ================================================ FILE: api/runtimeTest/src/main/java/org/pentaho/runtime/test/module/RuntimeTestModuleResults.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.runtime.test.module; import org.pentaho.runtime.test.RuntimeTest; import org.pentaho.runtime.test.result.RuntimeTestEntrySeverity; import org.pentaho.runtime.test.result.RuntimeTestResult; import java.util.List; import java.util.Set; /** * Created by bryan on 8/11/15. */ public interface RuntimeTestModuleResults { String getName(); List getRuntimeTestResults(); Set getRunningTests(); Set getOutstandingTests(); RuntimeTestEntrySeverity getMaxSeverity(); } ================================================ FILE: api/runtimeTest/src/main/java/org/pentaho/runtime/test/module/impl/RuntimeTestModuleResultsImpl.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. 
* * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.runtime.test.module.impl; import org.pentaho.runtime.test.RuntimeTest; import org.pentaho.runtime.test.module.RuntimeTestModuleResults; import org.pentaho.runtime.test.result.RuntimeTestEntrySeverity; import org.pentaho.runtime.test.result.RuntimeTestResult; import java.util.ArrayList; import java.util.Collections; import java.util.HashSet; import java.util.List; import java.util.Set; /** * Created by bryan on 8/11/15. */ public class RuntimeTestModuleResultsImpl implements RuntimeTestModuleResults { private final String name; private final List runtimeTestResults; private final Set runningTests; private final Set outstandingTests; private final RuntimeTestEntrySeverity maxSeverity; public RuntimeTestModuleResultsImpl( String name, List runtimeTestResults, Set runningTests, Set outstandingTests ) { this.name = name; this.runningTests = Collections.unmodifiableSet( new HashSet<>( runningTests ) ); this.outstandingTests = Collections.unmodifiableSet( new HashSet<>( outstandingTests ) ); this.runtimeTestResults = Collections.unmodifiableList( new ArrayList<>( runtimeTestResults ) ); this.maxSeverity = RuntimeTestEntrySeverity.maxSeverityResult( runtimeTestResults ); } @Override public String getName() { return name; } @Override public List getRuntimeTestResults() { return runtimeTestResults; } @Override public RuntimeTestEntrySeverity getMaxSeverity() { return maxSeverity; } @Override public Set getRunningTests() { return runningTests; } @Override public Set getOutstandingTests() { return outstandingTests; } //OperatorWrap isn't helpful for autogenerated methods //CHECKSTYLE:OperatorWrap:OFF @Override public String toString() { return "RuntimeTestModuleResultsImpl{" + "getName='" + name + '\'' + ", runtimeTestResults=" + runtimeTestResults + ", runningTests=" + runningTests + ", outstandingTests=" + outstandingTests + ", maxSeverity=" + maxSeverity + '}'; } //CHECKSTYLE:OperatorWrap:ON } ================================================ FILE: api/runtimeTest/src/main/java/org/pentaho/runtime/test/network/ConnectivityTest.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.runtime.test.network; import org.pentaho.runtime.test.result.RuntimeTestResultEntry; /** * Created by bryan on 8/24/15. */ public interface ConnectivityTest { RuntimeTestResultEntry runTest(); } ================================================ FILE: api/runtimeTest/src/main/java/org/pentaho/runtime/test/network/ConnectivityTestFactory.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. 
* * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.runtime.test.network; import org.pentaho.runtime.test.i18n.MessageGetterFactory; import org.pentaho.runtime.test.result.RuntimeTestEntrySeverity; /** * Created by bryan on 8/24/15. */ public interface ConnectivityTestFactory { ConnectivityTest create( MessageGetterFactory messageGetterFactory, String hostname, String port, boolean haPossible ); ConnectivityTest create( MessageGetterFactory messageGetterFactory, String hostname, String port, boolean haPossible, RuntimeTestEntrySeverity severityOfFailures ); ConnectivityTest create( MessageGetterFactory messageGetterFactory, String url, String testPath, String user, String password ); ConnectivityTest create( MessageGetterFactory messageGetterFactory, String url, String testPath, String user, String password, RuntimeTestEntrySeverity severityOfFailures ); } ================================================ FILE: api/runtimeTest/src/main/java/org/pentaho/runtime/test/network/impl/ConnectivityTestFactoryImpl.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.runtime.test.network.impl; import org.pentaho.runtime.test.i18n.MessageGetterFactory; import org.pentaho.runtime.test.network.ConnectivityTest; import org.pentaho.runtime.test.network.ConnectivityTestFactory; import org.pentaho.runtime.test.result.RuntimeTestEntrySeverity; import java.net.URI; /** * Created by bryan on 8/24/15. */ public class ConnectivityTestFactoryImpl implements ConnectivityTestFactory { @Override public ConnectivityTest create( MessageGetterFactory messageGetterFactory, String hostname, String port, boolean haPossible ) { return create( messageGetterFactory, hostname, port, haPossible, RuntimeTestEntrySeverity.FATAL ); } @Override public ConnectivityTest create( MessageGetterFactory messageGetterFactory, String hostname, String port, boolean haPossible, RuntimeTestEntrySeverity severityOfFailures ) { return new ConnectivityTestImpl( messageGetterFactory, hostname, port, haPossible, severityOfFailures ); } @Override public ConnectivityTest create( MessageGetterFactory messageGetterFactory, String url, String testPath, String user, String password ) { return new GatewayConnectivityTestImpl( messageGetterFactory, URI.create( url ), testPath, user, password, RuntimeTestEntrySeverity.FATAL ); } @Override public ConnectivityTest create( MessageGetterFactory messageGetterFactory, String url, String testPath, String user, String password, RuntimeTestEntrySeverity severityOfFailures ) { return new GatewayConnectivityTestImpl( messageGetterFactory, URI.create( url ), testPath, user, password, severityOfFailures ); } } ================================================ FILE: api/runtimeTest/src/main/java/org/pentaho/runtime/test/network/impl/ConnectivityTestImpl.java ================================================ /*! 
****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.runtime.test.network.impl; import org.pentaho.di.core.Const; import org.pentaho.di.core.variables.Variables; import org.pentaho.runtime.test.i18n.MessageGetter; import org.pentaho.runtime.test.i18n.MessageGetterFactory; import org.pentaho.runtime.test.network.ConnectivityTest; import org.pentaho.runtime.test.result.RuntimeTestEntrySeverity; import org.pentaho.runtime.test.result.RuntimeTestResultEntry; import org.pentaho.runtime.test.test.impl.RuntimeTestResultEntryImpl; import java.io.IOException; import java.net.InetSocketAddress; import java.net.InetAddress; import java.net.Proxy; import java.net.Socket; import java.net.UnknownHostException; import java.util.ArrayList; import java.util.List; /** * Created by bryan on 8/14/15. */ public class ConnectivityTestImpl implements ConnectivityTest { public static final String CONNECT_TEST_HOST_BLANK_DESC = "ConnectTest.HostBlank.Desc"; public static final String CONNECT_TEST_HOST_BLANK_MESSAGE = "ConnectTest.HostBlank.Message"; public static final String CONNECT_TEST_HA_DESC = "ConnectTest.HA.Desc"; public static final String CONNECT_TEST_HA_MESSAGE = "ConnectTest.HA.Message"; public static final String CONNECT_TEST_PORT_BLANK_DESC = "ConnectTest.PortBlank.Desc"; public static final String CONNECT_TEST_PORT_BLANK_MESSAGE = "ConnectTest.PortBlank.Message"; public static final String CONNECT_TEST_CONNECT_SUCCESS_DESC = "ConnectTest.ConnectSuccess.Desc"; public static final String CONNECT_TEST_CONNECT_SUCCESS_MESSAGE = "ConnectTest.ConnectSuccess.Message"; public static final String CONNECT_TEST_CONNECT_FAIL_DESC = "ConnectTest.ConnectFail.Desc"; public static final String CONNECT_TEST_CONNECT_FAIL_MESSAGE = "ConnectTest.ConnectFail.Message"; public static final String CONNECT_TEST_UNKNOWN_HOSTNAME_DESC = "ConnectTest.UnknownHostname.Desc"; public static final String CONNECT_TEST_UNKNOWN_HOSTNAME_MESSAGE = "ConnectTest.UnknownHostname.Message"; public static final String CONNECT_TEST_NETWORK_ERROR_DESC = "ConnectTest.NetworkError.Desc"; public static final String CONNECT_TEST_NETWORK_ERROR_MESSAGE = "ConnectTest.NetworkError.Message"; public static final String CONNECT_TEST_PORT_NUMBER_FORMAT_DESC = "ConnectTest.PortNumberFormat.Desc"; public static final String CONNECT_TEST_PORT_NUMBER_FORMAT_MESSAGE = "ConnectTest.PortNumberFormat.Message"; public static final String CONNECT_TEST_UNREACHABLE_DESC = "ConnectTest.Unreachable.Desc"; public static final String CONNECT_TEST_UNREACHABLE_MESSAGE = "ConnectTest.Unreachable.Message"; private static final Class PKG = ConnectivityTestImpl.class; protected final MessageGetter messageGetter; protected final String hostname; protected final String port; private final boolean haPossible; protected final RuntimeTestEntrySeverity severityOfFalures; private final SocketFactory socketFactory; protected final InetAddressFactory inetAddressFactory; public ConnectivityTestImpl( MessageGetterFactory messageGetterFactory, String hostname, String port, boolean haPossible ) { this( messageGetterFactory, hostname, port, haPossible, RuntimeTestEntrySeverity.FATAL ); } public ConnectivityTestImpl( MessageGetterFactory 
messageGetterFactory, String hostname, String port, boolean haPossible, RuntimeTestEntrySeverity severityOfFailures ) { this( messageGetterFactory, hostname, port, haPossible, severityOfFailures, new SocketFactory(), new InetAddressFactory() ); } public ConnectivityTestImpl( MessageGetterFactory messageGetterFactory, String hostname, String port, boolean haPossible, RuntimeTestEntrySeverity severityOfFailures, SocketFactory socketFactory, InetAddressFactory inetAddressFactory ) { this.messageGetter = messageGetterFactory.create( PKG ); // The connection information might be parameterized. Since we aren't tied to a transformation or job, in order to // use a parameter, the value would have to be set as a system property or in kettle.properties, etc. // Here we try to resolve the parameters if we can: Variables variables = new Variables(); variables.initializeVariablesFrom( null ); this.hostname = variables.environmentSubstitute( hostname ); this.port = variables.environmentSubstitute( port ); this.haPossible = haPossible; this.severityOfFalures = severityOfFailures; this.socketFactory = socketFactory; this.inetAddressFactory = inetAddressFactory; } @Override public RuntimeTestResultEntry runTest() { List runtimeTestResultEntries = new ArrayList<>(); if ( Const.isEmpty( hostname ) ) { return new RuntimeTestResultEntryImpl( severityOfFalures, messageGetter.getMessage( CONNECT_TEST_HOST_BLANK_DESC ), messageGetter.getMessage( CONNECT_TEST_HOST_BLANK_MESSAGE ) ); } else if ( Const.isEmpty( port ) ) { if ( haPossible ) { return new RuntimeTestResultEntryImpl( RuntimeTestEntrySeverity.INFO, messageGetter.getMessage( CONNECT_TEST_HA_DESC ), messageGetter.getMessage( CONNECT_TEST_HA_MESSAGE, hostname ) ); } else { return new RuntimeTestResultEntryImpl( severityOfFalures, messageGetter.getMessage( CONNECT_TEST_PORT_BLANK_DESC ), messageGetter.getMessage( CONNECT_TEST_PORT_BLANK_MESSAGE ) ); } } else { Socket socket = null; try { if( !isSocks5ProxyServer() ) { if ( inetAddressFactory.create( hostname ).isReachable( 10 * 1000 ) ) { try { socket = socketFactory.create( hostname, Integer.parseInt( port ) ); return new RuntimeTestResultEntryImpl( RuntimeTestEntrySeverity.INFO, messageGetter.getMessage( CONNECT_TEST_CONNECT_SUCCESS_DESC ), messageGetter.getMessage( CONNECT_TEST_CONNECT_SUCCESS_MESSAGE, hostname, port ) ); } catch ( IOException e ) { return new RuntimeTestResultEntryImpl( severityOfFalures, messageGetter.getMessage( CONNECT_TEST_CONNECT_FAIL_DESC ), messageGetter.getMessage( CONNECT_TEST_CONNECT_FAIL_MESSAGE, hostname, port ), e ); } finally { if ( socket != null ) { try { socket.close(); } catch ( IOException e ) { // Ignore } } } } else { return new RuntimeTestResultEntryImpl( severityOfFalures, messageGetter.getMessage( CONNECT_TEST_UNREACHABLE_DESC, hostname ), messageGetter.getMessage( CONNECT_TEST_UNREACHABLE_MESSAGE, hostname ) ); } } else { String proxyHost = System.getProperty( "socksProxyHost" ); int proxyPort = Integer.parseInt( System.getProperty( "socksProxyPort" ) ); SocketFactory proxySocketFactory = new SocketFactory( proxyHost, proxyPort ); try { socket = proxySocketFactory.create( hostname, Integer.parseInt( port ) ); return new RuntimeTestResultEntryImpl( RuntimeTestEntrySeverity.INFO, messageGetter.getMessage( CONNECT_TEST_CONNECT_SUCCESS_DESC ), messageGetter.getMessage( CONNECT_TEST_CONNECT_SUCCESS_MESSAGE, hostname, port ) ); } catch ( IOException e ) { return new RuntimeTestResultEntryImpl( severityOfFalures, messageGetter.getMessage( CONNECT_TEST_CONNECT_FAIL_DESC 
), messageGetter.getMessage( CONNECT_TEST_CONNECT_FAIL_MESSAGE, hostname, port ), e); } finally { if ( socket != null ) { try { socket.close(); } catch ( IOException e ) { // Ignore } } } } } catch ( UnknownHostException e ) { return new RuntimeTestResultEntryImpl( severityOfFalures, messageGetter.getMessage( CONNECT_TEST_UNKNOWN_HOSTNAME_DESC ), messageGetter.getMessage( CONNECT_TEST_UNKNOWN_HOSTNAME_MESSAGE, hostname ), e ); } catch ( IOException e ) { return new RuntimeTestResultEntryImpl( severityOfFalures, messageGetter.getMessage( CONNECT_TEST_NETWORK_ERROR_DESC ), messageGetter.getMessage( CONNECT_TEST_NETWORK_ERROR_MESSAGE, hostname, port ), e ); } catch ( NumberFormatException e ) { return new RuntimeTestResultEntryImpl( RuntimeTestEntrySeverity.FATAL, messageGetter.getMessage( CONNECT_TEST_PORT_NUMBER_FORMAT_DESC ), messageGetter.getMessage( CONNECT_TEST_PORT_NUMBER_FORMAT_MESSAGE, port ), e ); } } } private boolean isSocks5ProxyServer() { String proxyHost = System.getProperty( "socksProxyHost" ); String proxyPort = System.getProperty( "socksProxyPort" ); return proxyHost != null && !proxyHost.isEmpty() && proxyPort != null && !proxyPort.isEmpty(); } /** * Pulled out class to enable mock injection in tests */ public static class SocketFactory { private final String proxyHost; private final int proxyPort; public SocketFactory() { this.proxyHost = null; this.proxyPort = -1; } public SocketFactory( String proxyHost, int proxyPort ) { this.proxyHost = proxyHost; this.proxyPort = proxyPort; } public Socket create( String hostname, int port ) throws IOException { if ( proxyHost != null && proxyPort > 0 ) { Proxy proxy = new Proxy( Proxy.Type.SOCKS, new InetSocketAddress( proxyHost, proxyPort ) ); Socket socket = new Socket( proxy); socket.connect( new InetSocketAddress( hostname, port ), 10000 ); return socket; } else { return new Socket( hostname, port ); } } } /** * Pulled out class to enable mock injection in tests */ public static class InetAddressFactory { public InetAddress create( String hostname ) throws UnknownHostException { return InetAddress.getByName( hostname ); } } } ================================================ FILE: api/runtimeTest/src/main/java/org/pentaho/runtime/test/network/impl/GatewayConnectivityTestImpl.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. 
* * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.runtime.test.network.impl; import com.google.common.annotations.VisibleForTesting; import org.apache.commons.lang.StringUtils; import org.apache.http.HttpResponse; import org.apache.http.client.HttpClient; import org.apache.http.client.methods.HttpGet; import org.apache.http.client.protocol.HttpClientContext; import org.pentaho.di.core.util.HttpClientManager; import org.pentaho.di.core.util.HttpClientUtil; import org.pentaho.di.core.variables.Variables; import org.pentaho.runtime.test.i18n.MessageGetterFactory; import org.pentaho.runtime.test.result.RuntimeTestEntrySeverity; import org.pentaho.runtime.test.result.RuntimeTestResultEntry; import org.pentaho.runtime.test.test.impl.RuntimeTestResultEntryImpl; import javax.net.ssl.KeyManager; import javax.net.ssl.SSLContext; import javax.net.ssl.SSLException; import javax.net.ssl.TrustManager; import javax.net.ssl.X509TrustManager; import java.io.IOException; import java.net.URI; import java.net.UnknownHostException; import java.security.KeyManagementException; import java.security.NoSuchAlgorithmException; import java.security.SecureRandom; import java.security.cert.CertificateException; import java.security.cert.X509Certificate; /** * Created by dstepanov on 26/04/17. */ public class GatewayConnectivityTestImpl extends ConnectivityTestImpl { public static final String GATEWAY_CONNECT_TEST_CONNECT_SUCCESS_DESC = "GatewayConnectTest.Success.Desc"; public static final String GATEWAY_CONNECT_TEST_CONNECT_SUCCESS_MESSAGE = "GatewayConnectTest.Success.Message"; public static final String GATEWAY_CONNECT_TEST_CONNECT_UNKNOWN_RETURN_CODE_DESC = "GatewayConnectTest.UnknownReturnCode.Desc"; public static final String GATEWAY_CONNECT_TEST_CONNECT_UNKNOWN_RETURN_CODE_MESSAGE = "GatewayConnectTest.UnknownReturnCode.Message"; public static final String GATEWAY_CONNECT_TEST_SERVICE_NOT_FOUND_DESC = "GatewayConnectTest.ServiceNotFound.Desc"; public static final String GATEWAY_CONNECT_TEST_SERVICE_NOT_FOUND_MESSAGE = "GatewayConnectTest.ServiceNotFound.Message"; public static final String GATEWAY_CONNECT_TEST_FORBIDDEN_DESC = "GatewayConnectTest.Forbidden.Desc"; public static final String GATEWAY_CONNECT_TEST_FORBIDDEN_MESSAGE = "GatewayConnectTest.Forbidden.Message"; public static final String GATEWAY_CONNECT_TLSCONTEXT_DESC = "GatewayConnectTest.TLSContext.Desc"; public static final String GATEWAY_CONNECT_SSLEXCEPTION_MESSAGE = "GatewayConnectTest.SSLException.Message"; public static final String GATEWAY_CONNECT_SSLEXCEPTION_DESC = "GatewayConnectTest.SSLException.Desc"; public static final String GATEWAY_CONNECT_TLSCONTEXT_MESSAGE = "GatewayConnectTest.TLSContext.Message"; public static final String GATEWAY_CONNECT_TEST_UNAUTHORIZED_DESC = "GatewayConnectTest.Unauthorized.Desc"; public static final String GATEWAY_CONNECT_TEST_UNAUTHORIZED_MESSAGE = "GatewayConnectTest.Unauthorized.Message"; public static final String GATEWAY_CONNECT_TLSCONTEXTINIT_DESC = "GatewayConnectTest.TLSContextInit.Desc"; public static final String GATEWAY_CONNECT_TLSCONTEXTINIT_MESSAGE = "GatewayConnectTest.TLSContextInit.Message"; public static final String GATEWAY_CONNECT_EXECUTION_FAILED_DESC = "GatewayConnectTest.ExecutionFailed.Desc"; public static final String GATEWAY_CONNECT_EXECUTION_FAILED_MESSAGE = "GatewayConnectTest.ExecutionFailed.Message"; private static final Class PKG = GatewayConnectivityTestImpl.class; private final URI uri; private final 
String path; private final String user; private final String password; private final Variables variables; private HttpClientManager httpClientManager = HttpClientManager.getInstance(); public GatewayConnectivityTestImpl( MessageGetterFactory messageGetterFactory, URI uri, String testPath, String user, String password, RuntimeTestEntrySeverity severity ) { super( messageGetterFactory, uri.getHost(), Integer.toString( uri.getPort() ), true, severity ); // The connection information might be parameterized. Since we aren't tied to a transformation or job, in order to // use a parameter, the value would have to be set as a system property or in kettle.properties, etc. // Here we try to resolve the parameters if we can: variables = new Variables(); variables.initializeVariablesFrom( null ); this.path = variables.environmentSubstitute( testPath ); this.password = variables.environmentSubstitute( password ); this.user = variables.environmentSubstitute( user ); this.uri = uri.resolve( uri.getPath() + path ); } @Override public RuntimeTestResultEntry runTest() { if ( StringUtils.isBlank( hostname ) ) { return new RuntimeTestResultEntryImpl( severityOfFalures, messageGetter.getMessage( CONNECT_TEST_HOST_BLANK_DESC ), messageGetter.getMessage( CONNECT_TEST_HOST_BLANK_MESSAGE ) ); } else { try { Integer portInt = Integer.parseInt( port ); // Ignore ssl certificate issues if KETTLE_KNOX_IGNORE_SSL = true if ( variables.getBooleanValueOfVariable( "${KETTLE_KNOX_IGNORE_SSL}", false ) ) { SSLContext ctx = getTlsContext(); initContextWithTrustAll( ctx ); SSLContext.setDefault( ctx ); } String userString = ""; HttpClientContext context = null; HttpGet method = new HttpGet( uri.toString() ); HttpClient httpClient; if ( StringUtils.isNotBlank( user ) ) { userString = user; httpClient = getHttpClient( user, password ); context = HttpClientUtil.createPreemptiveBasicAuthentication( uri.getHost(), portInt, user, password ); } else { httpClient = getHttpClient(); } HttpResponse httpResponse = context != null ? 
httpClient.execute( method, context ) : httpClient.execute( method ); Integer returnCode = httpResponse.getStatusLine().getStatusCode(); switch ( returnCode ) { case 200: { return new RuntimeTestResultEntryImpl( RuntimeTestEntrySeverity.INFO, messageGetter.getMessage( GATEWAY_CONNECT_TEST_CONNECT_SUCCESS_DESC ), messageGetter.getMessage( GATEWAY_CONNECT_TEST_CONNECT_SUCCESS_MESSAGE, uri.toString() ) ); } case 404: { return new RuntimeTestResultEntryImpl( severityOfFalures, messageGetter.getMessage( GATEWAY_CONNECT_TEST_SERVICE_NOT_FOUND_DESC ), messageGetter.getMessage( GATEWAY_CONNECT_TEST_SERVICE_NOT_FOUND_MESSAGE, uri.toString() ) ); } case 403: { return new RuntimeTestResultEntryImpl( severityOfFalures, messageGetter.getMessage( GATEWAY_CONNECT_TEST_FORBIDDEN_DESC ), messageGetter.getMessage( GATEWAY_CONNECT_TEST_FORBIDDEN_MESSAGE, uri.toString(), userString ) ); } case 401: { return new RuntimeTestResultEntryImpl( severityOfFalures, messageGetter.getMessage( GATEWAY_CONNECT_TEST_UNAUTHORIZED_DESC ), messageGetter.getMessage( GATEWAY_CONNECT_TEST_UNAUTHORIZED_MESSAGE, uri.toString(), userString ) ); } default: { return new RuntimeTestResultEntryImpl( RuntimeTestEntrySeverity.WARNING, messageGetter.getMessage( GATEWAY_CONNECT_TEST_CONNECT_UNKNOWN_RETURN_CODE_DESC ), messageGetter.getMessage( GATEWAY_CONNECT_TEST_CONNECT_UNKNOWN_RETURN_CODE_MESSAGE, userString, returnCode.toString(), uri.toString() ) ); } } } catch ( NoSuchAlgorithmException e ) { return new RuntimeTestResultEntryImpl( RuntimeTestEntrySeverity.FATAL, messageGetter.getMessage( GATEWAY_CONNECT_TLSCONTEXT_DESC ), messageGetter.getMessage( GATEWAY_CONNECT_TLSCONTEXT_MESSAGE ), e ); } catch ( SSLException e ) { return new RuntimeTestResultEntryImpl( severityOfFalures, messageGetter.getMessage( GATEWAY_CONNECT_SSLEXCEPTION_DESC ), messageGetter.getMessage( GATEWAY_CONNECT_SSLEXCEPTION_MESSAGE, uri.toString(), e.getMessage() ), e ); } catch ( UnknownHostException e ) { return new RuntimeTestResultEntryImpl( severityOfFalures, messageGetter.getMessage( CONNECT_TEST_UNKNOWN_HOSTNAME_DESC ), messageGetter.getMessage( CONNECT_TEST_UNKNOWN_HOSTNAME_MESSAGE, uri.getHost() ), e ); } catch ( KeyManagementException e ) { return new RuntimeTestResultEntryImpl( RuntimeTestEntrySeverity.FATAL, messageGetter.getMessage( GATEWAY_CONNECT_TLSCONTEXTINIT_DESC ), messageGetter.getMessage( GATEWAY_CONNECT_TLSCONTEXTINIT_MESSAGE ), e ); } catch ( IOException e ) { return new RuntimeTestResultEntryImpl( severityOfFalures, messageGetter.getMessage( GATEWAY_CONNECT_EXECUTION_FAILED_DESC ), messageGetter.getMessage( GATEWAY_CONNECT_EXECUTION_FAILED_MESSAGE, uri.toString() ), e ); } catch ( NumberFormatException e ) { return new RuntimeTestResultEntryImpl( RuntimeTestEntrySeverity.FATAL, messageGetter.getMessage( CONNECT_TEST_PORT_NUMBER_FORMAT_DESC ), messageGetter.getMessage( CONNECT_TEST_PORT_NUMBER_FORMAT_MESSAGE, port ), e ); } } } void initContextWithTrustAll( SSLContext ctx ) throws KeyManagementException { ctx.init( new KeyManager[ 0 ], new TrustManager[] { new X509TrustManager() { @Override public void checkClientTrusted( X509Certificate[] x509Certificates, String s ) throws CertificateException { } @Override public void checkServerTrusted( X509Certificate[] x509Certificates, String s ) throws CertificateException { } @Override public X509Certificate[] getAcceptedIssuers() { return null; } } }, new SecureRandom() ); } SSLContext getTlsContext() throws NoSuchAlgorithmException { return SSLContext.getInstance( "TLS" ); } @VisibleForTesting 
HttpClient getHttpClient() { return httpClientManager.createDefaultClient(); } @VisibleForTesting HttpClient getHttpClient( String user, String password ) { HttpClientManager.HttpClientBuilderFacade clientBuilder = httpClientManager.createBuilder(); clientBuilder.setCredentials( user, password ); return clientBuilder.build(); } } ================================================ FILE: api/runtimeTest/src/main/java/org/pentaho/runtime/test/result/RuntimeTestEntrySeverity.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.runtime.test.result; import java.util.Collection; /** * Created by bryan on 8/11/15. */ public enum RuntimeTestEntrySeverity { DEBUG, INFO, WARNING, SKIPPED, ERROR, FATAL; public static RuntimeTestEntrySeverity maxSeverityResult( Collection runtimeTestResults ) { RuntimeTestEntrySeverity maxSeverity = null; for ( RuntimeTestResult runtimeTestResult : runtimeTestResults ) { if ( runtimeTestResult.isDone() ) { RuntimeTestEntrySeverity severity = runtimeTestResult.getOverallStatusEntry().getSeverity(); if ( maxSeverity == null || ( severity != null && severity.ordinal() > maxSeverity.ordinal() ) ) { maxSeverity = severity; } } } return maxSeverity; } } ================================================ FILE: api/runtimeTest/src/main/java/org/pentaho/runtime/test/result/RuntimeTestResult.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.runtime.test.result; import org.pentaho.runtime.test.RuntimeTest; /** * Created by bryan on 8/11/15. */ public interface RuntimeTestResult extends RuntimeTestResultSummary { RuntimeTest getRuntimeTest(); boolean isDone(); long getTimeTaken(); } ================================================ FILE: api/runtimeTest/src/main/java/org/pentaho/runtime/test/result/RuntimeTestResultEntry.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.runtime.test.result; import org.pentaho.runtime.test.action.RuntimeTestAction; /** * Created by bryan on 8/11/15. */ public interface RuntimeTestResultEntry { RuntimeTestEntrySeverity getSeverity(); String getDescription(); String getMessage(); Throwable getException(); RuntimeTestAction getAction(); } ================================================ FILE: api/runtimeTest/src/main/java/org/pentaho/runtime/test/result/RuntimeTestResultSummary.java ================================================ /*! 
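A minimal usage sketch for the gateway connectivity check above, assuming the class and the constructor shown here are accessible to calling code; the gateway URL, Knox-style test path, and credentials are invented:

import java.net.URI;
import org.pentaho.runtime.test.i18n.impl.BaseMessagesMessageGetterFactoryImpl;
import org.pentaho.runtime.test.network.impl.GatewayConnectivityTestImpl;
import org.pentaho.runtime.test.result.RuntimeTestEntrySeverity;
import org.pentaho.runtime.test.result.RuntimeTestResultEntry;

public class GatewayCheckSketch {
  public static void main( String[] args ) {
    URI gateway = URI.create( "https://knox.example.com:8443/gateway/default" );
    // HTTP 200 maps to INFO, 401/403/404 map to the severity passed in here, other codes map to WARNING
    GatewayConnectivityTestImpl check = new GatewayConnectivityTestImpl(
      new BaseMessagesMessageGetterFactoryImpl(), gateway, "/webhdfs/v1/?op=LISTSTATUS",
      "admin", "admin-password", RuntimeTestEntrySeverity.ERROR );
    RuntimeTestResultEntry entry = check.runTest();
    System.out.println( entry.getSeverity() + ": " + entry.getMessage() );
  }
}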
****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.runtime.test.result; import java.util.List; /** * Created by bryan on 8/26/15. */ public interface RuntimeTestResultSummary { RuntimeTestResultEntry getOverallStatusEntry(); List getRuntimeTestResultEntries(); } ================================================ FILE: api/runtimeTest/src/main/java/org/pentaho/runtime/test/result/org/pentaho/runtime/test/result/impl/RuntimeTestResultSummaryImpl.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.runtime.test.result.org.pentaho.runtime.test.result.impl; import org.pentaho.runtime.test.result.RuntimeTestResultEntry; import org.pentaho.runtime.test.result.RuntimeTestResultSummary; import java.util.ArrayList; import java.util.Collections; import java.util.List; /** * Created by bryan on 8/26/15. */ public class RuntimeTestResultSummaryImpl implements RuntimeTestResultSummary { private final RuntimeTestResultEntry rollupTestResultEntry; private final List runtimeTestResultEntries; public RuntimeTestResultSummaryImpl() { this( null ); } @SuppressWarnings( "unchecked" ) public RuntimeTestResultSummaryImpl( RuntimeTestResultEntry rollupTestResultEntry ) { this( rollupTestResultEntry, Collections.EMPTY_LIST ); } public RuntimeTestResultSummaryImpl( RuntimeTestResultEntry rollupTestResultEntry, List runtimeTestResultEntries ) { this.rollupTestResultEntry = rollupTestResultEntry; this.runtimeTestResultEntries = Collections.unmodifiableList( new ArrayList<>( runtimeTestResultEntries ) ); } @Override public RuntimeTestResultEntry getOverallStatusEntry() { return rollupTestResultEntry; } @Override public List getRuntimeTestResultEntries() { return runtimeTestResultEntries; } //OperatorWrap isn't helpful for autogenerated methods //CHECKSTYLE:OperatorWrap:OFF @Override public String toString() { return "RuntimeTestResultSummaryImpl{" + "rollupTestResultEntry=" + rollupTestResultEntry + ", runtimeTestResultEntries=" + runtimeTestResultEntries + '}'; } //CHECKSTYLE:OperatorWrap:OFF } ================================================ FILE: api/runtimeTest/src/main/java/org/pentaho/runtime/test/test/impl/BaseRuntimeTest.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.runtime.test.test.impl; import org.pentaho.runtime.test.RuntimeTest; import java.util.Collections; import java.util.HashSet; import java.util.Set; /** * Created by bryan on 8/11/15. 
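A short sketch of how the summary and entry types above fit together; the descriptions and messages are invented, and RuntimeTestResultEntryImpl is the entry implementation defined further down in this module:

import java.util.Arrays;
import org.pentaho.runtime.test.result.RuntimeTestEntrySeverity;
import org.pentaho.runtime.test.result.RuntimeTestResultEntry;
import org.pentaho.runtime.test.result.RuntimeTestResultSummary;
import org.pentaho.runtime.test.result.org.pentaho.runtime.test.result.impl.RuntimeTestResultSummaryImpl;
import org.pentaho.runtime.test.test.impl.RuntimeTestResultEntryImpl;

public class SummarySketch {
  public static void main( String[] args ) {
    RuntimeTestResultEntry rollup = new RuntimeTestResultEntryImpl(
      RuntimeTestEntrySeverity.WARNING, "Overall status", "One sub-check produced a warning" );
    RuntimeTestResultSummary summary = new RuntimeTestResultSummaryImpl( rollup,
      Arrays.<RuntimeTestResultEntry>asList(
        new RuntimeTestResultEntryImpl( RuntimeTestEntrySeverity.INFO, "Step 1", "Succeeded" ),
        new RuntimeTestResultEntryImpl( RuntimeTestEntrySeverity.WARNING, "Step 2", "Slow response" ) ) );
    // The entry list handed to the constructor is copied and exposed as an unmodifiable list.
    System.out.println( summary.getOverallStatusEntry().getSeverity() ); // WARNING
    System.out.println( summary.getRuntimeTestResultEntries().size() );  // 2
  }
}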
*/ public abstract class BaseRuntimeTest implements RuntimeTest { private final Class classUnderTest; private final String module; private final String id; private final String name; private final boolean configInitTest; private final Set dependencies; public BaseRuntimeTest( Class classUnderTest, String module, String id, String name, Set dependencies ) { this( classUnderTest, module, id, name, false, dependencies ); } public BaseRuntimeTest( Class classUnderTest, String module, String id, String name, boolean configInitTest, Set dependencies ) { this.classUnderTest = classUnderTest; this.module = module; this.id = id; this.name = name; this.configInitTest = configInitTest; this.dependencies = Collections.unmodifiableSet( new HashSet<>( dependencies ) ); } @Override public boolean accepts( Object objectUnderTest ) { return classUnderTest.isInstance( objectUnderTest ); } @Override public String getModule() { return module; } @Override public String getId() { return id; } @Override public String getName() { return name; } @Override public Set getDependencies() { return dependencies; } @Override public boolean isConfigInitTest() { return configInitTest; } //OperatorWrap isn't helpful for autogenerated methods //CHECKSTYLE:OperatorWrap:OFF @Override public String toString() { return "BaseRuntimeTest{" + "module='" + module + '\'' + ", id='" + id + '\'' + ", getName='" + name + '\'' + ", configInitTest=" + configInitTest + ", dependencies=" + dependencies + '}'; } //CHECKSTYLE:OperatorWrap:ON } ================================================ FILE: api/runtimeTest/src/main/java/org/pentaho/runtime/test/test/impl/RuntimeTestDelegateWithMoreDependencies.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.runtime.test.test.impl; import org.pentaho.runtime.test.RuntimeTest; import org.pentaho.runtime.test.result.RuntimeTestResultSummary; import java.util.Collections; import java.util.HashSet; import java.util.Set; /** * Created by bryan on 8/17/15. 
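A minimal concrete subclass sketch for the abstract base above; the module name, test id, and behavior are invented, and dependencies are expressed as the ids of other tests:

import java.util.Collections;
import org.pentaho.runtime.test.result.RuntimeTestEntrySeverity;
import org.pentaho.runtime.test.result.RuntimeTestResultSummary;
import org.pentaho.runtime.test.result.org.pentaho.runtime.test.result.impl.RuntimeTestResultSummaryImpl;
import org.pentaho.runtime.test.test.impl.BaseRuntimeTest;
import org.pentaho.runtime.test.test.impl.RuntimeTestResultEntryImpl;

public class PingTest extends BaseRuntimeTest {
  public PingTest() {
    // accepts() will only return true for String objects under test
    super( String.class, "exampleModule", "examplePingTest", "Ping", Collections.<String>emptySet() );
  }

  @Override
  public RuntimeTestResultSummary runTest( Object objectUnderTest ) {
    String host = (String) objectUnderTest;
    return new RuntimeTestResultSummaryImpl( new RuntimeTestResultEntryImpl(
      RuntimeTestEntrySeverity.INFO, "Ping", "Pretended to ping " + host ) );
  }
}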
*/ public class RuntimeTestDelegateWithMoreDependencies implements RuntimeTest { private final RuntimeTest delegate; private final Set extraDependencies; public RuntimeTestDelegateWithMoreDependencies( RuntimeTest delegate, Set extraDependencies ) { this.delegate = delegate; this.extraDependencies = new HashSet<>( extraDependencies ); } @Override public boolean accepts( Object objectUnderTest ) { return delegate.accepts( objectUnderTest ); } @Override public String getModule() { return delegate.getModule(); } @Override public String getId() { return delegate.getId(); } @Override public String getName() { return delegate.getName(); } @Override public boolean isConfigInitTest() { return delegate.isConfigInitTest(); } @Override public Set getDependencies() { HashSet set = new HashSet( extraDependencies ); set.addAll( delegate.getDependencies() ); return Collections.unmodifiableSet( set ); } @Override public RuntimeTestResultSummary runTest( Object objectUnderTest ) { return delegate.runTest( objectUnderTest ); } //OperatorWrap isn't helpful for autogenerated methods //CHECKSTYLE:OperatorWrap:OFF @Override public String toString() { return "RuntimeTestDelegateWithMoreDependencies{" + "delegate=" + delegate + ", extraDependencies=" + extraDependencies + '}'; } //CHECKSTYLE:OperatorWrap:ON } ================================================ FILE: api/runtimeTest/src/main/java/org/pentaho/runtime/test/test/impl/RuntimeTestResultEntryImpl.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.runtime.test.test.impl; import org.pentaho.runtime.test.action.RuntimeTestAction; import org.pentaho.runtime.test.result.RuntimeTestEntrySeverity; import org.pentaho.runtime.test.result.RuntimeTestResultEntry; /** * Created by bryan on 8/12/15. 
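A brief sketch of the wrapper above, reusing the invented PingTest from the previous sketch and an invented dependency id:

import java.util.Collections;
import org.pentaho.runtime.test.RuntimeTest;
import org.pentaho.runtime.test.test.impl.RuntimeTestDelegateWithMoreDependencies;

public class DelegateSketch {
  public static void main( String[] args ) {
    RuntimeTest ping = new PingTest();
    RuntimeTest pingAfterConfigInit = new RuntimeTestDelegateWithMoreDependencies(
      ping, Collections.singleton( "exampleConfigInitTest" ) );
    // getDependencies() now contains the delegate's own ids plus "exampleConfigInitTest"
    System.out.println( pingAfterConfigInit.getDependencies() );
  }
}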
*/ public class RuntimeTestResultEntryImpl implements RuntimeTestResultEntry { private final RuntimeTestEntrySeverity severity; private final String description; private final String message; private final Throwable exception; private final RuntimeTestAction runtimeTestAction; public RuntimeTestResultEntryImpl( RuntimeTestEntrySeverity severity, String description, String message ) { this( severity, description, message, (Throwable) null ); } public RuntimeTestResultEntryImpl( RuntimeTestEntrySeverity severity, String description, String message, RuntimeTestAction runtimeTestAction ) { this( severity, description, message, null, runtimeTestAction ); } public RuntimeTestResultEntryImpl( RuntimeTestEntrySeverity severity, String description, String message, Throwable exception ) { this( severity, description, message, exception, null ); } public RuntimeTestResultEntryImpl( RuntimeTestEntrySeverity severity, String description, String message, Throwable exception, RuntimeTestAction runtimeTestAction ) { this.severity = severity; this.description = description; this.message = message; this.exception = exception; this.runtimeTestAction = runtimeTestAction; } @Override public RuntimeTestEntrySeverity getSeverity() { return severity; } @Override public String getDescription() { return description; } @Override public String getMessage() { return message; } @Override public Throwable getException() { return exception; } @Override public RuntimeTestAction getAction() { return runtimeTestAction; } //OperatorWrap isn't helpful for autogenerated methods //CHECKSTYLE:OperatorWrap:OFF @Override public String toString() { return "RuntimeTestResultEntryImpl{" + "severity=" + severity + ", description='" + description + '\'' + ", message='" + message + '\'' + ", exception=" + exception + '}'; } //CHECKSTYLE:OperatorWrap:ON } ================================================ FILE: api/runtimeTest/src/main/java/org/pentaho/runtime/test/test/impl/RuntimeTestResultImpl.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.runtime.test.test.impl; import org.pentaho.runtime.test.RuntimeTest; import org.pentaho.runtime.test.result.RuntimeTestResult; import org.pentaho.runtime.test.result.RuntimeTestResultEntry; import org.pentaho.runtime.test.result.RuntimeTestResultSummary; import org.pentaho.runtime.test.result.org.pentaho.runtime.test.result.impl.RuntimeTestResultSummaryImpl; import java.util.List; /** * Created by bryan on 8/12/15. */ public class RuntimeTestResultImpl implements RuntimeTestResult { private final RuntimeTest runtimeTest; private final boolean isDone; private final RuntimeTestResultSummary runtimeTestResultSummary; private final long timeTaken; public RuntimeTestResultImpl( RuntimeTest runtimeTest, boolean isDone, RuntimeTestResultSummary runtimeTestResultSummary, long timeTaken ) { this.runtimeTest = runtimeTest; this.isDone = isDone; this.runtimeTestResultSummary = runtimeTestResultSummary == null ? 
new RuntimeTestResultSummaryImpl( null ) : runtimeTestResultSummary; this.timeTaken = timeTaken; } @Override public RuntimeTest getRuntimeTest() { return runtimeTest; } @Override public boolean isDone() { return isDone; } @Override public long getTimeTaken() { return timeTaken; } @Override public RuntimeTestResultEntry getOverallStatusEntry() { return runtimeTestResultSummary.getOverallStatusEntry(); } @Override public List getRuntimeTestResultEntries() { return runtimeTestResultSummary.getRuntimeTestResultEntries(); } //OperatorWrap isn't helpful for autogenerated methods //CHECKSTYLE:OperatorWrap:OFF @Override public String toString() { return "RuntimeTestResultImpl{" + "runtimeTest=" + runtimeTest + ", isDone=" + isDone + ", runtimeTestResultSummary=" + runtimeTestResultSummary + ", timeTaken=" + timeTaken + '}'; } //CHECKSTYLE:OperatorWrap:ON } ================================================ FILE: api/runtimeTest/src/main/resources/OSGI-INF/blueprint/blueprint.xml ================================================ ================================================ FILE: api/runtimeTest/src/main/resources/org/pentaho/runtime/test/action/impl/messages/messages_en_US.properties ================================================ HelpUrlPayload.Message=Please see help at {0} LoggingRuntimeTestActionHandlerImpl.Action=Recommended action: {0}, {1}\nResource: {2} LoggingRuntimeTestActionHandlerImpl.MissingSeverity=Recommended action: {0}, {1}\nResource: {2} ================================================ FILE: api/runtimeTest/src/main/resources/org/pentaho/runtime/test/impl/messages/messages_en_US.properties ================================================ RuntimeTestRunner.Skipped.Desc=This test was skipped because {0} was not successful. RuntimeTestRunner.Skipped.Message=The {0} test was skipped because test {1} was not successful. RuntimeTestRunner.Error.Desc=We couldn''t run test {0}. ================================================ FILE: api/runtimeTest/src/main/resources/org/pentaho/runtime/test/network/impl/messages/messages_en_US.properties ================================================ ConnectTest.HostBlank.Desc=Hostname is required. ConnectTest.HostBlank.Message=A hostname was not found for this service. ConnectTest.HA.Desc=This service supports High Availability. ConnectTest.HA.Message=Since no port is set, we assume that High Availability has been enabled for {0}. ConnectTest.PortBlank.Desc=Port number is required. ConnectTest.PortBlank.Message=Port number is required. ConnectTest.ConnectSuccess.Desc=Successfully connected to host. ConnectTest.ConnectSuccess.Message=Successfully connected to {0} at port {1}. ConnectTest.ConnectFail.Desc=Unable to connect to the host. ConnectTest.ConnectFail.Message=Unable to connect to {0} at port {1}. ConnectTest.UnknownHostname.Desc=Hostname is unknown. ConnectTest.UnknownHostname.Message=Hostname {0} is unknown. Verify that the hostname is valid. ConnectTest.Unreachable.Desc=Unable to connect to hostname {0}. ConnectTest.Unreachable.Message=Unable to connect to hostname {0}. Contact the network or Hadoop administrator for help. ConnectTest.NetworkError.Desc=Unable to connect because of network problems. ConnectTest.NetworkError.Message=There was a network problem when we tried to connect to hostname {0} and port {1}. ConnectTest.PortNumberFormat.Desc=The port must be a number. ConnectTest.PortNumberFormat.Message=Port {0} must be a number. GatewayConnectTest.Success.Desc=Successfully connected to gateway.
GatewayConnectTest.Success.Message=Successfully connected to gateway {0}. GatewayConnectTest.UnknownReturnCode.Desc=Unknown return code returned from gateway. GatewayConnectTest.UnknownReturnCode.Message=Unknown return code {1} returned for user {0} from gateway for uri {2}. GatewayConnectTest.ServiceNotFound.Desc=Desired service not found. GatewayConnectTest.ServiceNotFound.Message=Service for uri {0} not found. GatewayConnectTest.Forbidden.Desc=Forbidden response from gateway. GatewayConnectTest.Forbidden.Message=Desired resource {0} is not available for user {1}. GatewayConnectTest.TLSContext.Desc=Failed to create the TLS context. GatewayConnectTest.TLSContext.Message=Failed to create the TLS context. GatewayConnectTest.SSLException.Desc=SSLException. GatewayConnectTest.SSLException.Message=SSLException {1} occurred while accessing {0}. GatewayConnectTest.Unauthorized.Desc=Authorization is required. GatewayConnectTest.Unauthorized.Message=Authorization is required for uri {0}. User {1}. GatewayConnectTest.TLSContextInit.Desc=TLS context initialization failed. GatewayConnectTest.TLSContextInit.Message=TLS context initialization failed. GatewayConnectTest.ExecutionFailed.Desc=Unable to check service at gateway. GatewayConnectTest.ExecutionFailed.Message=Unable to check service {0} at gateway. ================================================ FILE: api/runtimeTest/src/test/java/org/pentaho/runtime/test/RuntimeTestEntryUtil.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.runtime.test; import org.pentaho.runtime.test.result.RuntimeTestEntrySeverity; import org.pentaho.runtime.test.result.RuntimeTestResultEntry; import java.util.List; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertNull; import static org.junit.Assert.assertTrue; /** * Created by bryan on 8/21/15.
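The {0}/{1} placeholders and the doubled apostrophe in these bundles appear to follow java.text.MessageFormat conventions, which is presumably how the parameters passed to MessageGetter.getMessage( key, parameters... ) end up in the rendered text. A small illustration, not taken from the repository:

import java.text.MessageFormat;

public class MessageFormatSketch {
  public static void main( String[] args ) {
    System.out.println( MessageFormat.format(
      "Successfully connected to {0} at port {1}.", "namenode.example.com", "8020" ) );
    // -> Successfully connected to namenode.example.com at port 8020.
    System.out.println( MessageFormat.format( "We couldn''t run test {0}.", "examplePingTest" ) );
    // -> We couldn't run test examplePingTest.
  }
}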
*/ public class RuntimeTestEntryUtil { public static RuntimeTestResultEntry expectOneEntry( List runtimeTestResultEntries ) { assertNotNull( runtimeTestResultEntries ); assertEquals( 1, runtimeTestResultEntries.size() ); return runtimeTestResultEntries.get( 0 ); } public static void verifyRuntimeTestResultEntry( RuntimeTestResultEntry runtimeTestResultEntry, RuntimeTestEntrySeverity severity, String desc, String message ) { verifyRuntimeTestResultEntry( runtimeTestResultEntry, severity, desc, message, null ); } public static Throwable verifyRuntimeTestResultEntry( RuntimeTestResultEntry runtimeTestResultEntry, RuntimeTestEntrySeverity severity, String desc, String message, Class exceptionClass ) { assertNotNull( runtimeTestResultEntry ); assertEquals( severity, runtimeTestResultEntry.getSeverity() ); assertEquals( desc, runtimeTestResultEntry.getDescription() ); assertEquals( message, runtimeTestResultEntry.getMessage() ); Throwable runtimeTestResultEntryException = runtimeTestResultEntry.getException(); if ( exceptionClass == null ) { assertNull( runtimeTestResultEntryException ); } else { assertTrue( "expected exception of type " + exceptionClass, exceptionClass.isInstance( runtimeTestResultEntryException ) ); } return runtimeTestResultEntryException; } } ================================================ FILE: api/runtimeTest/src/test/java/org/pentaho/runtime/test/TestMessageGetter.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.runtime.test; import org.pentaho.di.i18n.BaseMessages; import org.pentaho.runtime.test.i18n.MessageGetter; import static org.junit.Assert.assertNotEquals; import static org.junit.Assert.assertTrue; /** * Created by bryan on 8/21/15. */ public class TestMessageGetter implements MessageGetter { private final Class PKG; public TestMessageGetter( Class PKG ) { this.PKG = PKG; } @Override public String getMessage( String key, String... parameters ) { StringBuilder stringBuilder = new StringBuilder( "BaseMessages equivalent: BaseMessage.getMessage( " ); stringBuilder.append( PKG ); stringBuilder.append( ", \"" ); stringBuilder.append( key ); stringBuilder.append( "\"" ); String realValue; boolean hasParameters = parameters != null && parameters.length > 0; if ( hasParameters ) { realValue = BaseMessages.getString( PKG, key, parameters ); stringBuilder.append( ", \"" ); for ( String parameter : parameters ) { stringBuilder.append( parameter ); stringBuilder.append( "\", \"" ); } stringBuilder.setLength( stringBuilder.length() - 3 ); } else { realValue = BaseMessages.getString( PKG, key ); } assertNotEquals( "!" + key + "!", realValue ); stringBuilder.append( " )" ); if ( hasParameters ) { for ( String parameter : parameters ) { assertTrue( "Expected " + realValue + " to contain \"" + parameter + "\"", realValue.contains( parameter ) ); } } return stringBuilder.toString(); } } ================================================ FILE: api/runtimeTest/src/test/java/org/pentaho/runtime/test/TestMessageGetterFactory.java ================================================ /*! 
****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.runtime.test; import org.pentaho.runtime.test.i18n.MessageGetter; import org.pentaho.runtime.test.i18n.MessageGetterFactory; /** * Created by bryan on 8/21/15. */ public class TestMessageGetterFactory implements MessageGetterFactory { @Override public MessageGetter create( Class PKG ) { return new TestMessageGetter( PKG ); } } ================================================ FILE: api/runtimeTest/src/test/java/org/pentaho/runtime/test/action/impl/HelpUrlPayloadTest.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.runtime.test.action.impl; import org.junit.Before; import org.junit.Test; import org.pentaho.runtime.test.TestMessageGetterFactory; import org.pentaho.runtime.test.i18n.MessageGetter; import static org.junit.Assert.assertEquals; /** * Created by bryan on 9/10/15. */ public class HelpUrlPayloadTest { private MessageGetter messageGetter; private String title; private String header; private String url; private HelpUrlPayload helpUrlPayload; @Before public void setup() { TestMessageGetterFactory messageGetterFactory = new TestMessageGetterFactory(); messageGetter = messageGetterFactory.create( HelpUrlPayload.class ); title = "title"; header = "header"; url = "url"; helpUrlPayload = new HelpUrlPayload( messageGetterFactory, title, header, url ); } @Test public void testGetTitle() { assertEquals( title, helpUrlPayload.getTitle() ); } @Test public void testGetHeader() { assertEquals( header, helpUrlPayload.getHeader() ); } @Test public void testGetUrl() { assertEquals( url, helpUrlPayload.getUrl() ); } @Test public void testGetMessage() { assertEquals( messageGetter.getMessage( HelpUrlPayload.HELP_URL_PAYLOAD_MESSAGE, url ), helpUrlPayload.getMessage() ); } } ================================================ FILE: api/runtimeTest/src/test/java/org/pentaho/runtime/test/action/impl/LoggingRuntimeTestActionHandlerImplTest.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. 
* * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.runtime.test.action.impl; import org.apache.logging.log4j.Logger; import org.junit.Before; import org.junit.Test; import org.pentaho.runtime.test.TestMessageGetterFactory; import org.pentaho.runtime.test.action.RuntimeTestAction; import org.pentaho.runtime.test.action.RuntimeTestActionPayload; import org.pentaho.runtime.test.i18n.MessageGetter; import org.pentaho.runtime.test.result.RuntimeTestEntrySeverity; import static org.junit.Assert.assertTrue; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; /** * Created by bryan on 9/10/15. */ public class LoggingRuntimeTestActionHandlerImplTest { private MessageGetter messageGetter; private Logger logger; private LoggingRuntimeTestActionHandlerImpl loggingRuntimeTestActionHandler; private RuntimeTestAction runtimeTestAction; private String actionDescription; private String actionName; private RuntimeTestActionPayload runtimeTestActionPayload; @Before public void setup() { TestMessageGetterFactory messageGetterFactory = new TestMessageGetterFactory(); messageGetter = messageGetterFactory.create( LoggingRuntimeTestActionHandlerImpl.class ); logger = mock( Logger.class ); loggingRuntimeTestActionHandler = new LoggingRuntimeTestActionHandlerImpl( messageGetterFactory, logger ); runtimeTestAction = mock( RuntimeTestAction.class ); actionName = "actionName"; actionDescription = "actionDescription"; runtimeTestActionPayload = mock( RuntimeTestActionPayload.class ); } @Test public void testCanHandle() { // Should work with least specific payload as it always returns true when( runtimeTestAction.getPayload() ).thenReturn( mock( RuntimeTestActionPayload.class ) ); assertTrue( loggingRuntimeTestActionHandler.canHandle( runtimeTestAction ) ); } private void handleSetup( RuntimeTestEntrySeverity severity ) { when( runtimeTestAction.getSeverity() ).thenReturn( severity ); when( runtimeTestAction.getName() ).thenReturn( actionName ); when( runtimeTestAction.getDescription() ).thenReturn( actionDescription ); when( runtimeTestAction.getPayload() ).thenReturn( runtimeTestActionPayload ); loggingRuntimeTestActionHandler.handle( runtimeTestAction ); } @Test public void testHandleNullSeverity() { handleSetup( null ); verify( logger ).warn( messageGetter .getMessage( LoggingRuntimeTestActionHandlerImpl.LOGGING_RUNTIME_TEST_ACTION_HANDLER_IMPL_MISSING_SEVERITY, actionName, actionDescription, runtimeTestActionPayload.toString() ) ); } @Test public void testHandleDebugSeverity() { handleSetup( RuntimeTestEntrySeverity.DEBUG ); verify( logger ).debug( messageGetter .getMessage( LoggingRuntimeTestActionHandlerImpl.LOGGING_RUNTIME_TEST_ACTION_HANDLER_IMPL, actionName, actionDescription, runtimeTestActionPayload.toString() ) ); } @Test public void testHandleInfoSeverity() { handleSetup( RuntimeTestEntrySeverity.INFO ); verify( logger ).info( messageGetter .getMessage( LoggingRuntimeTestActionHandlerImpl.LOGGING_RUNTIME_TEST_ACTION_HANDLER_IMPL, actionName, actionDescription, runtimeTestActionPayload.toString() ) ); } @Test public void testHandleWarningSeverity() { handleSetup( RuntimeTestEntrySeverity.WARNING ); verify( logger ).warn( messageGetter .getMessage( LoggingRuntimeTestActionHandlerImpl.LOGGING_RUNTIME_TEST_ACTION_HANDLER_IMPL, actionName, actionDescription, runtimeTestActionPayload.toString() ) ); } @Test public void testHandleSkippedSeverity() { handleSetup( 
RuntimeTestEntrySeverity.SKIPPED ); verify( logger ).warn( messageGetter .getMessage( LoggingRuntimeTestActionHandlerImpl.LOGGING_RUNTIME_TEST_ACTION_HANDLER_IMPL, actionName, actionDescription, runtimeTestActionPayload.toString() ) ); } @Test public void testHandleErrorSeverity() { handleSetup( RuntimeTestEntrySeverity.ERROR ); verify( logger ).error( messageGetter .getMessage( LoggingRuntimeTestActionHandlerImpl.LOGGING_RUNTIME_TEST_ACTION_HANDLER_IMPL, actionName, actionDescription, runtimeTestActionPayload.toString() ) ); } @Test public void testHandleFatalSeverity() { handleSetup( RuntimeTestEntrySeverity.FATAL ); verify( logger ).error( messageGetter .getMessage( LoggingRuntimeTestActionHandlerImpl.LOGGING_RUNTIME_TEST_ACTION_HANDLER_IMPL, actionName, actionDescription, runtimeTestActionPayload.toString() ) ); } } ================================================ FILE: api/runtimeTest/src/test/java/org/pentaho/runtime/test/action/impl/RuntimeTestActionImplTest.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.runtime.test.action.impl; import org.junit.Before; import org.junit.Test; import org.pentaho.runtime.test.action.RuntimeTestActionPayload; import org.pentaho.runtime.test.result.RuntimeTestEntrySeverity; import static org.junit.Assert.assertEquals; import static org.mockito.Mockito.mock; /** * Created by bryan on 9/10/15. */ public class RuntimeTestActionImplTest { private String name; private String description; private RuntimeTestEntrySeverity severity; private RuntimeTestActionPayload payload; private RuntimeTestActionImpl runtimeTestAction; @Before public void setup() { name = "name"; description = "description"; severity = RuntimeTestEntrySeverity.DEBUG; payload = mock( RuntimeTestActionPayload.class ); runtimeTestAction = new RuntimeTestActionImpl( name, description, severity, payload ); } @Test public void testGetName() { assertEquals( name, runtimeTestAction.getName() ); } @Test public void testGetDescription() { assertEquals( description, runtimeTestAction.getDescription() ); } @Test public void testGetSeverity() { assertEquals( severity, runtimeTestAction.getSeverity() ); } @Test public void testGetPayload() { assertEquals( payload, runtimeTestAction.getPayload() ); } } ================================================ FILE: api/runtimeTest/src/test/java/org/pentaho/runtime/test/action/impl/RuntimeTestActionServiceImplTest.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. 
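A sketch of attaching an action with a help-URL payload to a result entry, using the constructors exercised by the tests above; the names, wording, and URL are invented:

import org.pentaho.runtime.test.action.RuntimeTestAction;
import org.pentaho.runtime.test.action.impl.HelpUrlPayload;
import org.pentaho.runtime.test.action.impl.RuntimeTestActionImpl;
import org.pentaho.runtime.test.i18n.MessageGetterFactory;
import org.pentaho.runtime.test.i18n.impl.BaseMessagesMessageGetterFactoryImpl;
import org.pentaho.runtime.test.result.RuntimeTestEntrySeverity;
import org.pentaho.runtime.test.result.RuntimeTestResultEntry;
import org.pentaho.runtime.test.test.impl.RuntimeTestResultEntryImpl;

public class ActionSketch {
  public static void main( String[] args ) {
    MessageGetterFactory messageGetterFactory = new BaseMessagesMessageGetterFactoryImpl();
    RuntimeTestAction troubleshoot = new RuntimeTestActionImpl(
      "Troubleshoot", "Open the gateway troubleshooting guide", RuntimeTestEntrySeverity.WARNING,
      new HelpUrlPayload( messageGetterFactory, "Gateway help", "Connection failed",
        "https://docs.example.com/gateway-troubleshooting" ) );
    RuntimeTestResultEntry entry = new RuntimeTestResultEntryImpl( RuntimeTestEntrySeverity.ERROR,
      "Gateway unreachable", "Could not reach the configured gateway", troubleshoot );
    System.out.println( entry.getAction().getName() ); // Troubleshoot
  }
}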
* * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.runtime.test.action.impl; import org.junit.Before; import org.junit.Test; import org.pentaho.runtime.test.action.RuntimeTestAction; import org.pentaho.runtime.test.action.RuntimeTestActionHandler; import java.util.Arrays; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.never; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.verifyNoMoreInteractions; import static org.mockito.Mockito.when; /** * Created by bryan on 9/10/15. */ public class RuntimeTestActionServiceImplTest { private RuntimeTestActionHandler runtimeTestActionHandler; private RuntimeTestActionHandler defaultHandler; private RuntimeTestActionServiceImpl runtimeTestActionService; private RuntimeTestAction runtimeTestAction; @Before public void setup() { runtimeTestActionHandler = mock( RuntimeTestActionHandler.class ); defaultHandler = mock( RuntimeTestActionHandler.class ); runtimeTestActionService = new RuntimeTestActionServiceImpl( Arrays.asList( runtimeTestActionHandler ), defaultHandler ); runtimeTestAction = mock( RuntimeTestAction.class ); } @Test public void testHandleDefault() { when( runtimeTestActionHandler.canHandle( runtimeTestAction ) ).thenReturn( false ); runtimeTestActionService.handle( runtimeTestAction ); verify( runtimeTestActionHandler, never() ).handle( runtimeTestAction ); verify( defaultHandler ).handle( runtimeTestAction ); verifyNoMoreInteractions( defaultHandler ); } @Test public void testHandleNormal() { when( runtimeTestActionHandler.canHandle( runtimeTestAction ) ).thenReturn( true ); runtimeTestActionService.handle( runtimeTestAction ); verify( runtimeTestActionHandler ).handle( runtimeTestAction ); verifyNoMoreInteractions( defaultHandler ); } } ================================================ FILE: api/runtimeTest/src/test/java/org/pentaho/runtime/test/i18n/impl/BaseMessagesMessageGetterFactoryImplTest.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.runtime.test.i18n.impl; import org.junit.Before; import org.junit.Test; import static org.junit.Assert.assertTrue; /** * Created by bryan on 8/27/15. */ public class BaseMessagesMessageGetterFactoryImplTest { private BaseMessagesMessageGetterFactoryImpl baseMessagesMessageGetterFactory; @Before public void setup() { baseMessagesMessageGetterFactory = new BaseMessagesMessageGetterFactoryImpl(); } @Test public void testCreate() { assertTrue( baseMessagesMessageGetterFactory .create( BaseMessagesMessageGetterFactoryImplTest.class ) instanceof BaseMessagesMessageGetterImpl ); } } ================================================ FILE: api/runtimeTest/src/test/java/org/pentaho/runtime/test/i18n/impl/BaseMessagesMessageGetterImplTest.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. 
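A sketch of wiring the action service with the logging handler as the catch-all default, assuming a log4j2 Logger obtained through LogManager; the handler list is left empty so every action falls through to the default, and the action itself reuses invented names from the previous sketch:

import java.util.Collections;
import org.apache.logging.log4j.LogManager;
import org.pentaho.runtime.test.action.RuntimeTestAction;
import org.pentaho.runtime.test.action.RuntimeTestActionHandler;
import org.pentaho.runtime.test.action.impl.HelpUrlPayload;
import org.pentaho.runtime.test.action.impl.LoggingRuntimeTestActionHandlerImpl;
import org.pentaho.runtime.test.action.impl.RuntimeTestActionImpl;
import org.pentaho.runtime.test.action.impl.RuntimeTestActionServiceImpl;
import org.pentaho.runtime.test.i18n.MessageGetterFactory;
import org.pentaho.runtime.test.i18n.impl.BaseMessagesMessageGetterFactoryImpl;
import org.pentaho.runtime.test.result.RuntimeTestEntrySeverity;

public class ActionServiceSketch {
  public static void main( String[] args ) {
    MessageGetterFactory messageGetterFactory = new BaseMessagesMessageGetterFactoryImpl();
    RuntimeTestActionServiceImpl actionService = new RuntimeTestActionServiceImpl(
      Collections.<RuntimeTestActionHandler>emptyList(),
      new LoggingRuntimeTestActionHandlerImpl( messageGetterFactory,
        LogManager.getLogger( ActionServiceSketch.class ) ) );
    RuntimeTestAction action = new RuntimeTestActionImpl( "Troubleshoot", "Open the docs",
      RuntimeTestEntrySeverity.WARNING,
      new HelpUrlPayload( messageGetterFactory, "Help", "Connection failed", "https://docs.example.com/gateway" ) );
    actionService.handle( action ); // logged at WARN because the action severity is WARNING
  }
}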
* * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.runtime.test.i18n.impl; import org.junit.Before; import org.junit.Test; import static org.junit.Assert.assertEquals; /** * Created by bryan on 8/27/15. */ public class BaseMessagesMessageGetterImplTest { private BaseMessagesMessageGetterImpl baseMessagesMessageGetter; @Before public void setup() { baseMessagesMessageGetter = new BaseMessagesMessageGetterImpl( BaseMessagesMessageGetterFactoryImplTest.class ); } @Test public void testGetMesssage() { String message = "message"; String expected = "!" + message + "!"; assertEquals( expected, baseMessagesMessageGetter.getMessage( message ) ); assertEquals( expected, baseMessagesMessageGetter.getMessage( message, "testParam" ) ); } } ================================================ FILE: api/runtimeTest/src/test/java/org/pentaho/runtime/test/impl/RuntimeTestComparatorTest.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.runtime.test.impl; import org.junit.Before; import org.junit.Test; import org.pentaho.runtime.test.RuntimeTest; import java.util.HashMap; import java.util.Map; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; /** * Created by bryan on 8/20/15. */ public class RuntimeTestComparatorTest { private RuntimeTestComparator runtimeTestComparator; private Map orderedModules; private RuntimeTest runtimeTest1; private RuntimeTest runtimeTest2; private String d = "d"; private String c = "c"; private String a = "a"; private String b = "b"; @Before public void setup() { orderedModules = new HashMap<>(); orderedModules.put( d, 0 ); orderedModules.put( c, 1 ); orderedModules.put( a, 2 ); orderedModules.put( b, 3 ); runtimeTestComparator = new RuntimeTestComparator( orderedModules ); runtimeTest1 = mock( RuntimeTest.class ); runtimeTest2 = mock( RuntimeTest.class ); } @Test public void testModuleSameOrderedIdsSame() { when( runtimeTest1.getModule() ).thenReturn( a ); when( runtimeTest2.getModule() ).thenReturn( a ); when( runtimeTest1.getId() ).thenReturn( b ); when( runtimeTest2.getId() ).thenReturn( b ); assertEquals( 0, runtimeTestComparator.compare( runtimeTest1, runtimeTest2 ) ); } @Test public void testModuleSameOrderedIdsDifferent1() { when( runtimeTest1.getModule() ).thenReturn( a ); when( runtimeTest2.getModule() ).thenReturn( a ); when( runtimeTest1.getId() ).thenReturn( a ); when( runtimeTest2.getId() ).thenReturn( b ); assertTrue( runtimeTestComparator.compare( runtimeTest1, runtimeTest2 ) < 0 ); } @Test public void testModuleSameOrderedIdsDifferent2() { when( runtimeTest1.getModule() ).thenReturn( a ); when( runtimeTest2.getModule() ).thenReturn( a ); when( runtimeTest1.getId() ).thenReturn( b ); when( runtimeTest2.getId() ).thenReturn( a ); assertTrue( runtimeTestComparator.compare( runtimeTest1, runtimeTest2 ) > 0 ); } @Test public void testModuleSameUnrderedIdsSame() { when( runtimeTest1.getModule() ).thenReturn( "e" ); when( runtimeTest2.getModule() ).thenReturn( "e" ); when( 
runtimeTest1.getId() ).thenReturn( b ); when( runtimeTest2.getId() ).thenReturn( b ); assertEquals( 0, runtimeTestComparator.compare( runtimeTest1, runtimeTest2 ) ); } @Test public void testModuleDifferentOrdered() { when( runtimeTest1.getModule() ).thenReturn( a ); when( runtimeTest2.getModule() ).thenReturn( b ); when( runtimeTest1.getId() ).thenReturn( d ); when( runtimeTest2.getId() ).thenReturn( c ); assertTrue( runtimeTestComparator.compare( runtimeTest1, runtimeTest2 ) < 0 ); } @Test public void testModuleDifferentFirstOrdered() { orderedModules.remove( a ); when( runtimeTest1.getModule() ).thenReturn( b ); when( runtimeTest2.getModule() ).thenReturn( a ); when( runtimeTest1.getId() ).thenReturn( d ); when( runtimeTest2.getId() ).thenReturn( c ); assertTrue( runtimeTestComparator.compare( runtimeTest1, runtimeTest2 ) < 0 ); } @Test public void testModuleDifferentSecondOrdered() { orderedModules.remove( b ); when( runtimeTest1.getModule() ).thenReturn( b ); when( runtimeTest2.getModule() ).thenReturn( a ); when( runtimeTest1.getId() ).thenReturn( d ); when( runtimeTest2.getId() ).thenReturn( c ); assertTrue( runtimeTestComparator.compare( runtimeTest1, runtimeTest2 ) > 0 ); } @Test public void testModuleDifferentNotOrdered() { orderedModules.remove( a ); orderedModules.remove( b ); when( runtimeTest1.getModule() ).thenReturn( a ); when( runtimeTest2.getModule() ).thenReturn( b ); when( runtimeTest1.getId() ).thenReturn( d ); when( runtimeTest2.getId() ).thenReturn( c ); assertTrue( runtimeTestComparator.compare( runtimeTest1, runtimeTest2 ) < 0 ); } } ================================================ FILE: api/runtimeTest/src/test/java/org/pentaho/runtime/test/impl/RuntimeTestRunnerTest.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.runtime.test.impl; import org.junit.After; import org.junit.Before; import org.junit.Test; import org.pentaho.runtime.test.RuntimeTest; import org.pentaho.runtime.test.RuntimeTestProgressCallback; import org.pentaho.runtime.test.RuntimeTestStatus; import org.pentaho.runtime.test.module.RuntimeTestModuleResults; import org.pentaho.runtime.test.result.RuntimeTestEntrySeverity; import org.pentaho.runtime.test.result.RuntimeTestResult; import org.pentaho.runtime.test.result.RuntimeTestResultEntry; import org.pentaho.runtime.test.result.RuntimeTestResultSummary; import org.pentaho.runtime.test.result.org.pentaho.runtime.test.result.impl.RuntimeTestResultSummaryImpl; import org.pentaho.runtime.test.test.impl.BaseRuntimeTest; import org.pentaho.runtime.test.test.impl.RuntimeTestResultEntryImpl; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.HashSet; import java.util.List; import java.util.Set; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.atomic.AtomicBoolean; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; /** * Created by bryan on 8/12/15. 
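A sketch of how calling code might sort its tests with the comparator exercised above, assuming the comparator and its map-based constructor are visible to the caller; the module names are invented and the test list is left as a placeholder:

import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.pentaho.runtime.test.RuntimeTest;
import org.pentaho.runtime.test.impl.RuntimeTestComparator;

public class ComparatorSketch {
  public static void main( String[] args ) {
    // Modules listed here sort by their index; unlisted modules sort after them and then by
    // module name, and ties within a module fall back to the test id.
    Map<String, Integer> orderedModules = new HashMap<>();
    orderedModules.put( "exampleHdfsModule", 0 );
    orderedModules.put( "exampleZookeeperModule", 1 );
    List<RuntimeTest> tests = new ArrayList<>(); // populate with the tests to display
    Collections.sort( tests, new RuntimeTestComparator( orderedModules ) );
  }
}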
*/ public class RuntimeTestRunnerTest { private ExecutorService executorService; private TestRuntimeTest moduleATestA; private TestRuntimeTest moduleATestB; private TestRuntimeTest moduleATestC; private TestRuntimeTest moduleBTestA; private TestRuntimeTest moduleBTestB; private TestRuntimeTest moduleBTestC; private Object objectUnderTest; private TestRuntimeTest unsatisfiableDependencyA; private TestRuntimeTest moduleCTestA; private TestRuntimeTest moduleATestD; private static Set dependenciesToIds( Set testRuntimeTests ) { Set result = new HashSet<>(); for ( TestRuntimeTest testRuntimeTest : testRuntimeTests ) { result.add( testRuntimeTest.getId() ); } return result; } @Before public void setup() { executorService = Executors.newCachedThreadPool(); RuntimeTestResultEntryImpl overallEntry = new RuntimeTestResultEntryImpl( RuntimeTestEntrySeverity.INFO, "testDesc", "testMessage" ); unsatisfiableDependencyA = new TestRuntimeTest( "unsatisfiableDependency", "unsatisfiableDependencyTestA", "Test A", new HashSet<>( Arrays.asList( new TestRuntimeTest( "fake-module", "fake-test-id", "fake-getName", new HashSet(), 5, overallEntry, new ArrayList(), false ) ) ), 5, overallEntry, new ArrayList(), false ); moduleATestA = new TestRuntimeTest( "moduleA", "moduleATestA", "Test A", new HashSet<>( Arrays.asList() ), 5, overallEntry, new ArrayList(), true ); moduleATestB = new TestRuntimeTest( "moduleA", "moduleATestB", "Test B", new HashSet<>( Arrays.asList( moduleATestA ) ), 5, overallEntry, new ArrayList(), true ); moduleATestC = new TestRuntimeTest( "moduleA", "moduleATestC", "Test C", new HashSet<>( Arrays.asList( moduleATestB ) ), 5, overallEntry, new ArrayList(), true ); moduleATestD = new TestRuntimeTest( "moduleA", "moduleATestD", "Test D", new HashSet<>( Arrays.asList( moduleATestB ) ), 5, overallEntry, new ArrayList(), true ); moduleBTestA = new TestRuntimeTest( "moduleB", "moduleBTestA", "Test A", new HashSet<>( Arrays.asList( moduleATestA ) ), 5, overallEntry, new ArrayList(), true ); moduleBTestB = new TestRuntimeTest( "moduleB", "moduleBTestB", "Test B", new HashSet<>( Arrays.asList( moduleATestC ) ), 5, overallEntry, new ArrayList(), true ); moduleBTestC = new TestRuntimeTest( "moduleB", "moduleBTestC", "Test C", new HashSet<>( Arrays.asList( moduleBTestB, moduleATestC ) ), 5, overallEntry, new ArrayList(), true ); moduleCTestA = new TestRuntimeTest( "moduleC", "moduleCTestA", "Test A", new HashSet<>( Arrays.asList( moduleBTestC, moduleATestC ) ), 5, overallEntry, new ArrayList(), true ); objectUnderTest = new Object(); } @After public void tearDown() { executorService.shutdown(); } @Test public void testSingleTestNoDependencies() { testScenario( Arrays.asList( moduleATestA ) ); } @Test public void testSingleTestWithDependencies() { testScenario( Arrays.asList( unsatisfiableDependencyA ) ); } @Test public void testModuleA() { testScenario( Arrays.asList( moduleATestA, moduleATestB, moduleATestC, moduleATestD ) ); } @Test public void testModuleAAndB() { testScenario( Arrays .asList( moduleATestA, moduleATestB, moduleATestC, moduleATestD, moduleBTestA, moduleBTestB, moduleBTestC ) ); } @Test public void testModuleAthruC() { testScenario( Arrays .asList( moduleATestA, moduleATestB, moduleATestC, moduleATestD, moduleBTestA, moduleBTestB, moduleBTestC, moduleCTestA ) ); } @Test public void testModuleAthruCUnsat() { testScenario( Arrays .asList( moduleATestA, moduleATestB, moduleATestC, moduleATestD, moduleBTestA, moduleBTestB, moduleBTestC, moduleCTestA, unsatisfiableDependencyA ) ); } private 
void testScenario( List runtimeTests ) { final List runtimeTestStatuses = Collections.synchronizedList( new ArrayList () ); final RuntimeTestProgressCallback runtimeTestProgressCallback = new RuntimeTestProgressCallback() { @Override public void onProgress( RuntimeTestStatus runtimeTestStatus ) { runtimeTestStatuses.add( runtimeTestStatus ); if ( runtimeTestStatus.isDone() ) { synchronized ( this ) { notifyAll(); } } } }; long before = System.currentTimeMillis(); new RuntimeTestRunner( runtimeTests, objectUnderTest, runtimeTestProgressCallback, executorService ).runTests(); synchronized ( runtimeTestProgressCallback ) { while ( runtimeTestStatuses.size() == 0 || !runtimeTestStatuses.get( runtimeTestStatuses.size() - 1 ).isDone() ) { try { runtimeTestProgressCallback.wait(); } catch ( InterruptedException e ) { // Ignore } } } long after = System.currentTimeMillis(); Set doneIds = new HashSet<>(); for ( int i = 0; i < runtimeTestStatuses.size(); i++ ) { RuntimeTestStatus runtimeTestStatus = runtimeTestStatuses.get( i ); if ( i < runtimeTestStatuses.size() - 1 ) { assertFalse( runtimeTestStatus.isDone() ); } else { assertTrue( runtimeTestStatus.isDone() ); } Set justDoneIds = new HashSet<>(); for ( RuntimeTestModuleResults runtimeTestModuleResults : runtimeTestStatus.getModuleResults() ) { Set outstandingIds = new HashSet<>(); Set runningIds = new HashSet<>(); for ( RuntimeTest runtimeTest : runtimeTestModuleResults.getOutstandingTests() ) { outstandingIds.add( runtimeTest.getId() ); } for ( RuntimeTest runtimeTest : runtimeTestModuleResults.getRunningTests() ) { runningIds.add( runtimeTest.getId() ); } Set resultIds = new HashSet<>(); for ( RuntimeTestResult runtimeTestResult : runtimeTestModuleResults.getRuntimeTestResults() ) { resultIds.add( runtimeTestResult.getRuntimeTest().getId() ); } // We should have results for all ids in module assertTrue( resultIds.containsAll( outstandingIds ) ); assertTrue( resultIds.containsAll( runningIds ) ); // No done ides should be in outstanding or running assertTrue( Collections.disjoint( doneIds, outstandingIds ) ); assertTrue( Collections.disjoint( doneIds, runningIds ) ); resultIds.removeAll( outstandingIds ); resultIds.removeAll( runningIds ); justDoneIds.addAll( resultIds ); } // All previously done ids should still be done assertTrue( justDoneIds.containsAll( doneIds ) ); // We should get called back for each one that finishes assertTrue( justDoneIds.size() == doneIds.size() || justDoneIds.size() == doneIds.size() + 1 ); doneIds.addAll( justDoneIds ); } for ( TestRuntimeTest runtimeTest : runtimeTests ) { assertTrue( doneIds.contains( runtimeTest.getId() ) ); runtimeTest.validateRunState(); } System.out.println( "Ran in " + ( after - before ) + " ms" ); System.out.flush(); } public class TestRuntimeTest extends BaseRuntimeTest { private final long delay; private final Set dependencies; private final AtomicBoolean hasRun; private final RuntimeTestResultEntry overallEntry; private final List runtimeTestResultEntries; private final boolean shouldRun; public TestRuntimeTest( String module, String id, String name, Set dependencies, long delay, RuntimeTestResultEntry overallEntry, List runtimeTestResultEntries, boolean shouldRun ) { super( Object.class, module, id, name, dependenciesToIds( dependencies ) ); this.delay = delay; this.dependencies = dependencies; this.overallEntry = overallEntry; this.runtimeTestResultEntries = runtimeTestResultEntries; this.shouldRun = shouldRun; hasRun = new AtomicBoolean( false ); } public String getLogName() { return 
getModule() + ":" + getId(); } @Override public RuntimeTestResultSummary runTest( Object objectUnderTest ) { assertTrue( shouldRun ); assertEquals( RuntimeTestRunnerTest.this.objectUnderTest, objectUnderTest ); String logName = getLogName(); System.out.println( "Running: " + logName ); for ( TestRuntimeTest dependency : dependencies ) { assertTrue( logName + " expected dependency " + dependency.getLogName() + " to have already run", dependency.hasRun.get() ); } try { Thread.sleep( delay ); } catch ( InterruptedException e ) { // Ignore } hasRun.set( true ); System.out.println( "Done running: " + logName ); return new RuntimeTestResultSummaryImpl( overallEntry, runtimeTestResultEntries ); } public void validateRunState() { String moduleString = getLogName(); assertEquals( "Expected " + moduleString + " hasRun value of " + shouldRun + " but was " + hasRun.get(), shouldRun, hasRun.get() ); System.out.println( "Got correct shouldRun value of " + shouldRun + " from " + moduleString ); } } } ================================================ FILE: api/runtimeTest/src/test/java/org/pentaho/runtime/test/impl/RuntimeTestStatusImplTest.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.runtime.test.impl; import org.junit.Before; import org.junit.Test; import org.pentaho.runtime.test.module.RuntimeTestModuleResults; import java.util.List; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; import static org.mockito.Mockito.mock; /** * Created by bryan on 8/20/15. 
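A sketch of driving the runner the way the scenario test above does, assuming the four-argument constructor is accessible to the caller; the test list reuses the invented PingTest from an earlier sketch and the object under test is a placeholder hostname:

import java.util.Arrays;
import java.util.List;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import org.pentaho.runtime.test.RuntimeTest;
import org.pentaho.runtime.test.RuntimeTestProgressCallback;
import org.pentaho.runtime.test.RuntimeTestStatus;
import org.pentaho.runtime.test.impl.RuntimeTestRunner;

public class RunnerSketch {
  public static void main( String[] args ) throws InterruptedException {
    List<RuntimeTest> tests = Arrays.<RuntimeTest>asList( new PingTest() );
    Object objectUnderTest = "namenode.example.com";
    ExecutorService executorService = Executors.newCachedThreadPool();
    CountDownLatch done = new CountDownLatch( 1 );
    RuntimeTestProgressCallback callback = new RuntimeTestProgressCallback() {
      @Override
      public void onProgress( RuntimeTestStatus runtimeTestStatus ) {
        // The final callback reports isDone() == true once every test has finished
        if ( runtimeTestStatus.isDone() ) {
          done.countDown();
        }
      }
    };
    new RuntimeTestRunner( tests, objectUnderTest, callback, executorService ).runTests();
    done.await();
    executorService.shutdown();
  }
}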
*/ public class RuntimeTestStatusImplTest { private List runtimeTestModuleResults; private RuntimeTestStatusImpl runtimeTestStatus; private int testsDone; private int testsRunning; private int testsOutstanding; private boolean done; @Before public void setup() { runtimeTestModuleResults = mock( List.class ); testsDone = 1011; testsRunning = 11213; testsOutstanding = 12213; done = true; initStatus(); } private void initStatus() { runtimeTestStatus = new RuntimeTestStatusImpl( runtimeTestModuleResults, testsDone, testsRunning, testsOutstanding, done ); } @Test public void testConstructor() { assertEquals( runtimeTestModuleResults, runtimeTestStatus.getModuleResults() ); assertTrue( runtimeTestStatus.isDone() ); assertEquals( testsDone, runtimeTestStatus.getTestsDone() ); assertEquals( testsRunning, runtimeTestStatus.getTestsRunning() ); assertEquals( testsOutstanding, runtimeTestStatus.getTestsOutstanding() ); done = false; initStatus(); assertEquals( runtimeTestModuleResults, runtimeTestStatus.getModuleResults() ); assertFalse( runtimeTestStatus.isDone() ); assertEquals( testsDone, runtimeTestStatus.getTestsDone() ); assertEquals( testsRunning, runtimeTestStatus.getTestsRunning() ); assertEquals( testsOutstanding, runtimeTestStatus.getTestsOutstanding() ); } @Test public void testToString() { assertTrue( runtimeTestStatus.toString().contains( runtimeTestModuleResults.toString() ) ); assertTrue( runtimeTestStatus.toString().contains( Integer.toString( testsDone ) ) ); assertTrue( runtimeTestStatus.toString().contains( Integer.toString( testsRunning ) ) ); assertTrue( runtimeTestStatus.toString().contains( Integer.toString( testsOutstanding ) ) ); assertTrue( runtimeTestStatus.toString().contains( "done=" + done ) ); done = false; initStatus(); assertTrue( runtimeTestStatus.toString().contains( runtimeTestModuleResults.toString() ) ); assertTrue( runtimeTestStatus.toString().contains( Integer.toString( testsDone ) ) ); assertTrue( runtimeTestStatus.toString().contains( Integer.toString( testsRunning ) ) ); assertTrue( runtimeTestStatus.toString().contains( Integer.toString( testsOutstanding ) ) ); assertTrue( runtimeTestStatus.toString().contains( "done=" + done ) ); } } ================================================ FILE: api/runtimeTest/src/test/java/org/pentaho/runtime/test/impl/RuntimeTesterImplTest.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.runtime.test.impl; import org.junit.Before; import org.junit.Test; import org.mockito.ArgumentCaptor; import org.pentaho.runtime.test.RuntimeTest; import org.pentaho.runtime.test.RuntimeTestProgressCallback; import java.util.ArrayList; import java.util.Arrays; import java.util.List; import java.util.concurrent.ExecutorService; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; /** * Created by bryan on 8/20/15. 
*/ public class RuntimeTesterImplTest { private RuntimeTesterImpl runtimeTester; private List runtimeTests; private ExecutorService executorService; private String orderedModulesString; private RuntimeTestRunner.Factory runtimeTestRunnerFactory; @Before public void setup() { runtimeTests = new ArrayList<>( Arrays.asList( mock( RuntimeTest.class ) ) ); executorService = mock( ExecutorService.class ); orderedModulesString = "test-modules"; runtimeTestRunnerFactory = mock( RuntimeTestRunner.Factory.class ); runtimeTester = new RuntimeTesterImpl( runtimeTests, executorService, orderedModulesString, runtimeTestRunnerFactory ); } @Test public void testRunTests() { Object objectUnderTest = new Object(); RuntimeTestProgressCallback runtimeTestProgressCallback = mock( RuntimeTestProgressCallback.class ); runtimeTester.runtimeTest( objectUnderTest, runtimeTestProgressCallback ); ArgumentCaptor runnableArgumentCaptor = ArgumentCaptor.forClass( Runnable.class ); verify( executorService ).submit( runnableArgumentCaptor.capture() ); RuntimeTestRunner runtimeTestRunner = mock( RuntimeTestRunner.class ); when( runtimeTestRunnerFactory.create( runtimeTests, objectUnderTest, runtimeTestProgressCallback, executorService ) ) .thenReturn( runtimeTestRunner ); runnableArgumentCaptor.getValue().run(); verify( runtimeTestRunner ).runTests(); } } ================================================ FILE: api/runtimeTest/src/test/java/org/pentaho/runtime/test/module/impl/RuntimeTestModuleResultsImplTest.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.runtime.test.module.impl; import org.junit.Before; import org.junit.Test; import org.pentaho.runtime.test.RuntimeTest; import org.pentaho.runtime.test.result.RuntimeTestEntrySeverity; import org.pentaho.runtime.test.result.RuntimeTestResult; import org.pentaho.runtime.test.result.RuntimeTestResultEntry; import java.util.ArrayList; import java.util.Arrays; import java.util.HashSet; import java.util.List; import java.util.Set; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; /** * Created by bryan on 8/20/15. 
*/ public class RuntimeTestModuleResultsImplTest { private String name; private List runtimeTestResults; private Set runningTests; private Set outstandingTests; private RuntimeTestModuleResultsImpl runtimeTestModuleResults; private RuntimeTestResult runtimeTestResult; private RuntimeTest runningTest; private RuntimeTest outstandingTest; private RuntimeTestEntrySeverity maxSeverity; @Before public void setup() { name = "testName"; runtimeTestResult = mock( RuntimeTestResult.class ); when( runtimeTestResult.isDone() ).thenReturn( true ); maxSeverity = RuntimeTestEntrySeverity.INFO; RuntimeTestResultEntry runtimeTestResultEntry = mock( RuntimeTestResultEntry.class ); when( runtimeTestResult.getOverallStatusEntry() ).thenReturn( runtimeTestResultEntry ); when( runtimeTestResultEntry.getSeverity() ).thenReturn( maxSeverity ); runtimeTestResults = new ArrayList<>( Arrays.asList( runtimeTestResult ) ); runningTest = mock( RuntimeTest.class ); runningTests = new HashSet<>( Arrays.asList( runningTest ) ); outstandingTest = mock( RuntimeTest.class ); outstandingTests = new HashSet<>( Arrays.asList( outstandingTest ) ); runtimeTestModuleResults = new RuntimeTestModuleResultsImpl( name, runtimeTestResults, runningTests, outstandingTests ); } @Test public void testName() { assertEquals( name, runtimeTestModuleResults.getName() ); } @Test public void testGetRuntimeTestResults() { assertEquals( runtimeTestResults, runtimeTestModuleResults.getRuntimeTestResults() ); } @Test public void testGetRunningTests() { assertEquals( runningTests, runtimeTestModuleResults.getRunningTests() ); } @Test public void testGetOutstandingTests() { assertEquals( outstandingTests, runtimeTestModuleResults.getOutstandingTests() ); } @Test public void testGetMaxSeverity() { assertEquals( maxSeverity, runtimeTestModuleResults.getMaxSeverity() ); } @Test public void testToString() { String string = runtimeTestModuleResults.toString(); assertTrue( string.contains( name ) ); assertTrue( string.contains( runtimeTestResult.toString() ) ); assertTrue( string.contains( runningTest.toString() ) ); assertTrue( string.contains( outstandingTest.toString() ) ); assertTrue( string.contains( maxSeverity.toString() ) ); } } ================================================ FILE: api/runtimeTest/src/test/java/org/pentaho/runtime/test/network/impl/ConnectivityTestImplTest.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.runtime.test.network.impl; import org.junit.Before; import org.junit.Test; import org.pentaho.runtime.test.TestMessageGetterFactory; import org.pentaho.runtime.test.i18n.MessageGetter; import org.pentaho.runtime.test.i18n.MessageGetterFactory; import org.pentaho.runtime.test.result.RuntimeTestEntrySeverity; import java.io.IOException; import java.net.InetAddress; import java.net.Socket; import java.net.UnknownHostException; import static org.mockito.ArgumentMatchers.anyInt; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; import static org.pentaho.runtime.test.RuntimeTestEntryUtil.verifyRuntimeTestResultEntry; /** * Created by bryan on 8/21/15. 
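* Exercises {@code ConnectivityTestImpl.runTest()} for blank host and port values, the high-availability fallback, non-numeric ports, unreachable and unknown hosts, socket failures, and the successful connection path.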
*/ public class ConnectivityTestImplTest { private String hostname; private String port; private boolean haPossible; private RuntimeTestEntrySeverity severityOfFailures; private ConnectivityTestImpl.SocketFactory socketFactory; private ConnectivityTestImpl.InetAddressFactory inetAddressFactory; private ConnectivityTestImpl connectTest; private MessageGetterFactory messageGetterFactory; private MessageGetter messageGetter; private InetAddress inetAddress; private Socket socket; @Before public void setup() throws IOException { messageGetterFactory = new TestMessageGetterFactory(); messageGetter = messageGetterFactory.create( ConnectivityTestImpl.class ); hostname = "hostname"; port = "89"; haPossible = false; severityOfFailures = RuntimeTestEntrySeverity.WARNING; socketFactory = mock( ConnectivityTestImpl.SocketFactory.class ); socket = mock( Socket.class ); when( socketFactory.create( hostname, Integer.valueOf( port ) ) ).thenReturn( socket ); inetAddressFactory = mock( ConnectivityTestImpl.InetAddressFactory.class ); inetAddress = mock( InetAddress.class ); when( inetAddressFactory.create( hostname ) ).thenReturn( inetAddress ); when( inetAddress.isReachable( anyInt() ) ).thenReturn( true ); init(); } private void init() { connectTest = new ConnectivityTestImpl( messageGetterFactory, hostname, port, haPossible, severityOfFailures, socketFactory, inetAddressFactory ); } @Test public void testBlankHostname() { hostname = ""; init(); verifyRuntimeTestResultEntry( connectTest.runTest(), severityOfFailures, messageGetter.getMessage( ConnectivityTestImpl.CONNECT_TEST_HOST_BLANK_DESC ), messageGetter.getMessage( ConnectivityTestImpl.CONNECT_TEST_HOST_BLANK_MESSAGE ) ); } @Test public void testBlankPortNoHa() { port = ""; init(); verifyRuntimeTestResultEntry( connectTest.runTest(), severityOfFailures, messageGetter.getMessage( ConnectivityTestImpl.CONNECT_TEST_PORT_BLANK_DESC ), messageGetter.getMessage( ConnectivityTestImpl.CONNECT_TEST_PORT_BLANK_MESSAGE ) ); } @Test public void testBlankPortHa() { port = ""; haPossible = true; init(); verifyRuntimeTestResultEntry( connectTest.runTest(), RuntimeTestEntrySeverity.INFO, messageGetter.getMessage( ConnectivityTestImpl.CONNECT_TEST_HA_DESC ), messageGetter.getMessage( ConnectivityTestImpl.CONNECT_TEST_HA_MESSAGE, hostname ) ); } @Test public void testNonNumericPort() { port = "abc"; haPossible = true; init(); verifyRuntimeTestResultEntry( connectTest.runTest(), RuntimeTestEntrySeverity.FATAL, messageGetter.getMessage( ConnectivityTestImpl.CONNECT_TEST_PORT_NUMBER_FORMAT_DESC ), messageGetter.getMessage( ConnectivityTestImpl.CONNECT_TEST_PORT_NUMBER_FORMAT_MESSAGE, port ), NumberFormatException.class ); } @Test public void testUnreachableHostname() throws IOException { inetAddressFactory = mock( ConnectivityTestImpl.InetAddressFactory.class ); inetAddress = mock( InetAddress.class ); when( inetAddressFactory.create( hostname ) ).thenReturn( inetAddress ); when( inetAddress.isReachable( anyInt() ) ).thenReturn( false ); init(); verifyRuntimeTestResultEntry( connectTest.runTest(), severityOfFailures, messageGetter.getMessage( ConnectivityTestImpl.CONNECT_TEST_UNREACHABLE_DESC, hostname ), messageGetter.getMessage( ConnectivityTestImpl.CONNECT_TEST_UNREACHABLE_MESSAGE, hostname ) ); } @Test public void testUnknownHostException() throws IOException { inetAddressFactory = mock( ConnectivityTestImpl.InetAddressFactory.class ); inetAddress = mock( InetAddress.class ); when( inetAddressFactory.create( hostname ) ).thenReturn( inetAddress ); when( 
inetAddress.isReachable( anyInt() ) ).thenThrow( new UnknownHostException() ); init(); verifyRuntimeTestResultEntry( connectTest.runTest(), severityOfFailures, messageGetter.getMessage( ConnectivityTestImpl.CONNECT_TEST_UNKNOWN_HOSTNAME_DESC ), messageGetter.getMessage( ConnectivityTestImpl.CONNECT_TEST_UNKNOWN_HOSTNAME_MESSAGE, hostname ), UnknownHostException.class ); } @Test public void testReachableIOException() throws IOException { inetAddressFactory = mock( ConnectivityTestImpl.InetAddressFactory.class ); inetAddress = mock( InetAddress.class ); when( inetAddressFactory.create( hostname ) ).thenReturn( inetAddress ); when( inetAddress.isReachable( anyInt() ) ).thenThrow( new IOException() ); init(); verifyRuntimeTestResultEntry( connectTest.runTest(), severityOfFailures, messageGetter.getMessage( ConnectivityTestImpl.CONNECT_TEST_NETWORK_ERROR_DESC ), messageGetter.getMessage( ConnectivityTestImpl.CONNECT_TEST_NETWORK_ERROR_MESSAGE, hostname, port ), IOException.class ); } @Test public void testSocketIOException() throws IOException { when( socketFactory.create( hostname, Integer.valueOf( port ) ) ).thenThrow( new IOException() ); init(); verifyRuntimeTestResultEntry( connectTest.runTest(), severityOfFailures, messageGetter.getMessage( ConnectivityTestImpl.CONNECT_TEST_CONNECT_FAIL_DESC ), messageGetter.getMessage( ConnectivityTestImpl.CONNECT_TEST_CONNECT_FAIL_MESSAGE, hostname, port ), IOException.class ); } @Test public void testSuccess() throws IOException { verifyRuntimeTestResultEntry( connectTest.runTest(), RuntimeTestEntrySeverity.INFO, messageGetter.getMessage( ConnectivityTestImpl.CONNECT_TEST_CONNECT_SUCCESS_DESC ), messageGetter.getMessage( ConnectivityTestImpl.CONNECT_TEST_CONNECT_SUCCESS_MESSAGE, hostname, port ) ); verify( socket ).close(); } } ================================================ FILE: api/runtimeTest/src/test/java/org/pentaho/runtime/test/network/impl/GatewayConnectivityTestImplTest.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.runtime.test.network.impl; import org.apache.http.HttpResponse; import org.apache.http.StatusLine; import org.apache.http.client.CredentialsProvider; import org.apache.http.client.HttpClient; import org.apache.http.client.methods.HttpUriRequest; import org.apache.http.impl.client.HttpClients; import org.apache.http.protocol.HttpContext; import org.junit.Before; import org.junit.Test; import org.mockito.Mockito; import org.pentaho.runtime.test.TestMessageGetterFactory; import org.pentaho.runtime.test.i18n.MessageGetter; import org.pentaho.runtime.test.i18n.MessageGetterFactory; import org.pentaho.runtime.test.result.RuntimeTestEntrySeverity; import javax.net.ssl.SSLContext; import javax.net.ssl.SSLException; import java.io.IOException; import java.net.URI; import java.net.UnknownHostException; import java.security.KeyManagementException; import java.security.NoSuchAlgorithmException; import static org.mockito.ArgumentMatchers.any; import static org.mockito.Mockito.*; import static org.pentaho.runtime.test.RuntimeTestEntryUtil.verifyRuntimeTestResultEntry; /** * Created by dstepanov on 29/04/17. 
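* Runs {@code GatewayConnectivityTestImpl} against a mocked {@code HttpClient}, covering HTTP 200/401/403/404 and unknown status codes as well as the IO, SSL, and TLS-context failure paths.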
*/ public class GatewayConnectivityTestImplTest { public static final String HTTPS = "https://"; public static final String HTTP = "http://"; public static final String KETTLE_KNOX_IGNORE_SSL = "KETTLE_KNOX_IGNORE_SSL"; private String hostname; private String port; private RuntimeTestEntrySeverity severityOfFailures; private ConnectivityTestImpl connectTest; private MessageGetterFactory messageGetterFactory; private MessageGetter messageGetter; private URI uri; private String path; private String topology; private String user; private String password; private HttpClient httpClient; @Before public void setup() throws IOException { messageGetterFactory = new TestMessageGetterFactory(); messageGetter = messageGetterFactory.create( ConnectivityTestImpl.class ); hostname = "hostname"; port = "8443"; user = "user"; password = "password"; topology = "/gateway/default"; path = "/testPath"; uri = URI.create( HTTPS + hostname + ":" + port + topology ); severityOfFailures = RuntimeTestEntrySeverity.WARNING; httpClient = mock( HttpClient.class, Mockito.CALLS_REAL_METHODS ); HttpResponse httpResponseMock = mock(HttpResponse.class); StatusLine statusLineMock = mock(StatusLine.class); doReturn( httpResponseMock ).when( httpClient ).execute( any() ); doReturn( httpResponseMock ).when( httpClient ).execute( any( HttpUriRequest.class ), any( HttpContext.class) ); doReturn( statusLineMock ).when( httpResponseMock ).getStatusLine(); doReturn( 200 ).when( statusLineMock ).getStatusCode(); init(); System.setProperty( KETTLE_KNOX_IGNORE_SSL, "false" ); } private void init() { connectTest = new GatewayConnectivityTestImpl( messageGetterFactory, uri, path, user, password, severityOfFailures ) { @Override HttpClient getHttpClient() { return HttpClients.createDefault(); } }; } private void initMock() { connectTest = new GatewayConnectivityTestImpl( messageGetterFactory, uri, path, user, password, severityOfFailures ) { @Override HttpClient getHttpClient() { return httpClient; } @Override HttpClient getHttpClient( String user, String password ) { return httpClient; } }; } @Test public void testHttp() throws IOException { uri = URI.create( HTTP + hostname + ":" + port + topology ); initMock(); verifyRuntimeTestResultEntry( connectTest.runTest(), RuntimeTestEntrySeverity.INFO, messageGetter.getMessage( GatewayConnectivityTestImpl.GATEWAY_CONNECT_TEST_CONNECT_SUCCESS_DESC ), messageGetter.getMessage( GatewayConnectivityTestImpl.GATEWAY_CONNECT_TEST_CONNECT_SUCCESS_MESSAGE, uri.toString() + path ) ); } @Test public void testBlankHostname() { uri = URI.create( HTTPS + "" + ":" + port + topology ); initMock(); verifyRuntimeTestResultEntry( connectTest.runTest(), severityOfFailures, messageGetter.getMessage( ConnectivityTestImpl.CONNECT_TEST_HOST_BLANK_DESC ), messageGetter.getMessage( ConnectivityTestImpl.CONNECT_TEST_HOST_BLANK_MESSAGE ) ); } @Test public void testUnknownHostException() throws IOException { verifyRuntimeTestResultEntry( connectTest.runTest(), severityOfFailures, messageGetter.getMessage( ConnectivityTestImpl.CONNECT_TEST_UNKNOWN_HOSTNAME_DESC ), messageGetter.getMessage( ConnectivityTestImpl.CONNECT_TEST_UNKNOWN_HOSTNAME_MESSAGE, hostname ), UnknownHostException.class ); } @Test public void testIOException() throws IOException { doThrow( new IOException() ).when( httpClient ) .execute( any( HttpUriRequest.class ), any( HttpContext.class ) ); initMock(); verifyRuntimeTestResultEntry( connectTest.runTest(), severityOfFailures, messageGetter.getMessage( 
GatewayConnectivityTestImpl.GATEWAY_CONNECT_EXECUTION_FAILED_DESC ), messageGetter.getMessage( GatewayConnectivityTestImpl.GATEWAY_CONNECT_EXECUTION_FAILED_MESSAGE, uri.toString() + path ), IOException.class ); } @Test public void testSSLException() throws IOException { doThrow( new SSLException( "errorMessage" ) ).when( httpClient ) .execute( any( HttpUriRequest.class ), any( HttpContext.class ) ); initMock(); verifyRuntimeTestResultEntry( connectTest.runTest(), severityOfFailures, messageGetter.getMessage( GatewayConnectivityTestImpl.GATEWAY_CONNECT_SSLEXCEPTION_DESC ), messageGetter.getMessage( GatewayConnectivityTestImpl.GATEWAY_CONNECT_SSLEXCEPTION_MESSAGE, uri.toString() + path, "errorMessage" ), SSLException.class ); } @Test public void testNoSuchAlgorithmException() { System.setProperty( KETTLE_KNOX_IGNORE_SSL, "true" ); connectTest = new GatewayConnectivityTestImpl( messageGetterFactory, uri, path, user, password, severityOfFailures ) { @Override SSLContext getTlsContext() throws NoSuchAlgorithmException { throw new NoSuchAlgorithmException(); } }; verifyRuntimeTestResultEntry( connectTest.runTest(), RuntimeTestEntrySeverity.FATAL, messageGetter.getMessage( GatewayConnectivityTestImpl.GATEWAY_CONNECT_TLSCONTEXT_DESC ), messageGetter.getMessage( GatewayConnectivityTestImpl.GATEWAY_CONNECT_TLSCONTEXT_MESSAGE ), NoSuchAlgorithmException.class ); } @Test public void testKeyManagementException() { System.setProperty( KETTLE_KNOX_IGNORE_SSL, "true" ); connectTest = new GatewayConnectivityTestImpl( messageGetterFactory, uri, path, user, password, severityOfFailures ) { @Override void initContextWithTrustAll( SSLContext ctx ) throws KeyManagementException { throw new KeyManagementException(); } }; verifyRuntimeTestResultEntry( connectTest.runTest(), RuntimeTestEntrySeverity.FATAL, messageGetter.getMessage( GatewayConnectivityTestImpl.GATEWAY_CONNECT_TLSCONTEXTINIT_DESC ), messageGetter.getMessage( GatewayConnectivityTestImpl.GATEWAY_CONNECT_TLSCONTEXTINIT_MESSAGE ), KeyManagementException.class ); } @Test public void testSuccess() throws IOException { initMock(); verifyRuntimeTestResultEntry( connectTest.runTest(), RuntimeTestEntrySeverity.INFO, messageGetter.getMessage( GatewayConnectivityTestImpl.GATEWAY_CONNECT_TEST_CONNECT_SUCCESS_DESC ), messageGetter.getMessage( GatewayConnectivityTestImpl.GATEWAY_CONNECT_TEST_CONNECT_SUCCESS_MESSAGE, uri.toString() + path ) ); } @Test public void test401() throws IOException { HttpResponse httpResponseMock = mock(HttpResponse.class); StatusLine statusLineMock = mock(StatusLine.class); doReturn( httpResponseMock ).when( httpClient ).execute( any( HttpUriRequest.class ), any( HttpContext.class) ); doReturn( statusLineMock ).when( httpResponseMock ).getStatusLine(); doReturn( 401 ).when( statusLineMock ).getStatusCode(); initMock(); verifyRuntimeTestResultEntry( connectTest.runTest(), severityOfFailures, messageGetter.getMessage( GatewayConnectivityTestImpl.GATEWAY_CONNECT_TEST_UNAUTHORIZED_DESC ), messageGetter .getMessage( GatewayConnectivityTestImpl.GATEWAY_CONNECT_TEST_UNAUTHORIZED_MESSAGE, uri.toString() + path, user ) ); } @Test public void test403() throws IOException { HttpResponse httpResponseMock = mock(HttpResponse.class); StatusLine statusLineMock = mock(StatusLine.class); doReturn( httpResponseMock ).when( httpClient ).execute( any( HttpUriRequest.class ), any( HttpContext.class) ); doReturn( statusLineMock ).when( httpResponseMock ).getStatusLine(); doReturn( 403 ).when( statusLineMock ).getStatusCode(); initMock(); 
verifyRuntimeTestResultEntry( connectTest.runTest(), severityOfFailures, messageGetter.getMessage( GatewayConnectivityTestImpl.GATEWAY_CONNECT_TEST_FORBIDDEN_DESC ), messageGetter.getMessage( GatewayConnectivityTestImpl.GATEWAY_CONNECT_TEST_FORBIDDEN_MESSAGE, uri.toString() + path, user ) ); } @Test public void test404() throws IOException { HttpResponse httpResponseMock = mock(HttpResponse.class); StatusLine statusLineMock = mock(StatusLine.class); doReturn( httpResponseMock ).when( httpClient ).execute( any( HttpUriRequest.class ), any( HttpContext.class) ); doReturn( statusLineMock ).when( httpResponseMock ).getStatusLine(); doReturn( 404 ).when( statusLineMock ).getStatusCode(); initMock(); verifyRuntimeTestResultEntry( connectTest.runTest(), severityOfFailures, messageGetter.getMessage( GatewayConnectivityTestImpl.GATEWAY_CONNECT_TEST_SERVICE_NOT_FOUND_DESC ), messageGetter.getMessage( GatewayConnectivityTestImpl.GATEWAY_CONNECT_TEST_SERVICE_NOT_FOUND_MESSAGE, uri.toString() + path ) ); } @Test public void testUnknownCode() throws IOException { Integer returnCode = 0; HttpResponse httpResponseMock = mock(HttpResponse.class); StatusLine statusLineMock = mock(StatusLine.class); doReturn( httpResponseMock ).when( httpClient ).execute( any( HttpUriRequest.class ), any( HttpContext.class ) ); doReturn( statusLineMock ).when( httpResponseMock ).getStatusLine(); doReturn( returnCode ).when( statusLineMock ).getStatusCode(); initMock(); verifyRuntimeTestResultEntry( connectTest.runTest(), RuntimeTestEntrySeverity.WARNING, messageGetter.getMessage( GatewayConnectivityTestImpl.GATEWAY_CONNECT_TEST_CONNECT_UNKNOWN_RETURN_CODE_DESC ), messageGetter .getMessage( GatewayConnectivityTestImpl.GATEWAY_CONNECT_TEST_CONNECT_UNKNOWN_RETURN_CODE_MESSAGE, user, returnCode.toString(), uri.toString() + path ) ); } } ================================================ FILE: api/runtimeTest/src/test/java/org/pentaho/runtime/test/result/RuntimeTestEntrySeverityTest.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.runtime.test.result; import org.junit.Test; import java.util.Arrays; import static org.junit.Assert.assertEquals; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; /** * Created by bryan on 8/27/15. 
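* Verifies that {@code maxSeverityResult()} skips results that are not yet done, tolerates null severities, and that {@code values()}/{@code valueOf()} round-trip for every severity.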
*/ public class RuntimeTestEntrySeverityTest { @Test public void testMaxSeverityResult() { RuntimeTestResult runtimeTestResult1 = mock( RuntimeTestResult.class ); RuntimeTestResult runtimeTestResult2 = mock( RuntimeTestResult.class ); RuntimeTestResult runtimeTestResult3 = mock( RuntimeTestResult.class ); RuntimeTestResultEntry runtimeTestResultEntry1 = mock( RuntimeTestResultEntry.class ); RuntimeTestResultEntry runtimeTestResultEntry2 = mock( RuntimeTestResultEntry.class ); RuntimeTestResultEntry runtimeTestResultEntry3 = mock( RuntimeTestResultEntry.class ); when( runtimeTestResult1.getOverallStatusEntry() ).thenReturn( runtimeTestResultEntry1 ); when( runtimeTestResult2.getOverallStatusEntry() ).thenReturn( runtimeTestResultEntry2 ); when( runtimeTestResult3.getOverallStatusEntry() ).thenReturn( runtimeTestResultEntry3 ); when( runtimeTestResultEntry1.getSeverity() ).thenReturn( RuntimeTestEntrySeverity.INFO ); when( runtimeTestResultEntry2.getSeverity() ).thenReturn( RuntimeTestEntrySeverity.FATAL ); when( runtimeTestResultEntry3.getSeverity() ).thenReturn( null ); when( runtimeTestResult1.isDone() ).thenReturn( true ); when( runtimeTestResult2.isDone() ).thenReturn( false ).thenReturn( true ); when( runtimeTestResult3.isDone() ).thenReturn( true ); assertEquals( RuntimeTestEntrySeverity.INFO, RuntimeTestEntrySeverity .maxSeverityResult( Arrays.asList( runtimeTestResult1, runtimeTestResult2, runtimeTestResult3 ) ) ); assertEquals( RuntimeTestEntrySeverity.FATAL, RuntimeTestEntrySeverity .maxSeverityResult( Arrays.asList( runtimeTestResult1, runtimeTestResult2, runtimeTestResult3 ) ) ); } @Test public void testValuesAndValueOf() { for ( RuntimeTestEntrySeverity runtimeTestEntrySeverity : RuntimeTestEntrySeverity.values() ) { assertEquals( runtimeTestEntrySeverity, RuntimeTestEntrySeverity.valueOf( runtimeTestEntrySeverity.name() ) ); } } } ================================================ FILE: api/runtimeTest/src/test/java/org/pentaho/runtime/test/test/impl/BaseRuntimeTestTest.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.runtime.test.test.impl; import org.junit.Before; import org.junit.Test; import org.pentaho.runtime.test.result.RuntimeTestResultSummary; import java.util.Arrays; import java.util.HashSet; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; /** * Created by bryan on 8/20/15. */ public class BaseRuntimeTestTest { private String module; private String id; private String name; private boolean configInitTest; private HashSet dependencies; private BaseRuntimeTest baseRuntimeTest; @Before public void setup() { module = "module"; id = "id"; name = "name"; configInitTest = true; dependencies = new HashSet<>( Arrays.asList( "dependency" ) ); baseRuntimeTest = new BaseRuntimeTest( Object.class, module, id, name, configInitTest, dependencies ) { @Override public RuntimeTestResultSummary runTest( Object objectUnderTest ) { throw new UnsupportedOperationException( "This is a test object, don't run it... ever..." 
); } }; } @Test public void testGetModule() { assertEquals( module, baseRuntimeTest.getModule() ); } @Test public void testGetId() { assertEquals( id, baseRuntimeTest.getId() ); } @Test public void testGetName() { assertEquals( name, baseRuntimeTest.getName() ); } @Test public void testIsConfigInitTest() { assertTrue( baseRuntimeTest.isConfigInitTest() ); assertFalse( new BaseRuntimeTest( Object.class, module, id, name, dependencies ) { @Override public RuntimeTestResultSummary runTest( Object objectUnderTest ) { throw new UnsupportedOperationException( "This is a test object, don't run it... ever..." ); } }.isConfigInitTest() ); } @Test public void testToString() { String string = baseRuntimeTest.toString(); assertTrue( string.contains( module ) ); assertTrue( string.contains( id ) ); assertTrue( string.contains( name ) ); assertTrue( string.contains( "true" ) ); assertTrue( string.contains( dependencies.toString() ) ); } } ================================================ FILE: api/runtimeTest/src/test/java/org/pentaho/runtime/test/test/impl/RuntimeTestDelegateWithMoreDependenciesTest.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.runtime.test.test.impl; import org.junit.Before; import org.junit.Test; import org.pentaho.runtime.test.RuntimeTest; import java.util.Arrays; import java.util.HashSet; import java.util.Set; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; /** * Created by bryan on 8/20/15. 
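* Verifies that {@code RuntimeTestDelegateWithMoreDependencies} forwards module, id, name, and {@code isConfigInitTest()} to its delegate while merging the extra dependencies into {@code getDependencies()}.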
*/ public class RuntimeTestDelegateWithMoreDependenciesTest { private RuntimeTest delegate; private HashSet extraDependencies; private RuntimeTestDelegateWithMoreDependencies runtimeTestDelegateWithMoreDependencies; private String module; private String id; private String name; private String inheritedDep; private String newDep; @Before public void setup() { delegate = mock( RuntimeTest.class ); module = "module"; id = "id"; name = "name"; inheritedDep = "inheritedDep"; newDep = "newDep"; when( delegate.getModule() ).thenReturn( module ); when( delegate.getId() ).thenReturn( id ); when( delegate.getName() ).thenReturn( name ); when( delegate.getDependencies() ).thenReturn( new HashSet<>( Arrays.asList( inheritedDep ) ) ); extraDependencies = new HashSet<>( Arrays.asList( newDep ) ); runtimeTestDelegateWithMoreDependencies = new RuntimeTestDelegateWithMoreDependencies( delegate, extraDependencies ); } @Test public void testGetModule() { assertEquals( module, runtimeTestDelegateWithMoreDependencies.getModule() ); } @Test public void testGetId() { assertEquals( id, runtimeTestDelegateWithMoreDependencies.getId() ); } @Test public void testGetName() { assertEquals( name, runtimeTestDelegateWithMoreDependencies.getName() ); } @Test public void testIsConfigInitTest() { when( delegate.isConfigInitTest() ).thenReturn( false ).thenReturn( true ); assertFalse( runtimeTestDelegateWithMoreDependencies.isConfigInitTest() ); assertTrue( runtimeTestDelegateWithMoreDependencies.isConfigInitTest() ); } @Test public void testGetDependencies() { Set dependencies = runtimeTestDelegateWithMoreDependencies.getDependencies(); assertTrue( dependencies.contains( inheritedDep ) ); assertTrue( dependencies.contains( newDep ) ); } @Test public void testToString() { String string = runtimeTestDelegateWithMoreDependencies.toString(); assertTrue( string.contains( delegate.toString() ) ); assertTrue( string.contains( newDep ) ); } } ================================================ FILE: api/runtimeTest/src/test/java/org/pentaho/runtime/test/test/impl/RuntimeTestResultEntryImplTest.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.runtime.test.test.impl; import org.junit.Before; import org.junit.Test; import org.pentaho.runtime.test.result.RuntimeTestEntrySeverity; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; /** * Created by bryan on 8/21/15. 
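* Covers the three- and four-argument {@code RuntimeTestResultEntryImpl} constructors along with the severity, description, message, and exception accessors and {@code toString()}.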
*/ public class RuntimeTestResultEntryImplTest { private RuntimeTestEntrySeverity severity; private String description; private String message; private Exception exception; private RuntimeTestResultEntryImpl runtimeTestResultEntry; @Before public void setup() { severity = RuntimeTestEntrySeverity.ERROR; description = "desc"; message = "msg"; exception = new Exception(); runtimeTestResultEntry = new RuntimeTestResultEntryImpl( severity, description, message, exception ); } @Test public void test3ArgConstructor() { exception = null; runtimeTestResultEntry = new RuntimeTestResultEntryImpl( severity, description, message ); testGetSeverity(); testGetDescription(); testGetMessage(); testToString(); } @Test public void testGetSeverity() { assertEquals( severity, runtimeTestResultEntry.getSeverity() ); } @Test public void testGetDescription() { assertEquals( description, runtimeTestResultEntry.getDescription() ); } @Test public void testGetMessage() { assertEquals( message, runtimeTestResultEntry.getMessage() ); } @Test public void testGetException() { assertEquals( exception, runtimeTestResultEntry.getException() ); } @Test public void testToString() { String string = runtimeTestResultEntry.toString(); assertTrue( string.contains( severity.toString() ) ); assertTrue( string.contains( description ) ); assertTrue( string.contains( message ) ); assertTrue( string.contains( String.valueOf( exception ) ) ); } } ================================================ FILE: api/runtimeTest/src/test/java/org/pentaho/runtime/test/test/impl/RuntimeTestResultImplTest.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.runtime.test.test.impl; import org.junit.Before; import org.junit.Test; import org.pentaho.runtime.test.RuntimeTest; import org.pentaho.runtime.test.result.RuntimeTestEntrySeverity; import org.pentaho.runtime.test.result.RuntimeTestResultEntry; import org.pentaho.runtime.test.result.org.pentaho.runtime.test.result.impl.RuntimeTestResultSummaryImpl; import java.util.ArrayList; import java.util.Arrays; import java.util.List; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; import static org.mockito.Mockito.mock; /** * Created by bryan on 8/20/15. 
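* Covers a {@code RuntimeTestResultImpl} built from a {@code RuntimeTestResultSummaryImpl}, checking the overall status severity, the result entries, the wrapped test, the time taken, and {@code toString()}.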
*/ public class RuntimeTestResultImplTest { private RuntimeTest runtimeTest; private List runtimeTestResultEntries; private long timeTaken; private RuntimeTestResultImpl runtimeTestResult; private RuntimeTestResultEntry runtimeTestResultEntry; private RuntimeTestEntrySeverity info; @Before public void setup() { runtimeTest = mock( RuntimeTest.class ); info = RuntimeTestEntrySeverity.INFO; runtimeTestResultEntry = new RuntimeTestResultEntryImpl( info, "testDesc", "testMessage" ); runtimeTestResultEntries = new ArrayList<>( Arrays.asList( runtimeTestResultEntry ) ); timeTaken = 10L; runtimeTestResult = new RuntimeTestResultImpl( runtimeTest, true, new RuntimeTestResultSummaryImpl( runtimeTestResultEntry, runtimeTestResultEntries ), timeTaken ); } @Test public void testGetMaxSeverity() { assertEquals( info, runtimeTestResult.getOverallStatusEntry().getSeverity() ); } @Test public void testGetRuntimeTestResultEntries() { assertEquals( runtimeTestResultEntries, runtimeTestResult.getRuntimeTestResultEntries() ); } @Test public void testGetRuntimeTest() { assertEquals( runtimeTest, runtimeTestResult.getRuntimeTest() ); } @Test public void testGetTimeTaken() { assertEquals( timeTaken, runtimeTestResult.getTimeTaken() ); } @Test public void testToString() { String string = runtimeTestResult.toString(); assertTrue( string.contains( info.toString() ) ); assertTrue( string.contains( runtimeTestResultEntry.toString() ) ); assertTrue( string.contains( runtimeTest.toString() ) ); assertTrue( string.contains( String.valueOf( timeTaken ) ) ); } } ================================================ FILE: assemblies/pentaho-big-data-plugin/pom.xml ================================================ 4.0.0 pentaho pentaho-big-data-assemblies 11.1.0.0-SNAPSHOT pentaho-big-data-plugin 11.1.0.0-SNAPSHOT pom Apache License, Version 2.0 http://www.apache.org/licenses/LICENSE-2.0.txt repo A business-friendly OSS license scm:git:git@github.com:${github.user}/${project.artifactId}.git scm:git:git@github.com:${github.user}/${project.artifactId}.git scm:git:git@github.com:${github.user}/${project.artifactId}.git pentaho-hadoop-shims-cdpdc71 cdpdc71 -cdp cdpdc71 cdp ${project.version} hdi pentaho-hadoop-shims-hdi40 hdi40 -hdi hdi40 hdi org.pentaho.hadoop.shims pentaho-hadoop-shims-hdi40 ${pentaho-hadoop-shims.version} zip provided * * maven-assembly-plugin pkg-hdi package single emr pentaho-hadoop-shims-emr770 emr770 -emr emr770 emr org.pentaho.hadoop.shims pentaho-hadoop-shims-emr770 ${pentaho-hadoop-shims.version} zip provided * * maven-assembly-plugin pkg-emr package single dataproc pentaho-hadoop-shims-dataproc23 dataproc23 -dataproc dataproc23 dataproc org.pentaho.hadoop.shims pentaho-hadoop-shims-dataproc23 ${pentaho-hadoop-shims.version} zip provided * * maven-assembly-plugin pkg-dataproc package single cdp true pentaho-hadoop-shims-cdpdc71 cdpdc71 -cdp cdpdc71 cdp org.pentaho.hadoop.shims pentaho-hadoop-shims-cdpdc71 ${pentaho-hadoop-shims.version} zip provided * * maven-assembly-plugin pkg-cdp package single apachevanilla pentaho-hadoop-shims-apachevanilla apachevanilla -apachevanilla apachevanilla apachevanilla org.pentaho.hadoop.shims pentaho-hadoop-shims-apachevanilla ${project.version} zip provided * * maven-assembly-plugin pkg-apachevanilla package single org.slf4j slf4j-api ${slf4j.version} provided org.apache.logging.log4j log4j-slf4j2-impl provided pentaho pentaho-big-data-kettle-plugins-browse ${big-data-plugin.version} pentaho pentaho-big-data-api-runtimeTest ${big-data-plugin.version} pentaho 
pentaho-big-data-impl-cluster ${big-data-plugin.version} pentaho pentaho-big-data-kettle-plugins-common-ui ${big-data-plugin.version} pentaho pentaho-big-data-kettle-plugins-common-job ${big-data-plugin.version} org.pentaho shim-api-core ${pentaho-hadoop-shims.version} org.pentaho.hadoop.shims pentaho-hadoop-shims-common-base ${pentaho-hadoop-shims.version} org.apache.commons commons-vfs2-hdfs ${commons-vfs2.version} pentaho pentaho-authentication-mapper-impl ${big-data-plugin.version} org.pentaho.hadoop.shims pentaho-hadoop-shims-common-dependencies ${pentaho-hadoop-shims.version} pentaho pentaho-big-data-impl-clusterTests ${big-data-plugin.version} pentaho pentaho-authentication-mapper-api ${big-data-plugin.version} org.pentaho pentaho-hadoop-shims-common-services-api ${pentaho-hadoop-shims.version} org.pentaho pentaho-hadoop-shims-common-mapreduce ${pentaho-hadoop-shims.version} pentaho hadoop-cluster-ui ${big-data-plugin.version} pentaho pentaho-big-data-legacy-core ${big-data-plugin.version} pentaho pentaho-big-data-legacy ${big-data-plugin.version} pentaho pdi-legacy-amazon-core ${big-data-plugin.version} pentaho pentaho-big-data-impl-vfs-hdfs-core ${big-data-plugin.version} pentaho services-bootstrap ${big-data-plugin.version} com.amazonaws aws-java-sdk-iam com.amazonaws jmespath-java com.amazonaws aws-java-sdk-core com.fasterxml.jackson.dataformat jackson-dataformat-cbor com.fasterxml.jackson.core jackson-databind software.amazon.ion ion-java com.amazonaws aws-java-sdk-emr com.amazonaws jmespath-java com.amazonaws aws-java-sdk-pricing com.amazonaws jmespath-java com.amazonaws aws-java-sdk-s3 com.amazonaws aws-java-sdk-kms com.amazonaws jmespath-java joda-time joda-time ${dependency.joda-time.revision} * * org.apache.avro avro ${org.apache.avro.version} org.eclipse.core commands ${dependency.commands.revision} org.eclipse.equinox common ${dependency.common.revision} com.github.stephenc.high-scale-lib high-scale-lib ${dependency.high-scale-lib.revision} org.apache.httpcomponents httpclient org.apache.httpcomponents httpcore net.java.dev.jets3t jets3t ${dependency.jets3t.revision} jline jline ${dependency.jline.revision} org.apache.thrift libthrift ${org.apache.thrift.version} xmlpull xmlpull ${dependency.xmlpull.revision} xpp3 xpp3_min ${dependency.xpp3.revision} com.thoughtworks.xstream xstream pentaho pdi-pig-plugin ${big-data-plugin.version} zip provided * * pentaho pdi-sqoop-plugin ${big-data-plugin.version} zip provided * * pentaho pdi-hbase-plugin ${big-data-plugin.version} zip provided * * pentaho pdi-mapreduce-plugin ${big-data-plugin.version} zip provided * * pentaho pdi-hive-plugin ${big-data-plugin.version} zip provided * * pentaho pdi-formats-plugin ${big-data-plugin.version} zip provided * * pentaho pdi-hdfs-plugin ${big-data-plugin.version} zip provided * * pentaho pdi-sqoop-plugin ${big-data-plugin.version} zip provided * * pentaho pdi-oozie-plugin ${big-data-plugin.version} zip provided * * pentaho pdi-spark-plugin ${big-data-plugin.version} zip provided * * pentaho pdi-legacy-amazon-plugin ${big-data-plugin.version} zip provided * * pentaho pentaho-big-data-assemblies-pmr-libraries ${big-data-plugin.version} zip plugin pentaho pentaho-big-data-plugin-samples ${big-data-plugin.version} zip * * true src/main/resources **/*.properties false src/main/resources **/*.properties maven-assembly-plugin ${basedir}/src/main/assembly/descriptors/plugin.xml org.codehaus.plexus plexus-interpolation 1.26 maven-dependency-plugin unpack generate-resources unpack 
org.pentaho.hadoop.shims ${hadoop.shim.artifactId} zip ${basedir}/target/plugins/pentaho-big-data-plugin/hadoop-configurations ${hadoop.shim.destFileName} pentaho pdi-pig-plugin zip ${basedir}/target/plugins/pentaho-big-data-plugin/plugins pentaho pdi-hbase-plugin zip ${basedir}/target/plugins/pentaho-big-data-plugin/plugins pentaho pdi-hdfs-plugin zip ${basedir}/target/plugins/pentaho-big-data-plugin/plugins pentaho pdi-sqoop-plugin zip ${basedir}/target/plugins/pentaho-big-data-plugin/plugins pentaho pdi-oozie-plugin zip ${basedir}/target/plugins/pentaho-big-data-plugin/plugins pentaho pdi-spark-plugin zip ${basedir}/target/plugins/pentaho-big-data-plugin/plugins pentaho pdi-mapreduce-plugin zip ${basedir}/target/plugins/pentaho-big-data-plugin/plugins pentaho pdi-hive-plugin zip ${basedir}/target/plugins/pentaho-big-data-plugin/plugins pentaho pdi-formats-plugin zip ${basedir}/target/plugins/pentaho-big-data-plugin/plugins pentaho pdi-legacy-amazon-plugin zip ${basedir}/target/plugins/pentaho-big-data-plugin/plugins maven-resources-plugin filter-assembly-resources process-resources copy-resources ${basedir}/target/filtered-assembly-resources src/main/assembly/resources true **/*.properties src/main/assembly/resources false **/*.properties maven-assembly-plugin ================================================ FILE: assemblies/pentaho-big-data-plugin/src/main/assembly/descriptors/plugin.xml ================================================ ${assembly.id} pentaho-big-data-plugin zip target/filtered-assembly-resources / target/plugins/pentaho-big-data-plugin/hadoop-configurations /hadoop-configurations target/plugins/pentaho-big-data-plugin/plugins / pentaho:pentaho-big-data-plugin-samples:zip true . false false / false runtime false ${artifact.artifactId}-${artifact.baseVersion}.${artifact.extension} pentaho:pentaho-big-data-legacy pentaho:pentaho-big-data-legacy-amazon pentaho:hadoop-cluster-ui pentaho:services-bootstrap pentaho:pentaho-big-data-kettle-plugins-kafka / false runtime false pentaho-mapreduce-libraries.zip pentaho:pentaho-big-data-assemblies-pmr-libraries /lib false runtime false ${artifact.artifactId}-${artifact.baseVersion}.${artifact.extension} pentaho:pentaho-big-data-legacy-core pentaho:pentaho-big-data-api-runtimeTest pentaho:pentaho-big-data-impl-clusterTests pentaho:pentaho-big-data-impl-cluster pentaho:pentaho-big-data-kettle-plugins-common-ui pentaho:pentaho-big-data-kettle-plugins-common-job pentaho:pentaho-big-data-impl-vfs-hdfs-core pentaho:pentaho-authentication-mapper-impl pentaho:pentaho-big-data-kettle-plugins-browse org.pentaho:shim-api-core org.pentaho.hadoop.shims:pentaho-hadoop-shims-common-base org.pentaho.hadoop.shims:pentaho-hadoop-shims-common-dependencies org.pentaho:pentaho-hadoop-shims-common-services-api org.pentaho:pentaho-hadoop-shims-common-mapreduce com.pentaho:pentaho-yarn-api pentaho:pentaho-authentication-mapper-api org.pentaho:pentaho-hadoop-shims-common-services-api org.apache.avro:avro joda-time:joda-time com.amazonaws:aws-java-sdk-core com.amazonaws:aws-java-sdk-iam com.amazonaws:aws-java-sdk-emr com.amazonaws:aws-java-sdk-s3 com.amazonaws:aws-java-sdk-pricing org.eclipse.core:commands org.eclipse.equinox:common commons-cli:commons-cli com.github.stephenc.high-scale-lib:high-scale-lib org.codehaus.jackson:jackson-core-asl net.java.dev.jets3t:jets3t jline:jline com.googlecode.json-simple:json-simple org.apache.commons:commons-vfs2-hdfs pentaho:pentaho-big-data-kettle-plugins-common-ui xmlpull:xmlpull xpp3:xpp3_min 
com.thoughtworks.xstream:xstream org.slf4j:slf4j-reload4j org.apache.httpcomponents:httpmime / false false provided true PentahoBigDataPlugin_OSS_Licenses.html ================================================ FILE: assemblies/pentaho-big-data-plugin/src/main/assembly/resources/bigdata-logging.properties ================================================ # Big Data Plugin Logging Configuration # # This file defines the loggers that should be configured for Big Data plugin components. # Each logger entry should follow the format: logger.<logger_name>=<log_level> # # Available log levels: TRACE, DEBUG, INFO, WARN, ERROR, FATAL # Pentaho Big Data Plugin Loggers (org.* packages only) logger.org.pentaho.big.data=INFO # Hadoop Core Loggers logger.org.apache.hadoop=INFO logger.org.apache.hadoop.io.retry=WARN # HBase Loggers logger.org.apache.hbase=INFO # Hive Loggers logger.org.apache.hive=INFO # Sqoop Loggers logger.org.apache.sqoop=INFO # Kafka Loggers logger.org.apache.kafka=WARN # Spark Loggers logger.org.apache.spark=WARN # Add custom loggers below as needed # logger.my.custom.package=DEBUG ================================================ FILE: assemblies/pentaho-big-data-plugin/src/main/assembly/resources/classpath.properties ================================================ classpath=./${hadoop.configurations.path}/${active.hadoop.configuration} ================================================ FILE: assemblies/pentaho-big-data-plugin/src/main/assembly/resources/hadoop-configurations/.kettle-ignore ================================================ ================================================ FILE: assemblies/pentaho-big-data-plugin/src/main/assembly/resources/plugin.properties ================================================ # The Hadoop Configuration to use when communicating with a Hadoop cluster. This is used for all Hadoop client tools # including HDFS, Hive, HBase, and Sqoop. # For more configuration options specific to the Hadoop configuration chosen # here see the config.properties file in that configuration's directory. # Note: should no longer be used and will be ignored after named cluster configurations have been updated for 9.0+ # Will only be referenced if site configuration files are not found in the expected locations in the metastore folders active.hadoop.configuration=${active.hadoop.configuration} # If using shim configurations from 8.3 and prior and have not migrated to 9.0 configurations: # Path to the directory that contains the available Hadoop configurations # If using shim configurations from 9.0+: # Path to metastore to use when running on a Pentaho slave server (e.g. Pan or Kitchen) # To use an existing PDI metastore, for example, the directory is /home/user/.pentaho hadoop.configurations.path=hadoop-configurations # Version of Kettle to use from the Kettle HDFS installation directory. This can be set globally here or overridden per job # as a User Defined property. If not set, we will use the version of Kettle that is used to submit the Pentaho MapReduce job. pmr.kettle.installation.id= # Installation path in HDFS for the Pentaho MapReduce Hadoop Distribution # The installation should follow this directory structure, where {version} can be configured through the Pentaho MapReduce # User Defined properties as kettle.runtime.version # # /opt/pentaho/mapreduce/ # +- {version}/ # | +- lib/ # | ..
# | +- kettle-core-{version}.jar # | +- kettle-engine-{version}.jar # | .. # | +- plugins/ # | +- pentaho-big-data-plugin/ # | .. # | +- my-custom-plugin/ # | .. pmr.kettle.dfs.install.dir=/opt/pentaho/mapreduce # Enables the use of Hadoop's Distributed Cache to store the Kettle environment required to execute Pentaho MapReduce # If this is disabled you must configure all TaskTracker nodes with the Pentaho for Hadoop Distribution # @deprecated This is deprecated and is provided as a migration path for existing installations. pmr.use.distributed.cache=true # Pentaho MapReduce runtime archive to be preloaded into kettle.hdfs.install.dir/pmr.kettle.installation.id pmr.libraries.archive.file=pentaho-mapreduce-libraries.zip # Additional plugins to be copied when Pentaho MapReduce's Kettle Environment does not exist on DFS. This should be a comma-separated # list of plugin folder names to copy. # e.g. pmr.kettle.additional.plugins=my-test-plugin,steps/DummyPlugin pmr.kettle.additional.plugins=pdi-core-plugins,pentaho-metastore-locator-plugin # Individual file name prefixes to not include when adding plugins to the Pentaho MapReduce Kettle Environment. This should be a comma-separated # list of file prefixes to exclude. The pdi-core-plugins-ui value should not be removed, only added to as demonstrated below. # e.g. to exclude the file some-jar-file-name-9.0.0.0-xxx.jar: pmr.kettle.exclude.plugin.files=pdi-core-plugins-ui,some-jar-file-name pmr.kettle.exclude.plugin.files=pdi-core-plugins-ui notificationsBeforeLoadingShim=1 # pmr.create.unique.metastore.dir: # If the property is not present or set to true, a unique metastore directory is created for each execution of a Pentaho MapReduce job # If the property is set to false, a single metastore directory is created and is overwritten for each Pentaho MapReduce job # Setting this property to false will save space within HDFS but can cause concurrency and security issues if multiple users are using the same # pmr.kettle.dfs.install.dir. 
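# e.g. a single-user installation that wants to save HDFS space could set pmr.create.unique.metastore.dir=false; shared installations should keep it at true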
pmr.create.unique.metastore.dir=true # Value to use for the ipc.client.connect.max.retries.on.timeouts Hadoop property when connecting to HDFS # Note that this value overrides any ipc.client.connect.max.retries.on.timeouts set in *site.xml files for individual # Hadoop cluster definitions hadoopfs.ipc.client.connect.max.retries.on.timeouts=5 # These clauses are added on the java command line when executing a "Start PDI Cluster on Yarn" step yarn.additional.jvm.options=--add-opens java.base/java.lang=ALL-UNNAMED --add-opens java.base/java.lang.reflect=ALL-UNNAMED --add-opens java.base/sun.net.www.protocol.jar=ALL-UNNAMED --add-opens java.base/java.util=ALL-UNNAMED --add-opens java.base/java.io=ALL-UNNAMED --add-opens java.base/java.net=ALL-UNNAMED --add-opens java.base/java.security=ALL-UNNAMED --add-opens java.base/sun.net.www.protocol.file=ALL-UNNAMED --add-opens java.base/java.security=ALL-UNNAMED --add-opens java.base/sun.net.www.protocol.file=ALL-UNNAMED --add-opens java.base/sun.net.www.protocol.ftp=ALL-UNNAMED --add-opens java.base/sun.net.www.protocol.http=ALL-UNNAMED --add-opens java.base/sun.reflect.misc=ALL-UNNAMED --add-opens java.management/javax.management=ALL-UNNAMED --add-opens java.management/javax.management.openmbean=ALL-UNNAMED --add-opens java.naming/com.sun.jndi.ldap=ALL-UNNAMED --add-opens java.base/java.math=ALL-UNNAMED --add-opens java.base/java.io=ALL-UNNAMED --add-opens java.base/java.lang.Object=ALL-UNNAMED --add-opens java.base/sun.nio.ch=ALL-UNNAMED ================================================ FILE: assemblies/pentaho-big-data-plugin/src/main/assembly/resources/plugins/.gitignore ================================================ ================================================ FILE: assemblies/pmr-libraries/pom.xml ================================================ 4.0.0 pentaho pentaho-big-data-assemblies 11.1.0.0-SNAPSHOT pentaho-big-data-assemblies-pmr-libraries 11.1.0.0-SNAPSHOT pom 3.108.0 1.1 1.4.7 2.0 3.0.1 6.0.0 1.0.0.GA 2.5.1 1.19.1 5.1.2.Final 5.6.15.Final site 2.6.0 2.2.3 0.2.9 11.1.0.0-SNAPSHOT 11.1.0.0-SNAPSHOT 8.0.0 2.12.2 org.osgi osgi.core ${osgi.core.version} runtime io.reactivex.rxjava2 rxjava ${rxjava.version} org.antlr antlr-complete 3.5.2 ascsapjco3wrp ascsapjco3wrp 20100529 asm asm 3.2 org.bouncycastle bcmail-jdk15to18 org.bouncycastle bcpkix-jdk15to18 org.bouncycastle bcutil-jdk15to18 org.bouncycastle bcprov-jdk15to18 bsf bsf 2.4.0 cglib cglib-nodep 2.2 com.enterprisedt edtftpj 2.1.0 com.google.code.findbugs jsr305 1.3.9 com.googlecode.jsendnsca jsendnsca 2.0.1 com.googlecode.json-simple json-simple 1.1 com.google.gdata core 1.47.1 com.google.gdata gdata-analytics 2.3.0 com.google.gdata gdata-analytics-meta 2.1 com.google.gdata gdata-client 1.41.4 com.google.gdata gdata-client-meta 1.0 com.google.gdata gdata-core 1.41.4 com.google.guava guava ${guava.version} com.healthmarketscience.jackcess jackcess 1.2.6 com.github.mwiede jsch ${jsch.version} com.github.librepdf openpdf ${openpdf.version} bouncycastle * org.bouncycastle * com.github.librepdf openrtf ${openrtf.version} bouncycastle * org.bouncycastle * com.tinkerpop.blueprints blueprints-core ${blueprints-core.version} commons-beanutils commons-beanutils ${commons-beanutils.version} commons-cli commons-cli 1.2 commons-collections commons-collections 3.2.2 org.apache.commons commons-configuration2 commons-configuration commons-configuration 1.6 commons-digester commons-digester 1.8 commons-discovery commons-discovery 0.4 commons-fileupload commons-fileupload 
${commons-fileupload-osgi.version} commons-io commons-io ${commons-io.version} commons-lang commons-lang 2.6 commons-math commons-math 1.1 commons-validator commons-validator 1.3.1 org.apache.logging.log4j log4j-core ${log4j.version} org.apache.logging.log4j log4j-api ${log4j.version} org.apache.commons commons-vfs2 com.sun.jersey.contribs jersey-apache-client ${jersey.version} com.sun.jersey jersey-bundle ${jersey.version} com.sun.jersey jersey-client ${jersey.version} com.sun.jersey jersey-core ${jersey.version} com.thoughtworks.xstream xstream org.dom4j dom4j eigenbase eigenbase-properties 1.1.2 eigenbase eigenbase-resgen 1.3.1 eigenbase eigenbase-xom 1.3.5 feed4j feed4j 1.0 ftp4che ftp4che 0.7.1 infobright infobright-core 3.4 org.codehaus.janino janino ${janino.version} javacup javacup 10k javadbf javadbf 20081125 org.javassist javassist 3.20.0-GA javax.activation activation ${javax.activation-api.version} javax.mail mail ${javax.mail.version} javax.servlet jsp-api ${jsp-api.version} javax.servlet javax.servlet-api ${javax.servlet-api.version} jakarta.servlet jakarta.servlet-api ${jakarta.servlet.version} javax.validation validation-api ${validation-api.version} javax.xml jaxrpc-api 1.1 jaxen jaxen jcifs jcifs 1.3.3 jexcelapi jxl 2.6.12 jfree jcommon 1.0.16 jfree jfreechart 1.0.13 jsonpath jsonpath 1.0 jug-lgpl jug-lgpl 2.0.0 ldapjdk ldapjdk 20000524 monetdb monetdb-jdbc 2.8 net.java.dev.javacc javacc 5.0 net.sf.ehcache ehcache-core ${ehcache.version} net.sf.saxon Saxon-HE org.xmlresolver xmlresolver net.sf.scannotation scannotation 1.0.2 javassist javassist ognl ognl 2.6.9 org.apache.commons commons-compress org.apache.commons commons-collections4 4.1 org.apache.xmlbeans xmlbeans ${xmlbeans.version} org.apache.xmlgraphics batik-anim ${batik.version} org.apache.xmlgraphics batik-awt-util ${batik.version} org.apache.xmlgraphics batik-bridge ${batik.version} org.apache.xmlgraphics batik-codec ${batik.version} org.apache.xmlgraphics batik-css ${batik.version} org.apache.xmlgraphics batik-dom ${batik.version} org.apache.xmlgraphics batik-ext ${batik.version} org.apache.xmlgraphics batik-gui-util ${batik.version} org.apache.xmlgraphics batik-gvt ${batik.version} org.apache.xmlgraphics batik-parser ${batik.version} org.apache.xmlgraphics batik-script ${batik.version} org.apache.xmlgraphics batik-svg-dom ${batik.version} org.apache.xmlgraphics batik-transcoder ${batik.version} org.apache.xmlgraphics batik-util ${batik.version} org.apache.xmlgraphics batik-xml ${batik.version} org.apache.xmlgraphics batik-constants ${batik.version} org.apache.xmlgraphics batik-i18n ${batik.version} org.apache-extras.beanshell bsh ${beanshell.version} org.codehaus.groovy groovy 2.4.21 * * com.fasterxml.jackson.jaxrs jackson-jaxrs-json-provider ${fasterxml-jackson.version} org.drools drools-ruleunits-engine 8.44.0.Final org.drools drools-compiler 8.44.0.Final org.drools drools-core 8.44.0.Final org.eobjects.sassyreader SassyReader 0.5 org.fife.ui rsyntaxtextarea 1.3.2 org.hibernate.common hibernate-commons-annotations ${hibernate-commons-annotations.version} org.hibernate hibernate-core ${hibernate-core.version} org.hibernate hibernate-ehcache ${hibernate-core.version} org.pentaho json ${pentaho-json.version} org.mnode.mstor mstor 0.9.13 * * org.mvel mvel2 2.0.10 org.odftoolkit odfdom-java 0.8.6 org.olap4j olap4j 1.2.0 org.olap4j olap4j-xmla 1.2.0 org.owasp.encoder encoder 1.2 org.postgresql postgresql org.safehaus.jug jug-lgpl 2.0.0 org.samba.jcifs jcifs 1.3.3 org.scannotation scannotation 1.0.2 javassist 
javassist org.snmp4j snmp4j 1.9.3d org.springframework.security spring-security-core org.springframework spring-core org.springframework spring-beans org.springframework spring-context org.springframework spring-aop org.springframework spring-expression org.syslog4j syslog4j 0.9.34 org.w3c.css sac 1.3 org.xerial.snappy snappy-java ${snappy-java.version} org.yaml snakeyaml pentaho-kettle kettle-core ${pdi.version} org.pentaho pentaho-encryption-support ${encryption-support.version} pentaho-kettle kettle-engine ${pdi.version} jackson-jaxrs org.codehaus.jackson jackson-core-asl org.codehaus.jackson pentaho-kettle kettle-ui-swt ${pdi.version} org.pentaho.reporting.library flute ${pentaho-reporting.version} org.pentaho.reporting.library libbase ${pentaho-reporting.version} org.pentaho.reporting.library libdocbundle ${pentaho-reporting.version} org.pentaho.reporting.library libfonts ${pentaho-reporting.version} org.pentaho.reporting.library libformat ${pentaho-reporting.version} org.pentaho.reporting.library libformula ${pentaho-reporting.version} org.pentaho.reporting.library libloader ${pentaho-reporting.version} org.pentaho.reporting.library libpixie ${pentaho-reporting.version} org.pentaho.reporting.library librepository ${pentaho-reporting.version} org.pentaho.reporting.library libserializer ${pentaho-reporting.version} org.pentaho.reporting.library libswing ${pentaho-reporting.version} org.pentaho.reporting.library libxml ${pentaho-reporting.version} pentaho simple-jndi ${simple-jndi.version} pentaho pentaho-vfs-browser ${pdi.version} org.pentaho shim-api ${pentaho-hadoop-shims.version} org.pentaho shim-api-core ${pentaho-hadoop-shims.version} pentaho metastore ${metastore.version} pentaho mondrian ${mondrian.version} org.pentaho commons-database-model ${commons-database.version} org.pentaho pentaho-metadata ${pentaho-metadata.version} org.pentaho pentaho-registry ${pentaho-registry.version} org.pentaho.reporting.engine classic-core ${pentaho-reporting.version} org.pentaho commons-xul-core ${commons-xul.version} org.mozilla rhino stax stax 1.2.0 stax stax-api 1.0.1 sun jlfgr 1.0 org.apache.sshd sshd-core org.apache.sshd sshd-sftp woodstox wstx-asl 3.2.4 wsdl4j wsdl4j 1.6.2 wsdl4j wsdl4j-qname 1.6.1 xml-apis xml-apis 2.0.2 xml-apis xml-apis-ext 1.3.04 xmlpull xmlpull 1.1.3.1 xpp3 xpp3_min 1.1.4c commons-logging commons-logging org.apache.httpcomponents httpclient org.apache.httpcomponents.core5 httpcore5 org.apache.httpcomponents.client5 httpclient5 org.eclipse.platform org.eclipse.swt.gtk.linux.x86_64 ${org.eclipse.swt.gtk.linux.x86_64.version} org.eclipse.jetty jetty-http ${jetty-hadoop.version} org.eclipse.jetty jetty-continuation ${jetty-hadoop.version} org.eclipse.jetty jetty-io ${jetty-hadoop.version} org.eclipse.jetty jetty-plus ${jetty-hadoop.version} org.eclipse.jetty jetty-security ${jetty-hadoop.version} org.eclipse.jetty jetty-server ${jetty-hadoop.version} org.eclipse.jetty jetty-servlet ${jetty-hadoop.version} org.eclipse.jetty jetty-util ${jetty-hadoop.version} org.eclipse.jetty jetty-xml ${jetty-hadoop.version} org.slf4j slf4j-api org.apache.logging.log4j log4j-slf4j2-impl org.apache.logging.log4j log4j-core pentaho pentaho-platform-api ${platform.version} pentaho pentaho-platform-core ${platform.version} pentaho pentaho-platform-extensions ${platform.version} xbean org.apache.xbean jackson-core-asl org.codehaus.jackson pentaho pentaho-metaverse-api ${pentaho-metaverse.version} org.apache.avro avro-mapred ${org.apache.avro.version} * * org.pentaho.di.plugins 
pentaho-metastore-locator-api ${pdi.version} org.pentaho.di.plugins pentaho-kettle-repository-locator-api ${pdi.version} org.gwtproject gwt-user ${gwt.version} maven-resources-plugin ${dependency.maven-resources-plugin.revision} filter generate-resources resources false maven-assembly-plugin assembly package single src/main/descriptors/assembly.xml ${project.artifactId} true ================================================ FILE: assemblies/pmr-libraries/src/main/descriptors/assembly.xml ================================================ plugin zip target/classes . target/simple-jndi . lib false runtime false ${artifact.artifactId}-${artifact.baseVersion}.${artifact.extension} io.reactivex.rxjava2:rxjava ascsapjco3wrp:ascsapjco3wrp asm:asm org.bouncycastle:bcmail-jdk15to18 org.bouncycastle:bcprov-jdk15to18 org.bouncycastle:bcutil-jdk15to18 org.bouncycastle:bcpkix-jdk15to18 bsf:bsf cglib:cglib-nodep com.enterprisedt:edtftpj com.google.code.findbugs:jsr305 com.googlecode.jsendnsca:jsendnsca com.googlecode.json-simple:json-simple com.google.gdata:core com.google.gdata:gdata-analytics com.google.gdata:gdata-analytics-meta com.google.gdata:gdata-client com.google.gdata:gdata-client-meta com.google.gdata:gdata-core com.google.guava:guava com.healthmarketscience.jackcess:jackcess com.github.mwiede:jsch com.github.librepdf:openpdf com.github.librepdf:openrtf com.tinkerpop.blueprints:blueprints-core commons-beanutils:commons-beanutils commons-cli:commons-cli commons-collections:commons-collections commons-configuration:commons-configuration org.apache.commons:commons-dbcp2 commons-digester:commons-digester commons-discovery:commons-discovery commons-fileupload:commons-fileupload org.apache.httpcomponents:httpclient org.apache.httpcomponents:httpcore org.apache.httpcomponents.client5:httpclient5 org.apache.httpcomponents.core5:httpcore5 commons-io:commons-io commons-lang:commons-lang commons-logging:commons-logging commons-math:commons-math org.apache.commons:commons-pool2 commons-validator:commons-validator org.apache.commons:commons-configuration2 org.apache.commons:commons-vfs2 org.apache.logging.log4j:log4j-core org.apache.logging.log4j:log4j-api com.sun.jersey.contribs:jersey-apache-client com.sun.jersey:jersey-bundle com.sun.jersey:jersey-client com.sun.jersey:jersey-core xmlpull:xmlpull xpp3:xpp3_min com.thoughtworks.xstream:xstream org.dom4j:dom4j eigenbase:eigenbase-properties eigenbase:eigenbase-resgen eigenbase:eigenbase-xom feed4j:feed4j ftp4che:ftp4che infobright:infobright-core org.codehaus.janino:janino org.codehaus.janino:commons-compiler javacup:javacup javadbf:javadbf org.javassist:javassist javax.activation:activation com.sun.mail:javax.mail javax.servlet:jsp-api javax.servlet:javax.servlet-api javax.validation:validation-api javax.xml:jaxrpc-api jakarta.servlet:jakarta.servlet-api jaxen:jaxen jcifs:jcifs jexcelapi:jxl jfree:jcommon jfree:jfreechart jsonpath:jsonpath jug-lgpl:jug-lgpl ldapjdk:ldapjdk monetdb:monetdb-jdbc net.java.dev.javacc:javacc net.sf.ehcache:ehcache-core net.sf.saxon:Saxon-HE org.xmlresolver:xmlresolver net.sf.scannotation:scannotation ognl:ognl org.antlr:antlr-complete org.apache.commons:commons-compress org.apache.poi:poi org.apache.poi:poi-ooxml org.apache.poi:poi-ooxml-schemas org.apache.commons:commons-collections4 org.apache.xmlbeans:xmlbeans org.apache.xmlgraphics:batik-anim org.apache.xmlgraphics:batik-awt-util org.apache.xmlgraphics:batik-bridge org.apache.xmlgraphics:batik-codec org.apache.xmlgraphics:batik-css org.apache.xmlgraphics:batik-dom 
org.apache.xmlgraphics:batik-ext org.apache.xmlgraphics:batik-gui-util org.apache.xmlgraphics:batik-gvt org.apache.xmlgraphics:batik-parser org.apache.xmlgraphics:batik-script org.apache.xmlgraphics:batik-svg-dom org.apache.xmlgraphics:batik-transcoder org.apache.xmlgraphics:batik-util org.apache.xmlgraphics:batik-xml org.apache.xmlgraphics:batik-constants org.apache.xmlgraphics:batik-i18n org.apache-extras.beanshell:bsh org.codehaus.groovy:groovy org.codehaus.jackson:jackson-core-asl org.codehaus.jackson:jackson-jaxrs org.drools:drools-ruleunits-engine org.drools:drools-compiler org.drools:drools-core org.eclipse.platform:org.eclipse.swt.gtk.linux.x86_64 org.eclipse.jetty:jetty-continuation org.eclipse.jetty:jetty-http org.eclipse.jetty:jetty-io org.eclipse.jetty:jetty-plus org.eclipse.jetty:jetty-security org.eclipse.jetty:jetty-server org.eclipse.jetty:jetty-servlet org.eclipse.jetty:jetty-util org.eobjects.sassyreader:SassyReader org.fife.ui:rsyntaxtextarea org.hibernate.common:hibernate-commons-annotations org.hibernate:hibernate-core org.hibernate:hibernate-ehcache org.pentaho:json org.apache.logging.log4j:log4j-core org.mnode.mstor:mstor org.mvel:mvel2 org.odftoolkit:odfdom-java org.olap4j:olap4j org.olap4j:olap4j-xmla org.owasp.encoder:encoder org.postgresql:postgresql org.safehaus.jug:jug-lgpl org.samba.jcifs:jcifs org.scannotation:scannotation org.slf4j:slf4j-api org.apache.logging.log4j:log4j-slf4j2-impl org.snmp4j:snmp4j org.springframework.security:spring-security-core org.springframework:spring-core org.springframework:spring-beans org.springframework:spring-context org.springframework:spring-aop org.springframework:spring-expression org.syslog4j:syslog4j org.w3c.css:sac org.xerial.snappy:snappy-java org.yaml:snakeyaml pentaho-kettle:kettle-core pentaho-kettle:kettle-engine pentaho-kettle:kettle-ui-swt org.osgi:osgi.core org.pentaho:commons-xul-core org.pentaho.reporting.library:flute org.pentaho.reporting.library:libbase org.pentaho.reporting.library:libdocbundle org.pentaho.reporting.library:libfonts org.pentaho.reporting.library:libformat org.pentaho.reporting.library:libformula org.pentaho.reporting.library:libloader org.pentaho.reporting.library:libpixie org.pentaho.reporting.library:librepository org.pentaho.reporting.library:libserializer org.pentaho.reporting.library:libswing org.pentaho.reporting.library:libxml pentaho:pentaho-vfs-browser pentaho:pentaho-platform-api pentaho:pentaho-platform-core pentaho:pentaho-platform-extensions pentaho:pentaho-xul-core pentaho:metastore pentaho:mondrian org.pentaho:commons-database-model org.pentaho:pentaho-metadata org.pentaho:shim-api org.pentaho:shim-api-core org.pentaho:pentaho-registry pentaho:simple-jndi org.pentaho:pentaho-encryption-support org.pentaho.reporting.engine:classic-core org.mozilla:rhino com.wcohen:com.wcohen.secondstring stax:stax stax:stax-api sun:jlfgr org.apache.sshd:sshd-core org.apache.sshd:sshd-sftp woodstox:wstx-asl wsdl4j:wsdl4j wsdl4j:wsdl4j-qname xml-apis:xml-apis xml-apis:xml-apis-ext org.apache.kafka:kafka-clients org.mortbay.jetty:servlet-api pentaho:pentaho-metaverse-api org.apache.avro:avro-mapred org.pentaho.di.plugins:pentaho-metastore-locator-api org.pentaho.di.plugins:pentaho-kettle-repository-locator-api pentaho:pentaho-authentication-mapper-api pentaho:pentaho-authentication-mapper-impl org.pentaho.hadoop.shims:pentaho-hadoop-shims-common-dependencies org.pentaho.hadoop.shims:pentaho-hadoop-shims-common-base org.gwtproject:gwt-user ================================================ FILE: 
assemblies/pmr-libraries/src/main/resources/classes/kettle-lifecycle-listeners.xml ================================================
================================================ FILE: assemblies/pmr-libraries/src/main/resources/classes/kettle-password-encoder-plugins.xml ================================================
Kettle Password Encoder org.pentaho.support.encryption.KettleTwoWayPasswordEncoder
================================================ FILE: assemblies/pmr-libraries/src/main/resources/classes/kettle-registry-extensions.xml ================================================
================================================ FILE: assemblies/pmr-libraries/src/main/resources/classes/log4j2.xml ================================================
%d %-5p [%c] %m%n %d{ABSOLUTE} %-5p [%c{1}] %m%n
================================================ FILE: assemblies/pmr-libraries/src/main/resources/classes/org/apache/commons/vfs2/impl/providers.xml ================================================
================================================ FILE: assemblies/pmr-libraries/src/main/resources/classes/pmr.properties ================================================
isPmr=true
notificationsBeforeLoadingShim=1
================================================ FILE: assemblies/pmr-libraries/src/main/resources/simple-jndi/jdbc.properties ================================================
SampleData/type=javax.sql.DataSource
SampleData/driver=org.h2.Driver
SampleData/url=jdbc:h2:file:samples/db/sampledb;IFEXISTS=TRUE
SampleData/user=PENTAHO_USER
SampleData/password=PASSWORD
Quartz/type=javax.sql.DataSource
Quartz/driver=org.hsqldb.jdbcDriver
Quartz/url=jdbc:hsqldb:hsql://localhost/quartz
Quartz/user=pentaho_user
Quartz/password=password
Hibernate/type=javax.sql.DataSource
Hibernate/driver=org.hsqldb.jdbcDriver
Hibernate/url=jdbc:hsqldb:hsql://localhost/hibernate
Hibernate/user=hibuser
Hibernate/password=password
Shark/type=javax.sql.DataSource
Shark/driver=org.hsqldb.jdbcDriver
Shark/url=jdbc:hsqldb:hsql://localhost/shark
Shark/user=sa
Shark/password=
PDI_Operations_Mart/type=javax.sql.DataSource
PDI_Operations_Mart/driver=org.postgresql.Driver
PDI_Operations_Mart/url=jdbc:postgresql://localhost:5432/hibernate?searchpath=pentaho_operations_mart
PDI_Operations_Mart/user=hibuser
PDI_Operations_Mart/password=password
live_logging_info/type=javax.sql.DataSource
live_logging_info/driver=org.postgresql.Driver
live_logging_info/url=jdbc:postgresql://localhost:5432/hibernate?searchpath=pentaho_dilogs
live_logging_info/user=hibuser
live_logging_info/password=password
================================================ FILE: assemblies/pom.xml ================================================
4.0.0 pentaho pentaho-big-data-parent 11.1.0.0-SNAPSHOT pentaho-big-data-assemblies 11.1.0.0-SNAPSHOT pom samples pmr-libraries pentaho-big-data-plugin org.apache.maven.plugins maven-enforcer-plugin 3.0.0 enforce-versions enforce [3.6.0,) [11,) assembly-skip-tests skipTests true true plugin-only pentaho-big-data-plugin non-big-data-plugin samples pmr-libraries
================================================ FILE: assemblies/samples/pom.xml ================================================
4.0.0 pentaho pentaho-big-data-assemblies 11.1.0.0-SNAPSHOT pentaho-big-data-plugin-samples 11.1.0.0-SNAPSHOT pom Apache License, Version 2.0 http://www.apache.org/licenses/LICENSE-2.0.txt repo A business-friendly OSS license scm:git:git@github.com:${github.user}/${project.artifactId}.git
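The simple-jndi jdbc.properties dumped above defines, by name, the data sources that the PMR runtime and the samples refer to. As a minimal sketch of how one of those entries is typically resolved at runtime, assuming a simple-jndi initial context factory is configured on the classpath (the exact lookup name, plain "SampleData" here versus a "jdbc/SampleData"-style path, depends on that configuration), the illustrative class below is not part of this repository:

import javax.naming.InitialContext;
import javax.sql.DataSource;
import java.sql.Connection;

public class SampleDataLookup {
  public static void main( String[] args ) throws Exception {
    // simple-jndi assembles the DataSource from SampleData/type, /driver, /url, /user and /password
    InitialContext ctx = new InitialContext();
    DataSource ds = (DataSource) ctx.lookup( "SampleData" );
    try ( Connection connection = ds.getConnection() ) {
      System.out.println( "Connected to " + connection.getMetaData().getURL() );
    }
  }
}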
scm:git:git@github.com:${github.user}/${project.artifactId}.git scm:git:git@github.com:${github.user}/${project.artifactId}.git site maven-assembly-plugin pkg package single false ${basedir}/src/main/assembly/descriptors/samples.xml
================================================ FILE: assemblies/samples/src/main/assembly/descriptors/samples.xml ================================================
package samples zip ${project.basedir}/src/main/resources .
================================================ FILE: assemblies/samples/src/main/resources/.kettle-ignore ================================================
================================================ FILE: assemblies/samples/src/main/resources/jobs/hadoop/Hadoop Job Executor 2 adv.kjb ================================================
Hadoop Job Executor 2 adv 0 / - 2010/07/12 13:45:27.737 - 2010/07/12 13:45:27.737 ID_JOB Y ID_JOB CHANNEL_ID Y CHANNEL_ID JOBNAME Y JOBNAME STATUS Y STATUS LINES_READ Y LINES_READ LINES_WRITTEN Y LINES_WRITTEN LINES_UPDATED Y LINES_UPDATED LINES_INPUT Y LINES_INPUT LINES_OUTPUT Y LINES_OUTPUT LINES_REJECTED Y LINES_REJECTED ERRORS Y ERRORS STARTDATE Y STARTDATE ENDDATE Y ENDDATE LOGDATE Y LOGDATE DEPDATE Y DEPDATE REPLAYDATE Y REPLAYDATE LOG_FIELD Y LOG_FIELD EXECUTING_SERVER N EXECUTING_SERVER EXECUTING_USER N EXECUTING_USER START_JOB_ENTRY N START_JOB_ENTRY CLIENT N CLIENT
ID_BATCH Y ID_BATCH CHANNEL_ID Y CHANNEL_ID LOG_DATE Y LOG_DATE JOBNAME Y TRANSNAME JOBENTRYNAME Y STEPNAME LINES_READ Y LINES_READ LINES_WRITTEN Y LINES_WRITTEN LINES_UPDATED Y LINES_UPDATED LINES_INPUT Y LINES_INPUT LINES_OUTPUT Y LINES_OUTPUT LINES_REJECTED Y LINES_REJECTED ERRORS Y ERRORS RESULT Y RESULT NR_RESULT_ROWS Y NR_RESULT_ROWS NR_RESULT_FILES Y NR_RESULT_FILES LOG_FIELD N LOG_FIELD COPY_NR N COPY_NR
ID_BATCH Y ID_BATCH CHANNEL_ID Y CHANNEL_ID LOG_DATE Y LOG_DATE LOGGING_OBJECT_TYPE Y LOGGING_OBJECT_TYPE OBJECT_NAME Y OBJECT_NAME OBJECT_COPY Y OBJECT_COPY REPOSITORY_DIRECTORY Y REPOSITORY_DIRECTORY FILENAME Y FILENAME OBJECT_ID Y OBJECT_ID OBJECT_REVISION Y OBJECT_REVISION PARENT_CHANNEL_ID Y PARENT_CHANNEL_ID ROOT_CHANNEL_ID Y ROOT_CHANNEL_ID
ID_JOB_RUN Y ID_JOB_RUN ID_JOB Y ID_JOB JOBNAME Y JOBNAME NAMESPACE Y NAMESPACE CHECKPOINT_NAME Y CHECKPOINT_NAME CHECKPOINT_COPYNR Y CHECKPOINT_COPYNR ATTEMPT_NR Y ATTEMPT_NR JOB_RUN_START_DATE Y JOB_RUN_START_DATE LOGDATE Y LOGDATE RESULT_XML Y RESULT_XML PARAMETER_XML Y PARAMETER_XML N WordCount - Advanced HadoopJobExecutorPlugin hadoopjob - wordcount2 N ./samples/jobs/hadoop/pentaho-mapreduce2-sample.jar org.pentaho.hadoop.sample.wordcount.WordCount2 /wordcount/input /wordcount/output N Y 5 60 hadoopjob - wordcount2 org.pentaho.hadoop.sample.wordcount.WordCount2$Map org.pentaho.hadoop.sample.wordcount.WordCount2$Reduce org.pentaho.hadoop.sample.wordcount.WordCount2$Reduce /wordcount/input org.apache.hadoop.mapreduce.lib.input.TextInputFormat /wordcount/output org.apache.hadoop.io.Text org.apache.hadoop.io.IntWritable org.apache.hadoop.mapreduce.lib.output.TextOutputFormat localhost 8020 localhost 8032 2 1 N Y 0 478 290 START SPECIAL Y N N 0 0 60 12 0 1 1 N Y 0 56 290 Clean Output DELETE_FOLDERS N success_if_no_errors 10 hdfs://hadoop-server:8020/wordcount/output N Y 0 252 290 Success SUCCESS N Y 0 678 290 START Clean Output 0 0 Y Y Y Clean Output WordCount - Advanced 0 0 Y Y Y WordCount - Advanced Success 0 0 Y Y Y Cleans up the output directory 191 236 183 23 Microsoft Sans Serif 8 N N 0 0 0 255 255 0 100 100 100 Y Hadoop WordCount MapReduce Job - Edit the Input and Output directory paths - Choose Hadoop Cluster 399 353 250 49 Microsoft Sans Serif 8 N N 0 0 0 255 255 0 100 100 100 Y Monitor the logs for progress (if blocking option is selected) 610 222 178 36 Microsoft Sans Serif 8 N N 0 0 0 255 255 0 100 100 100 Y SETUP INSTRUCTIONS: 1. Update the HDFS path within the 'Clean Output' step to match your Hadoop server location and path to where you intend to generate output from the wordcount example 2. Create an input directory in HDFS and place text file(s) in the input directory that you want to use to test the wordcount example 3. Update the 'Wordcount - Advanced' step (Job Setup and Cluster tabs) to configure the correct paths and server name including: - Input Path - the path in HDFS from which to read files for counting - Output Path - where the processed count of words will be placed *Note: Source code for the sample jar can be found alongside this sample in your samples directory. 15 26 991 114 Microsoft Sans Serif 8 N N 0 0 0 255 165 0 100 100 100 Y JobRestart UniqueConnections N ================================================ FILE: assemblies/samples/src/main/resources/jobs/hadoop/Hadoop Job Executor adv.kjb ================================================ Hadoop Job Executor adv 0 / - 2010/07/12 13:45:27.737 - 2010/07/12 13:45:27.737
ID_JOB Y ID_JOB CHANNEL_ID Y CHANNEL_ID JOBNAME Y JOBNAME STATUS Y STATUS LINES_READ Y LINES_READ LINES_WRITTEN Y LINES_WRITTEN LINES_UPDATED Y LINES_UPDATED LINES_INPUT Y LINES_INPUT LINES_OUTPUT Y LINES_OUTPUT LINES_REJECTED Y LINES_REJECTED ERRORS Y ERRORS STARTDATE Y STARTDATE ENDDATE Y ENDDATE LOGDATE Y LOGDATE DEPDATE Y DEPDATE REPLAYDATE Y REPLAYDATE LOG_FIELD Y LOG_FIELD EXECUTING_SERVER N EXECUTING_SERVER EXECUTING_USER N EXECUTING_USER START_JOB_ENTRY N START_JOB_ENTRY CLIENT N CLIENT
ID_BATCH Y ID_BATCH CHANNEL_ID Y CHANNEL_ID LOG_DATE Y LOG_DATE JOBNAME Y TRANSNAME JOBENTRYNAME Y STEPNAME LINES_READ Y LINES_READ LINES_WRITTEN Y LINES_WRITTEN LINES_UPDATED Y LINES_UPDATED LINES_INPUT Y LINES_INPUT LINES_OUTPUT Y LINES_OUTPUT LINES_REJECTED Y LINES_REJECTED ERRORS Y ERRORS RESULT Y RESULT NR_RESULT_ROWS Y NR_RESULT_ROWS NR_RESULT_FILES Y NR_RESULT_FILES LOG_FIELD N LOG_FIELD COPY_NR N COPY_NR
ID_BATCH Y ID_BATCH CHANNEL_ID Y CHANNEL_ID LOG_DATE Y LOG_DATE LOGGING_OBJECT_TYPE Y LOGGING_OBJECT_TYPE OBJECT_NAME Y OBJECT_NAME OBJECT_COPY Y OBJECT_COPY REPOSITORY_DIRECTORY Y REPOSITORY_DIRECTORY FILENAME Y FILENAME OBJECT_ID Y OBJECT_ID OBJECT_REVISION Y OBJECT_REVISION PARENT_CHANNEL_ID Y PARENT_CHANNEL_ID ROOT_CHANNEL_ID Y ROOT_CHANNEL_ID
ID_JOB_RUN Y ID_JOB_RUN ID_JOB Y ID_JOB JOBNAME Y JOBNAME NAMESPACE Y NAMESPACE CHECKPOINT_NAME Y CHECKPOINT_NAME CHECKPOINT_COPYNR Y CHECKPOINT_COPYNR ATTEMPT_NR Y ATTEMPT_NR JOB_RUN_START_DATE Y JOB_RUN_START_DATE LOGDATE Y LOGDATE RESULT_XML Y RESULT_XML PARAMETER_XML Y PARAMETER_XML N WordCount - Advanced HadoopJobExecutorPlugin hadoopjob - wordcount N ./samples/jobs/hadoop/pentaho-mapreduce-sample.jar --input=/wordcount/input --output=/wordcount/output --hdfsHost=hadoop-server:8020 --jobTrackerHost=hadoop-server:8021 N Y 5 hadoopjob - wordcount org.pentaho.hadoop.sample.wordcount.WordCountMapper org.pentaho.hadoop.sample.wordcount.WordCountReducer org.pentaho.hadoop.sample.wordcount.WordCountReducer /wordcount/input org.apache.hadoop.mapred.TextInputFormat /wordcount/output org.apache.hadoop.io.Text org.apache.hadoop.io.IntWritable org.apache.hadoop.mapred.TextOutputFormat hadoop-server 8020 hadoop-server 8021 2 1 pentaho.hadoop.property.name1 pentaho.hadoop.property.value1 pentaho.hadoop.property.name2 pentaho.hadoop.property.value2 N Y 0 478 290 START SPECIAL Y N N 0 0 60 12 0 1 1 N Y 0 56 290 Clean Output DELETE_FOLDERS N success_if_no_errors 10 hdfs://hadoop-server:8020/wordcount/output N Y 0 252 290 Success SUCCESS N Y 0 678 290 START Clean Output 0 0 Y Y Y Clean Output WordCount - Advanced 0 0 Y Y Y WordCount - Advanced Success 0 0 Y Y Y Cleans up the output directory 191 236 156 22 Microsoft Sans Serif 8 N N 0 0 0 255 255 0 100 100 100 Y Hadoop WordCount MapReduce Job - Edit the Input and Output directory paths - Edit the HDFS hostname - Edit the Job Tracker hostname 399 353 219 60 Microsoft Sans Serif 8 N N 0 0 0 255 255 0 100 100 100 Y Monitor the logs for progress (if blocking option is selected) 610 222 156 35 Microsoft Sans Serif 8 N N 0 0 0 255 255 0 100 100 100 Y SETUP INSTRUCTIONS: 1. Update the HDFS path within the 'Clean Output' step to match your Hadoop server location and path to where you intend to generate output from the wordcount example 2. Create an input directory in HDFS and place text file(s) in the input directory that you want to use to test the wordcount example 3. Update the 'Wordcount - Advanced' step (Job Setup and Cluster tabs) to configure the correct paths and server name including: - Input Path - the path in HDFS from which to read files for counting - Output Path - where the processed count of words will be placed - HDFS Hostname - Job Tracker Hostname *Note: Source code for the sample jar can be found alongside this sample in your samples directory. 15 26 578 100 Microsoft Sans Serif 8 N N 0 0 0 255 165 0 100 100 100 Y ================================================ FILE: assemblies/samples/src/main/resources/jobs/hadoop/Hadoop Job Executor simple.kjb ================================================ Hadoop Job Executor simple 0 / - 2010/07/12 13:45:27.737 - 2010/07/12 13:45:27.737
ID_JOBYID_JOBCHANNEL_IDYCHANNEL_IDJOBNAMEYJOBNAMESTATUSYSTATUSLINES_READYLINES_READLINES_WRITTENYLINES_WRITTENLINES_UPDATEDYLINES_UPDATEDLINES_INPUTYLINES_INPUTLINES_OUTPUTYLINES_OUTPUTLINES_REJECTEDYLINES_REJECTEDERRORSYERRORSSTARTDATEYSTARTDATEENDDATEYENDDATELOGDATEYLOGDATEDEPDATEYDEPDATEREPLAYDATEYREPLAYDATELOG_FIELDYLOG_FIELD
ID_BATCHYID_BATCHCHANNEL_IDYCHANNEL_IDLOG_DATEYLOG_DATEJOBNAMEYTRANSNAMEJOBENTRYNAMEYSTEPNAMELINES_READYLINES_READLINES_WRITTENYLINES_WRITTENLINES_UPDATEDYLINES_UPDATEDLINES_INPUTYLINES_INPUTLINES_OUTPUTYLINES_OUTPUTLINES_REJECTEDYLINES_REJECTEDERRORSYERRORSRESULTYRESULTNR_RESULT_ROWSYNR_RESULT_ROWSNR_RESULT_FILESYNR_RESULT_FILESLOG_FIELDNLOG_FIELDCOPY_NRNCOPY_NR
ID_BATCHYID_BATCHCHANNEL_IDYCHANNEL_IDLOG_DATEYLOG_DATELOGGING_OBJECT_TYPEYLOGGING_OBJECT_TYPEOBJECT_NAMEYOBJECT_NAMEOBJECT_COPYYOBJECT_COPYREPOSITORY_DIRECTORYYREPOSITORY_DIRECTORYFILENAMEYFILENAMEOBJECT_IDYOBJECT_IDOBJECT_REVISIONYOBJECT_REVISIONPARENT_CHANNEL_IDYPARENT_CHANNEL_IDROOT_CHANNEL_IDYROOT_CHANNEL_ID N WordCount - Simple HadoopJobExecutorPlugin PDI Hadoop - WordCount - Simple Y ./samples/jobs/hadoop/pentaho-mapreduce-sample.jar org.pentaho.hadoop.sample.wordcount.WordCount --input=/wordcount/input --output=/wordcount/output --hdfsHost=hadoop-server:8020 --jobTrackerHost=hadoop-server:8021 Y Y 5 1 PDI Hadoop - WordCount - Simple org.pentaho.hadoop.sample.wordcount.WordCountMapper org.pentaho.hadoop.sample.wordcount.WordCountReducer org.pentaho.hadoop.sample.wordcount.WordCountReducer /wordcount/input org.apache.hadoop.mapred.TextInputFormat /wordcount/output org.apache.hadoop.io.Text org.apache.hadoop.io.IntWritable org.apache.hadoop.mapred.TextOutputFormat hadoop-server 8020 hadoop-server 8021 2 1 pentaho.hadoop.property.name1 pentaho.hadoop.property.value1 pentaho.hadoop.property.name2 pentaho.hadoop.property.value2 N Y 0 508 208 START SPECIAL Y N N 0 0 60 12 0 1 1 N Y 0 87 208 Clean Output DELETE_FOLDERS N success_if_no_errors 10 hdfs://hadoop-server:8020/wordcount/output N Y 0 283 208 Success SUCCESS N Y 0 709 208 START Clean Output 0 0 Y Y Y Clean Output WordCount - Simple 0 0 Y Y Y WordCount - Simple Success 0 0 Y Y Y Cleans up the output directory 216 145 161 28 Microsoft Sans Serif 8 N N 0 0 0 255 255 0 100 100 100 Y Hadoop WordCount MapReduce Job 421 275 197 28 Microsoft Sans Serif 8 N N 0 0 0 255 255 0 100 100 100 Y SETUP INSTRUCTIONS: 1. Update the HDFS path within the 'Clean Output' step to match your Hadoop server location and path to where you intend to generate output from the wordcount example 2. Create an input directory in HDFS and place text file(s) in the input directory that you want to use to test the wordcount example 3. Update the 'Wordcount - Simple' step to provide the appropriate input and output directory locations in HDFS (defined in command line interface) *Note: Source code for the sample jar can be found alongside this sample in your samples directory. 22 28 807 76 Arial 10 N N 0 0 0 255 165 0 100 100 100 Y ================================================ FILE: assemblies/samples/src/main/resources/jobs/hadoop/Pentaho MapReduce - weblogs.kjb ================================================ Pentaho MapReduce - weblogs 0 / - 2010/07/19 21:35:45.843 - 2010/07/19 21:35:45.843
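The SETUP INSTRUCTIONS embedded in the wordcount sample jobs above all assume that a /wordcount/input directory already exists in HDFS and contains some text files. A minimal sketch of preparing that directory with the Hadoop FileSystem API, using the hadoop-server:8020 default from the samples (the class name and the local file name are illustrative, not part of this repository):

import java.net.URI;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class PrepareWordCountInput {
  public static void main( String[] args ) throws Exception {
    // hdfs://hadoop-server:8020 and /wordcount/input mirror the sample job defaults; adjust to your cluster
    try ( FileSystem fs = FileSystem.get( URI.create( "hdfs://hadoop-server:8020" ), new Configuration() ) ) {
      Path input = new Path( "/wordcount/input" );
      fs.mkdirs( input );                                      // same effect as: hdfs dfs -mkdir -p /wordcount/input
      fs.copyFromLocalFile( new Path( "sample-input.txt" ), input );
    }
  }
}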
ID_JOBYID_JOBCHANNEL_IDYCHANNEL_IDJOBNAMEYJOBNAMESTATUSYSTATUSLINES_READYLINES_READLINES_WRITTENYLINES_WRITTENLINES_UPDATEDYLINES_UPDATEDLINES_INPUTYLINES_INPUTLINES_OUTPUTYLINES_OUTPUTLINES_REJECTEDYLINES_REJECTEDERRORSYERRORSSTARTDATEYSTARTDATEENDDATEYENDDATELOGDATEYLOGDATEDEPDATEYDEPDATEREPLAYDATEYREPLAYDATELOG_FIELDYLOG_FIELD
ID_BATCHYID_BATCHCHANNEL_IDYCHANNEL_IDLOG_DATEYLOG_DATEJOBNAMEYTRANSNAMEJOBENTRYNAMEYSTEPNAMELINES_READYLINES_READLINES_WRITTENYLINES_WRITTENLINES_UPDATEDYLINES_UPDATEDLINES_INPUTYLINES_INPUTLINES_OUTPUTYLINES_OUTPUTLINES_REJECTEDYLINES_REJECTEDERRORSYERRORSRESULTYRESULTNR_RESULT_ROWSYNR_RESULT_ROWSNR_RESULT_FILESYNR_RESULT_FILESLOG_FIELDNLOG_FIELDCOPY_NRNCOPY_NR
ID_BATCHYID_BATCHCHANNEL_IDYCHANNEL_IDLOG_DATEYLOG_DATELOGGING_OBJECT_TYPEYLOGGING_OBJECT_TYPEOBJECT_NAMEYOBJECT_NAMEOBJECT_COPYYOBJECT_COPYREPOSITORY_DIRECTORYYREPOSITORY_DIRECTORYFILENAMEYFILENAMEOBJECT_IDYOBJECT_IDOBJECT_REVISIONYOBJECT_REVISIONPARENT_CHANNEL_IDYPARENT_CHANNEL_IDROOT_CHANNEL_IDYROOT_CHANNEL_ID N Pentaho MapReduce (Web Log Parsing) HadoopTransJobExecutorPlugin Web Logs- Number of HTTP Methods by Month ${Internal.Job.Filename.Directory}/weblogs-mapper.ktr N ${Internal.Job.Filename.Directory}/weblogs-reducer.ktr N Hadoop Input Hadoop Output Hadoop Input Hadoop Output Y 5 /weblogs/input org.apache.hadoop.mapred.TextInputFormat /weblogs/output Y N N N N org.apache.hadoop.mapred.TextOutputFormat hadoop-server 8020 hadoop-server 8021 2 1 /var/tmp N Y 0 337 286 START SPECIAL Y N N 0 0 60 12 0 1 1 N Y 0 25 286 Success SUCCESS N Y 0 493 286 Copy input files to HDFS HadoopCopyFilesPlugin Y N N N N N N Y ${Internal.Job.Filename.Directory}/files/ hdfs://hadoop-server/weblogs/input ([^\s]+(\.(?i)(log))$) N Y 0 181 286 Failure ABORT N Y 0 493 408 Copy input files to HDFS Pentaho MapReduce (Web Log Parsing) 0 0 Y Y N START Copy input files to HDFS 0 0 Y Y Y Pentaho MapReduce (Web Log Parsing) Success 0 0 Y Y N Pentaho MapReduce (Web Log Parsing) Failure 0 0 Y N N This example: - Reads one or more weblogs from an input directory in HDFS - Uses the Transformation Job Executor to generate a new MapReduce job in Hadoop calling - 'weblogs-mapper.ktr' to parse the weblog and generate keys based on the Year and Month as part of the mapping phase - 'weblogs-reducer.ktr' to aggregate all page hits by Year and Month (our key) as part of the reducing phase SETUP INSTRUCTIONS: 1. Update the 'Copy input files to HDFS' step - update the source path to match the location of your PDI installation directory - update the target path to be the location of your input directory in HDFS where the job will read the weblog files from 2. Update the 'Pentaho MapReduce' step (Job Setup and Cluster tabs) to configure the correct paths and server names including: - Input Path - the path in HDFS from which to read files for counting - Output Path - where the processed count of words will be placed - HDFS Hostname - Job Tracker Hostname 3. Update the 'Delete input files' step to point to your input directory location in HDFS 12 4 707 234 Microsoft Sans Serif 12 N N 0 0 0 255 165 0 100 100 100 Y ================================================ FILE: assemblies/samples/src/main/resources/jobs/hadoop/Pentaho MapReduce - wordcount.kjb ================================================ Pentaho MapReduce - wordcount 0 / - 2010/07/19 21:35:45.843 - 2010/07/19 21:35:45.843
ID_JOBYID_JOBCHANNEL_IDYCHANNEL_IDJOBNAMEYJOBNAMESTATUSYSTATUSLINES_READYLINES_READLINES_WRITTENYLINES_WRITTENLINES_UPDATEDYLINES_UPDATEDLINES_INPUTYLINES_INPUTLINES_OUTPUTYLINES_OUTPUTLINES_REJECTEDYLINES_REJECTEDERRORSYERRORSSTARTDATEYSTARTDATEENDDATEYENDDATELOGDATEYLOGDATEDEPDATEYDEPDATEREPLAYDATEYREPLAYDATELOG_FIELDYLOG_FIELD
ID_BATCHYID_BATCHCHANNEL_IDYCHANNEL_IDLOG_DATEYLOG_DATEJOBNAMEYTRANSNAMEJOBENTRYNAMEYSTEPNAMELINES_READYLINES_READLINES_WRITTENYLINES_WRITTENLINES_UPDATEDYLINES_UPDATEDLINES_INPUTYLINES_INPUTLINES_OUTPUTYLINES_OUTPUTLINES_REJECTEDYLINES_REJECTEDERRORSYERRORSRESULTYRESULTNR_RESULT_ROWSYNR_RESULT_ROWSNR_RESULT_FILESYNR_RESULT_FILESLOG_FIELDNLOG_FIELDCOPY_NRNCOPY_NR
ID_BATCHYID_BATCHCHANNEL_IDYCHANNEL_IDLOG_DATEYLOG_DATELOGGING_OBJECT_TYPEYLOGGING_OBJECT_TYPEOBJECT_NAMEYOBJECT_NAMEOBJECT_COPYYOBJECT_COPYREPOSITORY_DIRECTORYYREPOSITORY_DIRECTORYFILENAMEYFILENAMEOBJECT_IDYOBJECT_IDOBJECT_REVISIONYOBJECT_REVISIONPARENT_CHANNEL_IDYPARENT_CHANNEL_IDROOT_CHANNEL_IDYROOT_CHANNEL_ID N Pentaho MapReduce (Wordcount) HadoopTransJobExecutorPlugin Pentaho MapReduce (Wordcount) ${Internal.Job.Filename.Directory}/wordcount-mapper.ktr N ${Internal.Job.Filename.Directory}/wordcount-reducer.ktr N Hadoop Input Hadoop Output Hadoop Input Hadoop Output Y 5 /wordcount/input org.apache.hadoop.mapred.TextInputFormat /wordcount/output Y N N N N org.apache.hadoop.mapred.TextOutputFormat hadoop-server 8020 hadoop-server 8021 2 1 /var/tmp N Y 0 299 158 START SPECIAL Y N N 0 0 60 12 0 1 1 N Y 0 19 158 Failure ABORT N Y 0 440 292 Success SUCCESS N Y 0 439 158 Copy Files to HDFS HadoopCopyFilesPlugin Y N N N N N N Y ./ hdfs://hadoop-server:8020/wordcount/input .*README.* N Y 0 159 158 Pentaho MapReduce (Wordcount) Success 0 0 Y Y N Pentaho MapReduce (Wordcount) Failure 0 0 Y N N Copy Files to HDFS Pentaho MapReduce (Wordcount) 0 0 Y Y Y START Copy Files to HDFS 0 0 Y Y Y SETUP INSTRUCTIONS: 1. Create an input directory in HDFS and place text file(s) in the input directory that you want to use to test the wordcount example 2. Update the 'Pentaho MapReduce' step (Job Setup and Cluster tabs) to configure the correct paths and server names including: - Input Path - the path in HDFS from which to read files for counting - Output Path - where the processed count of words will be placed - HDFS Hostname - Job Tracker Hostname 20 40 443 73 Microsoft Sans Serif 8 N N 0 0 0 255 165 0 100 100 100 Y ================================================ FILE: assemblies/samples/src/main/resources/jobs/hadoop/Pig Script Executor tutorial local.kjb ================================================ Pig Script Executor tutorial local 0 / - 2011/08/01 15:02:59.357 - 2011/08/01 15:02:59.357
ID_JOB Y ID_JOB CHANNEL_ID Y CHANNEL_ID JOBNAME Y JOBNAME STATUS Y STATUS LINES_READ Y LINES_READ LINES_WRITTEN Y LINES_WRITTEN LINES_UPDATED Y LINES_UPDATED LINES_INPUT Y LINES_INPUT LINES_OUTPUT Y LINES_OUTPUT LINES_REJECTED Y LINES_REJECTED ERRORS Y ERRORS STARTDATE Y STARTDATE ENDDATE Y ENDDATE LOGDATE Y LOGDATE DEPDATE Y DEPDATE REPLAYDATE Y REPLAYDATE LOG_FIELD Y LOG_FIELD EXECUTING_SERVER N EXECUTING_SERVER EXECUTING_USER N EXECUTING_USER START_JOB_ENTRY N START_JOB_ENTRY CLIENT N CLIENT
ID_BATCH Y ID_BATCH CHANNEL_ID Y CHANNEL_ID LOG_DATE Y LOG_DATE JOBNAME Y TRANSNAME JOBENTRYNAME Y STEPNAME LINES_READ Y LINES_READ LINES_WRITTEN Y LINES_WRITTEN LINES_UPDATED Y LINES_UPDATED LINES_INPUT Y LINES_INPUT LINES_OUTPUT Y LINES_OUTPUT LINES_REJECTED Y LINES_REJECTED ERRORS Y ERRORS RESULT Y RESULT NR_RESULT_ROWS Y NR_RESULT_ROWS NR_RESULT_FILES Y NR_RESULT_FILES LOG_FIELD N LOG_FIELD COPY_NR N COPY_NR
ID_BATCH Y ID_BATCH CHANNEL_ID Y CHANNEL_ID LOG_DATE Y LOG_DATE LOGGING_OBJECT_TYPE Y LOGGING_OBJECT_TYPE OBJECT_NAME Y OBJECT_NAME OBJECT_COPY Y OBJECT_COPY REPOSITORY_DIRECTORY Y REPOSITORY_DIRECTORY FILENAME Y FILENAME OBJECT_ID Y OBJECT_ID OBJECT_REVISION Y OBJECT_REVISION PARENT_CHANNEL_ID Y PARENT_CHANNEL_ID ROOT_CHANNEL_ID Y ROOT_CHANNEL_ID
ID_JOB_RUN Y ID_JOB_RUN ID_JOB Y ID_JOB JOBNAME Y JOBNAME NAMESPACE Y NAMESPACE CHECKPOINT_NAME Y CHECKPOINT_NAME CHECKPOINT_COPYNR Y CHECKPOINT_COPYNR ATTEMPT_NR Y ATTEMPT_NR JOB_RUN_START_DATE Y JOB_RUN_START_DATE LOGDATE Y LOGDATE RESULT_XML Y RESULT_XML PARAMETER_XML Y PARAMETER_XML N Pig Script Executor HadoopPigScriptExecutorPlugin localhost 8020 localhost 8021 ./samples/jobs/hadoop/script1-local-mod.pig Y Y excite_small ./samples/jobs/hadoop/excite-small.log udf_jar ./samples/jobs/hadoop/tutorial.jar N Y 0 560 220 Success SUCCESS N Y 0 730 220 START SPECIAL Y N N 0 0 60 12 0 1 1 N Y 0 40 220 Clean Output DELETE_FOLDERS N success_if_no_errors 10 ./script1-local-results.txt N Y 0 400 220 Pig Script Executor Success 0 0 Y Y N Clean Output Pig Script Executor 0 0 Y Y Y START Clean Output 0 0 Y Y Y Cleans the output directory. 360 180 102 20 Microsoft Sans Serif 8 N N 0 0 0 255 255 0 100 100 100 Y This job runs the excite log processing example (script1) from the Pig tutorial (http://wiki.apache.org/pig/PigTutorial) locally - i.e. no hadoop server needed 40 70 526 37 Microsoft Sans Serif 8 N N 0 0 0 255 165 0 100 100 100 Y ================================================ FILE: assemblies/samples/src/main/resources/jobs/hadoop/Pig Script Executor tutorial.kjb ================================================ Pig Script Executor tutorial 0 / - 2011/08/01 15:02:59.357 - 2011/08/01 15:02:59.357
ID_JOB Y ID_JOB CHANNEL_ID Y CHANNEL_ID JOBNAME Y JOBNAME STATUS Y STATUS LINES_READ Y LINES_READ LINES_WRITTEN Y LINES_WRITTEN LINES_UPDATED Y LINES_UPDATED LINES_INPUT Y LINES_INPUT LINES_OUTPUT Y LINES_OUTPUT LINES_REJECTED Y LINES_REJECTED ERRORS Y ERRORS STARTDATE Y STARTDATE ENDDATE Y ENDDATE LOGDATE Y LOGDATE DEPDATE Y DEPDATE REPLAYDATE Y REPLAYDATE LOG_FIELD Y LOG_FIELD EXECUTING_SERVER N EXECUTING_SERVER EXECUTING_USER N EXECUTING_USER START_JOB_ENTRY N START_JOB_ENTRY CLIENT N CLIENT
ID_BATCH Y ID_BATCH CHANNEL_ID Y CHANNEL_ID LOG_DATE Y LOG_DATE JOBNAME Y TRANSNAME JOBENTRYNAME Y STEPNAME LINES_READ Y LINES_READ LINES_WRITTEN Y LINES_WRITTEN LINES_UPDATED Y LINES_UPDATED LINES_INPUT Y LINES_INPUT LINES_OUTPUT Y LINES_OUTPUT LINES_REJECTED Y LINES_REJECTED ERRORS Y ERRORS RESULT Y RESULT NR_RESULT_ROWS Y NR_RESULT_ROWS NR_RESULT_FILES Y NR_RESULT_FILES LOG_FIELD N LOG_FIELD COPY_NR N COPY_NR
ID_BATCH Y ID_BATCH CHANNEL_ID Y CHANNEL_ID LOG_DATE Y LOG_DATE LOGGING_OBJECT_TYPE Y LOGGING_OBJECT_TYPE OBJECT_NAME Y OBJECT_NAME OBJECT_COPY Y OBJECT_COPY REPOSITORY_DIRECTORY Y REPOSITORY_DIRECTORY FILENAME Y FILENAME OBJECT_ID Y OBJECT_ID OBJECT_REVISION Y OBJECT_REVISION PARENT_CHANNEL_ID Y PARENT_CHANNEL_ID ROOT_CHANNEL_ID Y ROOT_CHANNEL_ID
ID_JOB_RUN Y ID_JOB_RUN ID_JOB Y ID_JOB JOBNAME Y JOBNAME NAMESPACE Y NAMESPACE CHECKPOINT_NAME Y CHECKPOINT_NAME CHECKPOINT_COPYNR Y CHECKPOINT_COPYNR ATTEMPT_NR Y ATTEMPT_NR JOB_RUN_START_DATE Y JOB_RUN_START_DATE LOGDATE Y LOGDATE RESULT_XML Y RESULT_XML PARAMETER_XML Y PARAMETER_XML N Pig Script Executor HadoopPigScriptExecutorPlugin hadoop-server 8020 hadoop-server 8021 ./samples/jobs/hadoop/script1-hadoop-mod.pig Y N udf_jar ./samples/jobs/hadoop/tutorial.jar N Y 0 560 220 Success SUCCESS N Y 0 730 220 START SPECIAL Y N N 0 0 60 12 0 1 1 N Y 0 40 220 Hadoop Copy Files HadoopCopyFilesPlugin Y N N N N N N N ./samples/jobs/hadoop/excite.log.bz2 hdfs://hadoop-server:8020/. N Y 0 220 220 Clean Output DELETE_FOLDERS N success_if_no_errors 10 hdfs://hadoop-server:8020/script1-hadoop-results N Y 0 400 220 Pig Script Executor Success 0 0 Y Y N START Hadoop Copy Files 0 0 Y Y Y Hadoop Copy Files Clean Output 0 0 Y Y Y Clean Output Pig Script Executor 0 0 Y Y Y Copies the excite log file to hdfs. 180 180 118 20 Microsoft Sans Serif 8 N N 0 0 0 255 255 0 100 100 100 Y Cleans the output directory. 360 180 102 20 Microsoft Sans Serif 8 N N 0 0 0 255 255 0 100 100 100 Y This job runs the excite log processing example (script1) from the Pig tutorial: http://wiki.apache.org/pig/PigTutorial SETUP INSTRUCTIONS: 1. Update the HDFS path within the 'Hadoop Copy Files' and 'Clean Output' step to match your Hadoop server location and path to where you intend to generate output 2. Update the 'Pig Script Executor' step with the names and ports of the host(s) running your HDFS name node and Job Tracker 30 30 565 64 Microsoft Sans Serif 8 N N 0 0 0 255 165 0 100 100 100 Y ================================================ FILE: assemblies/samples/src/main/resources/jobs/hadoop/emr_job.kjb ================================================ emr_job / - 2010/09/05 20:16:13.032 - 2010/09/05 20:16:13.032
ID_JOBYID_JOBCHANNEL_IDYCHANNEL_IDJOBNAMEYJOBNAMESTATUSYSTATUSLINES_READYLINES_READLINES_WRITTENYLINES_WRITTENLINES_UPDATEDYLINES_UPDATEDLINES_INPUTYLINES_INPUTLINES_OUTPUTYLINES_OUTPUTLINES_REJECTEDYLINES_REJECTEDERRORSYERRORSSTARTDATEYSTARTDATEENDDATEYENDDATELOGDATEYLOGDATEDEPDATEYDEPDATEREPLAYDATEYREPLAYDATELOG_FIELDYLOG_FIELD
ID_BATCHYID_BATCHCHANNEL_IDYCHANNEL_IDLOG_DATEYLOG_DATEJOBNAMEYTRANSNAMEJOBENTRYNAMEYSTEPNAMELINES_READYLINES_READLINES_WRITTENYLINES_WRITTENLINES_UPDATEDYLINES_UPDATEDLINES_INPUTYLINES_INPUTLINES_OUTPUTYLINES_OUTPUTLINES_REJECTEDYLINES_REJECTEDERRORSYERRORSRESULTYRESULTNR_RESULT_ROWSYNR_RESULT_ROWSNR_RESULT_FILESYNR_RESULT_FILESLOG_FIELDNLOG_FIELD
ID_BATCHYID_BATCHCHANNEL_IDYCHANNEL_IDLOG_DATEYLOG_DATELOGGING_OBJECT_TYPEYLOGGING_OBJECT_TYPEOBJECT_NAMEYOBJECT_NAMEOBJECT_COPYYOBJECT_COPYREPOSITORY_DIRECTORYYREPOSITORY_DIRECTORYFILENAMEYFILENAMEOBJECT_IDYOBJECT_IDOBJECT_REVISIONYOBJECT_REVISIONPARENT_CHANNEL_IDYPARENT_CHANNEL_IDROOT_CHANNEL_IDYROOT_CHANNEL_ID N Amazon EMR Job Executor EMRJobExecutorPlugin emr job executor3 ./samples/jobs/hadoop/wordcount.jar s3://s3/<bucket_name> 1 --input s3://mddwordcount/input --output s3://mddwordcount/output Y 15 emr job executor3 N Y 0 274 66 START SPECIAL Y N N 0 0 60 12 0 1 1 N Y 0 66 66 Success SUCCESS N Y 0 457 66 START Amazon EMR Job Executor 0 0 Y Y Y Amazon EMR Job Executor Success 0 0 Y Y N ================================================ FILE: assemblies/samples/src/main/resources/jobs/hadoop/excite-small.log ================================================ 2A9EABFB35F5B954 970916105432 +md foods +proteins BED75271605EBD0C 970916001949 yahoo chat BED75271605EBD0C 970916001954 yahoo chat BED75271605EBD0C 970916003523 yahoo chat BED75271605EBD0C 970916011322 yahoo search BED75271605EBD0C 970916011404 yahoo chat BED75271605EBD0C 970916011422 yahoo chat BED75271605EBD0C 970916012756 yahoo caht BED75271605EBD0C 970916012816 yahoo chat BED75271605EBD0C 970916023603 yahoo chat BED75271605EBD0C 970916025458 yahoo caht BED75271605EBD0C 970916025516 yahoo chat BED75271605EBD0C 970916030348 yahoo chat BED75271605EBD0C 970916034807 yahoo chat BED75271605EBD0C 970916040755 yahoo chat BED75271605EBD0C 970916090700 hawaii chat universe BED75271605EBD0C 970916094445 yahoo chat BED75271605EBD0C 970916191427 yahoo chat BED75271605EBD0C 970916201045 yahoo chat BED75271605EBD0C 970916201050 yahoo chat BED75271605EBD0C 970916201927 yahoo chat 824F413FA37520BF 970916184809 garter belts 824F413FA37520BF 970916184818 garter belts 824F413FA37520BF 970916184939 lingerie 824F413FA37520BF 970916185051 spiderman 824F413FA37520BF 970916185155 tommy hilfiger 824F413FA37520BF 970916185257 calgary 824F413FA37520BF 970916185513 calgary 824F413FA37520BF 970916185605 exhibitionists 824F413FA37520BF 970916190220 exhibitionists 824F413FA37520BF 970916191233 exhibitionists 7A8D9CFC957C7FCA 970916064707 duron paint 7A8D9CFC957C7FCA 970916064731 duron paint A25C8C765238184A 970916103534 brookings A25C8C765238184A 970916104751 breton liberation front A25C8C765238184A 970916105238 breton A25C8C765238184A 970916105322 breton liberation front A25C8C765238184A 970916105539 breton A25C8C765238184A 970916105628 breton A25C8C765238184A 970916105723 front de liberation de la bretagne A25C8C765238184A 970916105857 front de liberation de la bretagne 6589F4342B215FD4 970916125147 afghanistan 6589F4342B215FD4 970916125158 afghanistan 6589F4342B215FD4 970916125407 afghanistan 16DE160B4FFE3B85 970916050356 eia rs232-c 16DE160B4FFE3B85 970916050645 nullmodem 16DE160B4FFE3B85 970916050807 nullmodem 563FC9A7E8A9022A 970916042826 organizational chart 563FC9A7E8A9022A 970916221456 organizational chart of uae's companies 563FC9A7E8A9022A 970916221611 organizational chart of dubai dutiy free 563FC9A7E8A9022A 970916221717 organizational charts in dubai 73D74648CC2CA35E 970916122841 salvia trip reports 893C3ADD0EFBBECB 970916074735 fleetwood mac 893C3ADD0EFBBECB 970916074811 fleetwood mac 893C3ADD0EFBBECB 970916074828 893C3ADD0EFBBECB 970916074912 893C3ADD0EFBBECB 970916074932 893C3ADD0EFBBECB 970916074936 893C3ADD0EFBBECB 970916074958 893C3ADD0EFBBECB 970916075000 893C3ADD0EFBBECB 970916075021 893C3ADD0EFBBECB 970916075042 7B99756B742D8E89 
970916070712 lionking 7B99756B742D8E89 970916070724 lionking 7B99756B742D8E89 970916072057 lionking 7B99756B742D8E89 970916072058 lionking 7B99756B742D8E89 970916072351 lionking 736D28D439E9FE2D 970916130425 zelle and larson FB3EA7AB8B51C95D 970916105346 hifly FB3EA7AB8B51C95D 970916105408 hifly 729B745475893F44 970916025053 earth pictures 729B745475893F44 970916025113 earth pictures planet 575DC994A9CF8D36 970916110752 evgeny kissin 575DC994A9CF8D36 970916110817 evgeny kissin yevgeny 575DC994A9CF8D36 970916113040 evgeny kissin yevgeny 575DC994A9CF8D36 970916113057 575DC994A9CF8D36 970916113423 575DC994A9CF8D36 970916113453 575DC994A9CF8D36 970916113556 575DC994A9CF8D36 970916113705 575DC994A9CF8D36 970916113742 575DC994A9CF8D36 970916113955 evgeny kissin yevgeny 39BF6DD421B71387 970916171516 house o fun 39BF6DD421B71387 970916171606 house o fun 519AC93F468A4BF4 970916232453 ann nicole smith 519AC93F468A4BF4 970916232544 pamela anderson 81CC31A8588135F2 970916102352 looney tunes 81CC31A8588135F2 970916102645 looney tunes daffy toons FFCA848089F3BA8C 970916100905 marilyn manson 7AE07E7F0053F0A9 970916194214 gis digitizing C1C4228EA191F401 970916082442 "bentley's luggage" C1C4228EA191F401 970916082516 luggage C1C4228EA191F401 970916083327 "tumi luggage" C1C4228EA191F401 970916083849 "tumi" 7C60C0A2EBF7A3E7 970916012828 northwestern university 7C60C0A2EBF7A3E7 970916234912 gangs 9EAF527F15CABB79 970916074242 � BCD90B7247D8FC7C 970916195153 car rental companies AF7BBE7E92E62D6E 970916123811 new jersey resources AF7BBE7E92E62D6E 970916203918 new jersey resources 320475401250000D 970916080124 northwest+ airline 2C7FDB7C4C41215D 970916184521 crowell and weedon 2C7FDB7C4C41215D 970916185017 crowell and weedon 11971F6093701B3C 970916093302 economia torino 11971F6093701B3C 970916093413 universita di torino 11971F6093701B3C 970916093506 andrea belratti 11971F6093701B3C 970916093524 andrea beltratti 11971F6093701B3C 970916093810 honig 11971F6093701B3C 970916093903 car hommes 11971F6093701B3C 970916095055 winzip 11971F6093701B3C 970916095506 martin lettau 11971F6093701B3C 970916095605 patrick de fontnouvelle 11971F6093701B3C 970916095633 de fontnouvelle 11971F6093701B3C 970916095758 ghysels 11971F6093701B3C 970916132437 icon librarian 11971F6093701B3C 970916132922 icon librarian E84833C4A26D6818 970916202849 nature's pet marketplace E84833C4A26D6818 970916204115 top drawer E84833C4A26D6818 970916204218 topdrawer E84833C4A26D6818 970916214604 tim erickson 54E8C79987B6F2F3 970916064158 quiz 54E8C79987B6F2F3 970916064229 quiz 54E8C79987B6F2F3 970916064305 paper clay 54E8C79987B6F2F3 970916064854 bath and body works 54E8C79987B6F2F3 970916065614 pregnancy 54E8C79987B6F2F3 970916074831 pregnancy 54E8C79987B6F2F3 970916075153 pregnancy 54E8C79987B6F2F3 970916075210 pregnancy 54E8C79987B6F2F3 970916075236 pregnancy 54E8C79987B6F2F3 970916075302 pregnancy pregnant 54E8C79987B6F2F3 970916080128 pregnancy pregnant 54E8C79987B6F2F3 970916080202 pregnancy pregnant 54E8C79987B6F2F3 970916080240 conceiving 54E8C79987B6F2F3 970916080404 conceiving 54E8C79987B6F2F3 970916080455 conceiving 54E8C79987B6F2F3 970916081215 conceiving 54E8C79987B6F2F3 970916081303 conceiving conceive 54E8C79987B6F2F3 970916081324 conceiving conceive 54E8C79987B6F2F3 970916081416 conceiving conceive 54E8C79987B6F2F3 970916081511 conceiving conceive 54E8C79987B6F2F3 970916214401 baby 54E8C79987B6F2F3 970916214732 baby 54E8C79987B6F2F3 970916214921 pregnant 54E8C79987B6F2F3 970916215423 pregnant 54E8C79987B6F2F3 970916220045 pregnant 
54E8C79987B6F2F3 970916220458 pregnant 54E8C79987B6F2F3 970916220600 pregnant 54E8C79987B6F2F3 970916220637 pregnant 54E8C79987B6F2F3 970916220842 pregnant 54E8C79987B6F2F3 970916221032 pregnant 54E8C79987B6F2F3 970916221119 pregnant 54E8C79987B6F2F3 970916221147 pregnant 54E8C79987B6F2F3 970916221245 pregnant 54E8C79987B6F2F3 970916221339 pregnant 54E8C79987B6F2F3 970916221607 pregnant 54E8C79987B6F2F3 970916221631 pregnant 54E8C79987B6F2F3 970916221832 pregnant 54E8C79987B6F2F3 970916222232 pregnant 54E8C79987B6F2F3 970916222621 pregnant 54E8C79987B6F2F3 970916222716 pregnant 54E8C79987B6F2F3 970916222754 pregnant 54E8C79987B6F2F3 970916222833 pregnant 54E8C79987B6F2F3 970916222858 pregnant pregnancy 54E8C79987B6F2F3 970916222938 pregnant pregnancy 54E8C79987B6F2F3 970916223107 pregnant pregnancy 54E8C79987B6F2F3 970916223708 pregnant pregnancy 3F8AAC2372F6941C 970916091301 bac 3F8AAC2372F6941C 970916091354 blood alcohol content 3F8AAC2372F6941C 970916091425 3F8AAC2372F6941C 970916091545 3F8AAC2372F6941C 970916093448 3F8AAC2372F6941C 970916093544 breathalizers 3F8AAC2372F6941C 970916093551 breathalizers 3F8AAC2372F6941C 970916093642 breathalizers 3F8AAC2372F6941C 970916093724 minors in possesion 3F8AAC2372F6941C 970916093848 minors in possesion 3F8AAC2372F6941C 970916093904 mip 3F8AAC2372F6941C 970916093932 e. lansing laws 3F8AAC2372F6941C 970916094043 east lansing laws 3F8AAC2372F6941C 970916094111 east lansing laws 3F8AAC2372F6941C 970916185828 business homepage 3F8AAC2372F6941C 970916185908 business homepage 3F8AAC2372F6941C 970916185926 intel homepage 3F8AAC2372F6941C 970916190005 intel homepage 3F8AAC2372F6941C 970916190032 intel homepage pentium processor 3F8AAC2372F6941C 970916191238 old psycological contract 3F8AAC2372F6941C 970916191308 "old psycological contract" 3F8AAC2372F6941C 970916191358 +old +psycological +contract 3F8AAC2372F6941C 970916191436 +new+psycological +contract 3F8AAC2372F6941C 970916191443 +new +psycological +contract 3F8AAC2372F6941C 970916191720 g.m 3F8AAC2372F6941C 970916191735 general motors homepage 3F8AAC2372F6941C 970916191812 general motors +homepage 3F8AAC2372F6941C 970916191833 ford motor company 3F8AAC2372F6941C 970916192820 assembly plant 0FB5866123ADFDFF 970916112103 blackpool tourist information 4B6A008308C6DE7E 970916072324 magnetic strip 4B6A008308C6DE7E 970916072408 auto lockout tools 0F8EA93654516937 970916144849 tuxedo park D4DA409F40BB9102 970916140853 san juan island D4DA409F40BB9102 970916140855 san juan island D4DA409F40BB9102 970916140950 san juan island 01F6B9CA495576BA 970916121358 salary canada 01F6B9CA495576BA 970916123855 salary canada 01F6B9CA495576BA 970916124000 salary canada 01F6B9CA495576BA 970916124134 salary canada 01F6B9CA495576BA 970916124134 salary canada 01F6B9CA495576BA 970916124156 salary canada 01F6B9CA495576BA 970916124432 salary canada 7F88C9EC4CD0BB3A 970916104228 kcchief.com 7F88C9EC4CD0BB3A 970916104256 kcchiefs.com 7F88C9EC4CD0BB3A 970916105714 pittsburgh steelers 7F88C9EC4CD0BB3A 970916105729 pittsburgh steelers E55487B7296ED015 970916102620 m�nchen AND hotel E55487B7296ED015 970916102744 m�nchen AND hotel E55487B7296ED015 970916102853 st. AND paul AND hotel E55487B7296ED015 970916103125 st. AND paul AND hotel E55487B7296ED015 970916103450 st. 
AND paul AND hotel E55487B7296ED015 970916103614 menneapolis AND hotel E55487B7296ED015 970916103627 minneapolis AND hotel 513FCC6548E3F36D 970916153728 stealth crash air show BB925FF85FF44849 970916061832 plant cell journal BB925FF85FF44849 970916061841 plant cell journal 15165C4C19E63B0A 970916201155 netmeeting 15165C4C19E63B0A 970916201320 netmeeting 15165C4C19E63B0A 970916201456 netmeeting msdownload B0274667D0A700A8 970916131323 free games B0274667D0A700A8 970916131746 bingo zone 514008DD5C88BB30 970916112944 home page proctology 514008DD5C88BB30 970916112952 home page proctology DB0CC854B82A662C 970916064732 alligator graphics DB0CC854B82A662C 970916064751 alligator graphics DB0CC854B82A662C 970916064824 alligator graphics DB0CC854B82A662C 970916064847 alligator 9A10B373FA529557 970916070208 golf pro shops 9A10B373FA529557 970916131403 9A10B373FA529557 970916131404 550D31C646EE3CFE 970916155237 medicare, certificate of need 550D31C646EE3CFE 970916155315 new york times 72270DEAFE0BF9FC 970916002029 cuhk 72270DEAFE0BF9FC 970916002038 cuhk 72270DEAFE0BF9FC 970916002221 fin6060a 3F59FEC0AD9851A5 970916053428 veltins AND bier 3F59FEC0AD9851A5 970916053552 veltins AND bier 3F59FEC0AD9851A5 970916053618 veltins AND bier 3F59FEC0AD9851A5 970916083304 david hare 3F59FEC0AD9851A5 970916083618 3F59FEC0AD9851A5 970916083745 3F59FEC0AD9851A5 970916083807 plenty hare 3F59FEC0AD9851A5 970916083950 3F59FEC0AD9851A5 970916084207 plenty hare 3F59FEC0AD9851A5 970916084340 davidhare 3F59FEC0AD9851A5 970916084929 davidhare 3F59FEC0AD9851A5 970916084949 hare plenty 3F59FEC0AD9851A5 970916085005 hare plenty david 3F59FEC0AD9851A5 970916085043 hare plenty david 3F59FEC0AD9851A5 970916091357 david hare 3F59FEC0AD9851A5 970916091428 re: hamill 3F59FEC0AD9851A5 970916091449 3F59FEC0AD9851A5 970916091459 3F59FEC0AD9851A5 970916091605 re: hamill mark 3F59FEC0AD9851A5 970916091611 3F59FEC0AD9851A5 970916091635 3F59FEC0AD9851A5 970916091738 faq hamill re: 3F59FEC0AD9851A5 970916091803 faq hamill 3F59FEC0AD9851A5 970916091838 3F59FEC0AD9851A5 970916092544 plays 3F59FEC0AD9851A5 970916092605 plays plenty 3F59FEC0AD9851A5 970916092628 plays plenty hare 3F59FEC0AD9851A5 970916092633 3F59FEC0AD9851A5 970916093104 theatre hare 3F59FEC0AD9851A5 970916093137 hamill 3F59FEC0AD9851A5 970916093154 hamill mark 3F59FEC0AD9851A5 970916095418 re. hamill 3F59FEC0AD9851A5 970916095428 re: hamill 3F59FEC0AD9851A5 970916095435 re: hamill 3F59FEC0AD9851A5 970916095454 3F59FEC0AD9851A5 970916095459 3F59FEC0AD9851A5 970916095801 david hare 3F59FEC0AD9851A5 970916095828 mark hamill E131BFC55AF4CDCE 970916200044 c:windows E131BFC55AF4CDCE 970916200609 c:windows 9CF1A20154759F8F 970916091938 "university of texas at arlington" 4F3CA140D72441BC 970916114733 "canadian conquest" 4F3CA140D72441BC 970916114940 4F3CA140D72441BC 970916132635 eyelogic 27D0C3A6B4BE62E5 970916203809 libra 27D0C3A6B4BE62E5 970916204304 sagitarius 27D0C3A6B4BE62E5 970916205125 horoscopes 349396224ECBDCBE 970916153625 "denise fitzpatrick" 349396224ECBDCBE 970916153747 "ann michael gilliam" 349396224ECBDCBE 970916153815 "ann gilliam" 349396224ECBDCBE 970916153910 "jerry noblin" 349396224ECBDCBE 970916153949 "j. noblin" 66B377662547D14A 970916065206 dr. 
ronald ennis 1A75CBE7DA62BD5F 970916183012 sugar ray guitar taps 1A75CBE7DA62BD5F 970916183025 sugar ray guitar taps floored FCE735441720FBE8 970916112750 "little people of america" FCE735441720FBE8 970916112802 "little people of america" FCE735441720FBE8 970916160435 "little people of america" FCE735441720FBE8 970916160449 "little people of america" EC6E91864359DD8D 970916164740 maytag EC6E91864359DD8D 970916164827 maytag EC6E91864359DD8D 970916165136 maytag EC6E91864359DD8D 970916165243 maytag EC6E91864359DD8D 970916165317 maytag EC6E91864359DD8D 970916165335 maytag EC6E91864359DD8D 970916171635 maytag EC6E91864359DD8D 970916171744 maytag EC6E91864359DD8D 970916171901 maytag EC6E91864359DD8D 970916172001 maytag EC6E91864359DD8D 970916172011 maytag EC6E91864359DD8D 970916172031 maytag EC6E91864359DD8D 970916172038 maytag EC6E91864359DD8D 970916172128 maytag EC6E91864359DD8D 970916172250 maytag EC6E91864359DD8D 970916172403 maytag EC6E91864359DD8D 970916172503 maytag EC6E91864359DD8D 970916172503 maytag EC6E91864359DD8D 970916172517 maytag EC6E91864359DD8D 970916172526 maytag EC6E91864359DD8D 970916172621 maytag EC6E91864359DD8D 970916172625 maytag EC6E91864359DD8D 970916172656 maytag EC6E91864359DD8D 970916172657 maytag EC6E91864359DD8D 970916172751 maytag EC6E91864359DD8D 970916172754 maytag EC6E91864359DD8D 970916172838 maytag EC6E91864359DD8D 970916172842 maytag EC6E91864359DD8D 970916172919 maytag EC6E91864359DD8D 970916173015 maytag EC6E91864359DD8D 970916173027 maytag EC6E91864359DD8D 970916173050 maytag EC6E91864359DD8D 970916173050 maytag EC6E91864359DD8D 970916173111 maytag EC6E91864359DD8D 970916173114 maytag EC6E91864359DD8D 970916173253 maytag EC6E91864359DD8D 970916173452 maytag EC6E91864359DD8D 970916173454 maytag EC6E91864359DD8D 970916173505 maytag EC6E91864359DD8D 970916173515 maytag EC6E91864359DD8D 970916173542 maytag EC6E91864359DD8D 970916173624 car EC6E91864359DD8D 970916173711 car EC6E91864359DD8D 970916173747 car EC6E91864359DD8D 970916173858 car EC6E91864359DD8D 970916173905 car EC6E91864359DD8D 970916173910 car E0D12FA14991D2D9 970916143958 ati technogies E0D12FA14991D2D9 970916144028 ati technogies F559561E697722BB 970916223640 noriko+sakai 9912390F5E1D690F 970916213048 amsterdam 9912390F5E1D690F 970916213111 amsterdam noord 9912390F5E1D690F 970916213225 amsterdam noord 8E1A8EA81FEA8A30 970916001026 stiler+skole 8E1A8EA81FEA8A30 970916002122 skolestil 8E1A8EA81FEA8A30 970916060031 lau tak wah 8E1A8EA81FEA8A30 970916060140 lau tak wah 383A51DC0D94C7F7 970916182139 bus schedule greyhound 383A51DC0D94C7F7 970916182147 bus schedule greyhound 383A51DC0D94C7F7 970916183408 bus schedule greyhound 383A51DC0D94C7F7 970916183438 fullington bus schedule CBAEB52E28985C5E 970916110525 bmi publishing 266C99B4834F4675 970916124711 probate records 266C99B4834F4675 970916124737 266C99B4834F4675 970916124756 probate records county 266C99B4834F4675 970916125012 9A33FFD53E103291 970916073204 shapeup 8CDEE772A295AA02 970916190227 ren faire 8CDEE772A295AA02 970916191951 blackpoint navato 8CDEE772A295AA02 970916192030 ren faire 8CDEE772A295AA02 970916192500 living history center 8CDEE772A295AA02 970916192553 living history center ren 6946DB5A812D6EFB 970916072247 cucumber AND daughter AND belly C68A35C476240F3D 970916130001 jacaranda AND radio C68A35C476240F3D 970916131150 jacaranda AND radio C68A35C476240F3D 970916131223 highveld stereo C68A35C476240F3D 970916131256 highveld AND stereo C68A35C476240F3D 970916131420 highveld AND stereo C68A35C476240F3D 970916131429 highveld AND stereo 
C68A35C476240F3D 970916131433 highveld AND stereo C68A35C476240F3D 970916131435 highveld AND stereo C68A35C476240F3D 970916131439 highveld AND stereo C68A35C476240F3D 970916133355 denmark 8B2065581C770F50 970916101241 martha stuart 8B2065581C770F50 970916101438 martha stuart 8B2065581C770F50 970916101522 www.martha stuart 8B2065581C770F50 970916101645 martha stuart 4077443B5801F0C3 970916164055 "garth brooks tickets" 4077443B5801F0C3 970916164359 garth brooks tickets 4077443B5801F0C3 970916182623 job openings 4077443B5801F0C3 970916182752 job openings listings 4077443B5801F0C3 970916182823 agricultural job listings 4077443B5801F0C3 970916182834 agricultural job listings employment 4077443B5801F0C3 970916182942 job listings 43A83F326ED8A531 970916135831 coldwater creek 449C383378F5C37D 970916173829 telluride.co. 449C383378F5C37D 970916173905 telluride.co. 449C383378F5C37D 970916173932 telluride.co. 414D8301A62279D8 970916200349 casino 2601DDA407398E5E 970916064850 ADDF71B56E078EC1 970916194616 pentium ii 266 setup ADDF71B56E078EC1 970916194755 pentium ii 266 setup problems ADDF71B56E078EC1 970916194829 pentium ii 266 setup problems ADDF71B56E078EC1 970916194921 pentium ii 266 setup problems ADDF71B56E078EC1 970916194943 pentium ii 266 setup problems ADDF71B56E078EC1 970916195134 pentium ii 266 setup problems ADDF71B56E078EC1 970916195232 pentium ii 266 setup problems ADDF71B56E078EC1 970916195348 pentium ii 266 setup problems ADDF71B56E078EC1 970916195408 pentium ii 266 problems ADDF71B56E078EC1 970916195433 pentium ii 266 problems ADDF71B56E078EC1 970916195511 pentium ii 266 problems ADDF71B56E078EC1 970916195529 pentium ii 266 problems ADDF71B56E078EC1 970916195551 pentium ii 266 problems ADDF71B56E078EC1 970916195606 pentium ii 266 problems ADDF71B56E078EC1 970916195626 pentium ii 266 problems ADDF71B56E078EC1 970916195726 pentium ii 266 problems ADDF71B56E078EC1 970916200042 pentium ii 266 problems ADDF71B56E078EC1 970916200102 pentium ii 266 problems 27F5BE2A36039395 970916124539 rainforest art 27F5BE2A36039395 970916124606 rainforest,art 27F5BE2A36039395 970916124631 rainforest,art 27F5BE2A36039395 970916124652 rainforest,art 27F5BE2A36039395 970916124750 art,rainforest 27F5BE2A36039395 970916125342 art,rainforest 27F5BE2A36039395 970916125431 art,science fiction 69A46F05C734BF2F 970916074141 meijiro 69A46F05C734BF2F 970916074223 "meijiro kogu" 69A46F05C734BF2F 970916074256 tokushima 69A46F05C734BF2F 970916074418 tokushima 69A46F05C734BF2F 970916074504 tokushima 69A46F05C734BF2F 970916074823 "bamboo in shikoku prefecture, japan" 69A46F05C734BF2F 970916074844 "bamboo shikoku , japan" 69A46F05C734BF2F 970916074900 "bamboo japan" DAF7A3D38ED9A343 970916033027 hiro DAF7A3D38ED9A343 970916033138 hiro yamagata DAF7A3D38ED9A343 970916033904 0C48BBEE45E646AF 970916072450 clip art 0C48BBEE45E646AF 970916072532 clip art globe\ 0C48BBEE45E646AF 970916072627 clip art 0C48BBEE45E646AF 970916080404 clip art 0C48BBEE45E646AF 970916125938 0C48BBEE45E646AF 970916144028 gifted and talented adaptations 2B737CAD0C4B125A 970916145506 2B737CAD0C4B125A 970916145924 http://www.dcadnet.com/cum.html 0567639EB8F3751C 970916161410 "conan o'brien" 0567639EB8F3751C 970916161413 "conan o'brien" C771C1E3DF333CDC 970916191209 essays- why can't we just be friends, the female perspective C771C1E3DF333CDC 970916191339 essays C771C1E3DF333CDC 970916192530 essays aed C86AA16FFD90B66C 970916035144 C86AA16FFD90B66C 970916035348 treatment cystic hygroma C86AA16FFD90B66C 970916035433 treatment cystic hygroma picture 
C86AA16FFD90B66C 970916035915 C86AA16FFD90B66C 970916040243 cystic hygroma excision pictures C86AA16FFD90B66C 970916040353 C86AA16FFD90B66C 970916040500 C86AA16FFD90B66C 970916040525 cystic hygroma video pictures C86AA16FFD90B66C 970916040559 cystic hygroma clips C86AA16FFD90B66C 970916040803 cystic hygroma clips C86AA16FFD90B66C 970916041014 cystic hygroma after surgery C86AA16FFD90B66C 970916041226 plastic surgery cystic hygroma C86AA16FFD90B66C 970916041430 cystic hygroma photograph C86AA16FFD90B66C 970916041516 altavista A93156BD79F164A4 970916140244 leafs summary A93156BD79F164A4 970916140343 rangers 3, leafs 2 A93156BD79F164A4 970916140428 rangers 3, leafs 2 A93156BD79F164A4 970916140546 A93156BD79F164A4 970916140723 A93156BD79F164A4 970916140809 leafs summary for 09/16/97 A93156BD79F164A4 970916140849 recent hockey summaries A93156BD79F164A4 970916140930 recent hockey summaries A93156BD79F164A4 970916141017 tornoto maple leafs boxscores A93156BD79F164A4 970916141541 A93156BD79F164A4 970916141716 leafs summary for last night A93156BD79F164A4 970916141745 preseason leafs summary A93156BD79F164A4 970916141803 preseason leafs summary A93156BD79F164A4 970916141835 preseason leafs summary A93156BD79F164A4 970916141900 toronto star 2833FEAF16BCF190 970916071424 "pacificnet" AND webtalk 2833FEAF16BCF190 970916071556 "pacificnet.com" AND webtalk 2833FEAF16BCF190 970916071635 198.316.217.831/webtalk.html 201490742D23909B 970916191353 "adult kiss data" 201490742D23909B 970916191442 "kiss data" 201490742D23909B 970916192154 "kiss data" 201490742D23909B 970916192803 "kiss data" 201490742D23909B 970916192920 "kiss data" C989A6531FD9EEC8 970916063105 mhsaa C989A6531FD9EEC8 970916063336 mhsaa C989A6531FD9EEC8 970916063400 mhsaa C989A6531FD9EEC8 970916063413 mhsaa C989A6531FD9EEC8 970916063423 mhsaa C989A6531FD9EEC8 970916063434 mhsaa C989A6531FD9EEC8 970916063452 mhsaa C989A6531FD9EEC8 970916063502 mhsaa C989A6531FD9EEC8 970916063511 mhsaa A127C018E4812A29 970916015816 france A127C018E4812A29 970916015922 french quotidiennement 645C8DD38C387A92 970916144421 www.emu.com 645C8DD38C387A92 970916144723 www.emu.com 645C8DD38C387A92 970916144752 www.emu.com 645C8DD38C387A92 970916144820 www.emu.com 645C8DD38C387A92 970916144922 www.emu.com 645C8DD38C387A92 970916145006 www.emu.com 645C8DD38C387A92 970916145100 www.emu.com 645C8DD38C387A92 970916145137 www.emu.com 645C8DD38C387A92 970916145314 645C8DD38C387A92 970916145459 645C8DD38C387A92 970916145525 www.emu.com 645C8DD38C387A92 970916145550 www.emu.com 645C8DD38C387A92 970916145728 www.emu.com DA94D4B5A7C0D1AF 970916154114 wilshire financial services DA94D4B5A7C0D1AF 970916154142 wilshire financial services group DA94D4B5A7C0D1AF 970916154245 wilshire financial services group companies AA716408D075660C 970916195520 admiral krag 719CF3C90004051C 970916114344 southern-domains 0DB3873516AE57F7 970916034537 metalica 9DB263190BB17AC2 970916235345 apple.com 9DB263190BB17AC2 970916235714 apple.com 9DB263190BB17AC2 970916235751 apple.com movies 9DB263190BB17AC2 970916235820 movies C0BD480632F27E58 970916111049 newsnet C0BD480632F27E58 970916112445 usenet C0BD480632F27E58 970916113157 bbs C0BD480632F27E58 970916114716 bbs C0BD480632F27E58 970916115317 usenet newsgroups 59C873BBBA8998BA 970916064758 jerusalem post 59C873BBBA8998BA 970916064817 jerusalem post 59C873BBBA8998BA 970916064946 jerusalem post newspaper 59C873BBBA8998BA 970916065100 jerusalem post newspaper 59C873BBBA8998BA 970916112015 webcrawler D9142519595FF9D1 970916105724 yahoo D9142519595FF9D1 
970916105813 yahoo 1E6F6DBF634461FA 970916180926 car audio 1E6F6DBF634461FA 970916181745 phoenix gold 1E6F6DBF634461FA 970916181811 phoenix gold 1E6F6DBF634461FA 970916181851 phoenix gold 1E6F6DBF634461FA 970916181914 clarion car audio 1E6F6DBF634461FA 970916182518 clarion car audio 1E6F6DBF634461FA 970916182528 clarion car audio 1E6F6DBF634461FA 970916182534 clarion car audio 1E6F6DBF634461FA 970916182544 clarion car audio 1E6F6DBF634461FA 970916182553 clarion car audio 98825190824FBCEC 970916103412 satellite pictures 98825190824FBCEC 970916103418 satellite pictures 98825190824FBCEC 970916110033 satellite pictures 98825190824FBCEC 970916110036 satellite pictures 98825190824FBCEC 970916110140 dublin maps 98825190824FBCEC 970916110251 satellite pictures london 98825190824FBCEC 970916110558 satellite pictures london 33E7C94098B1796F 970916012725 port douglas 33E7C94098B1796F 970916012802 port douglas 33E7C94098B1796F 970916012834 port douglas 33E7C94098B1796F 970916012855 port douglas 33E7C94098B1796F 970916013653 hairy 33E7C94098B1796F 970916013801 hairy 33E7C94098B1796F 970916014006 hairy C0916429A59CE5A1 970916060012 calibration C0916429A59CE5A1 970916061342 calibration AND equipment C0916429A59CE5A1 970916061433 calibration AND equipment C0916429A59CE5A1 970916061508 calibration AND equipment AND testing 8A7BC9076D6F166F 970916113432 news, europe, netherlands 8A7BC9076D6F166F 970916113512 news, europe, netherlands 8A7BC9076D6F166F 970916113748 news, europe, netherlands benelux 8A7BC9076D6F166F 970916113816 news, europe, netherlands F19ED8F44663520A 970916090752 system search F19ED8F44663520A 970916102316 mail spy F19ED8F44663520A 970916102339 mailspy F19ED8F44663520A 970916102350 mailspy 4091BCDFBF33197A 970916223632 lovers 4091BCDFBF33197A 970916223912 sun 4091BCDFBF33197A 970916224414 naturism 8AFBE95F88FA5C99 970916085553 the cranes are flying 8AFBE95F88FA5C99 970916085612 8AFBE95F88FA5C99 970916090003 8FE55B4D65B22166 970916052821 espn 78EC0A6026552159 970916160144 pocsag 78EC0A6026552159 970916160215 pocsag F268B329129FEA09 970916103147 emergency medicine F268B329129FEA09 970916103239 emergency medicine organizations 122A31FB8B9FC6EA 970916082347 printers laserjet 122A31FB8B9FC6EA 970916082355 printers laserjet hp 122A31FB8B9FC6EA 970916082403 printers laserjet hp 122A31FB8B9FC6EA 970916082416 printers laserjet hp printer 122A31FB8B9FC6EA 970916082429 printers laserjet hp printer inkjet 122A31FB8B9FC6EA 970916082445 printers laserjet hp printer 122A31FB8B9FC6EA 970916082458 printers laserjet hp printer 122A31FB8B9FC6EA 970916082512 hp laserjet printers 122A31FB8B9FC6EA 970916082524 hp laserjet printers deskjet 122A31FB8B9FC6EA 970916082532 hp laserjet printers deskjet printer 122A31FB8B9FC6EA 970916082536 hp laserjet printers deskjet printer 122A31FB8B9FC6EA 970916133144 chat B9922F32F8DD2511 970916170034 montelambert DB49308A76F8A6C4 970916021833 fiskars DB49308A76F8A6C4 970916022144 fiskars DB49308A76F8A6C4 970916022357 DB49308A76F8A6C4 970916022812 +fiskars +ups +power DB49308A76F8A6C4 970916023022 +fiskars +ups +power DB49308A76F8A6C4 970916023139 +fiskars +ups +power DB49308A76F8A6C4 970916023337 +fiskars +ups +power 70AFB9518EB9997A 970916091739 jscript 70AFB9518EB9997A 970916091752 jscript 70AFB9518EB9997A 970916092120 jscript C1977F1B854584B3 970916214404 sean mcafee shit my pants C1977F1B854584B3 970916214409 sean mcafee shit my pants 5BE36449BA3E2501 970916100347 xerox 5BE36449BA3E2501 970916101126 duxbury 5BE36449BA3E2501 970916101740 duxbury 5BE36449BA3E2501 970916101749 
braille printers 5BE36449BA3E2501 970916101827 braille printers 5BE36449BA3E2501 970916102425 braille printers 66BA2100B71AF41C 970916193830 79785E25B2F213B8 970916145404 "free stamps" 79785E25B2F213B8 970916145412 "free stamps" 79785E25B2F213B8 970916145444 "free stamps" 79785E25B2F213B8 970916145516 "free stamps" 185D6864023D5B24 970916133811 internet AND certification 185D6864023D5B24 970916133929 internet AND certification 185D6864023D5B24 970916134010 185D6864023D5B24 970916134118 185D6864023D5B24 970916134250 microsoft AND internet AND certification C5460576B58BB1CC 970916192508 hacking telenet C5460576B58BB1CC 970916193004 hacking telenet C5460576B58BB1CC 970916193355 hacking telenet C5460576B58BB1CC 970916193737 hacking telenet C5460576B58BB1CC 970916193908 hacking telenet C5460576B58BB1CC 970916194352 hacking telenet C5460576B58BB1CC 970916194748 phreaking telenet 158EF1FF1683799A 970916091509 silicon investor 158EF1FF1683799A 970916091542 silicon investor 05FD3B6783303D98 970916210805 triphop 05FD3B6783303D98 970916211033 triphop 05FD3B6783303D98 970916211049 triphop 05FD3B6783303D98 970916211140 triphop 05FD3B6783303D98 970916211201 triphop 05FD3B6783303D98 970916211233 triphop 05FD3B6783303D98 970916211250 triphop 05FD3B6783303D98 970916211315 triphop 05FD3B6783303D98 970916211408 mazzy star 05FD3B6783303D98 970916211426 05FD3B6783303D98 970916211440 05FD3B6783303D98 970916211459 05FD3B6783303D98 970916211638 portishead 05FD3B6783303D98 970916211704 05FD3B6783303D98 970916212103 05FD3B6783303D98 970916212130 05FD3B6783303D98 970916212145 trip-hop 71D2D4E6C01FD7E1 970916102326 kitty kelly 59994BE9F8892C94 970916195903 americansingles 366185767DC07204 970916042638 pc_4dgen.zip 8EE83362186F49EF 970916110722 pocket doors 8EE83362186F49EF 970916161522 red fern 8EE83362186F49EF 970916161635 tulsequah chief ED3EA19F0B5A556B 970916092326 www.thatguy.com/splash/ 1A64296CCE60F19E 970916083359 black men 1A64296CCE60F19E 970916124107 toni braxton 1A64296CCE60F19E 970916124152 toni braxton 1A64296CCE60F19E 970916124156 toni braxton 1A64296CCE60F19E 970916131923 black women 1A64296CCE60F19E 970916134113 halle berry 1A64296CCE60F19E 970916134125 halle berry 1A64296CCE60F19E 970916134416 halle berry C81329DC0EF932FB 970916112502 1998 C040A1754EEF11B1 970916161855 primestar 93CE4FF9E36FA112 970916203820 demi moore 93CE4FF9E36FA112 970916211152 jenne mccarthy 93CE4FF9E36FA112 970916211227 jenne mccarthy jenny 93CE4FF9E36FA112 970916211759 jenny mccarthy 93CE4FF9E36FA112 970916211814 jenny mccarthy 59E5CD546C202A78 970916103542 internet public library 59E5CD546C202A78 970916103609 internet public library 59E5CD546C202A78 970916103637 internet public library 49D3717A8D3ED397 970916083233 eek the cat 77AC89619076A8E1 970916104906 sloth 77AC89619076A8E1 970916104939 sloth toed 4F6F6DA149C3DC4D 970916005305 maria checa 4F6F6DA149C3DC4D 970916010738 4F6F6DA149C3DC4D 970916011154 allysa milano 4F6F6DA149C3DC4D 970916011402 C07D4ECD1ACE0C89 970916193608 e C07D4ECD1ACE0C89 970916193629 entertainment 6FB3D2D282761F25 970916111515 usa todays sports page 6FB3D2D282761F25 970916111553 usa todays sports page 6FB3D2D282761F25 970916111723 usa todays sports page 6FB3D2D282761F25 970916141735 info on ukiah ca 6FB3D2D282761F25 970916142024 info on ukiah ca 6FB3D2D282761F25 970916142113 davy tree triming 6FB3D2D282761F25 970916170122 cartoons 6FB3D2D282761F25 970916170344 cartoons 6FB3D2D282761F25 970916175415 kids activdies 6FB3D2D282761F25 970916175513 kids activdies 6FB3D2D282761F25 970916175523 kids activdies 
6FB3D2D282761F25 970916191440 invertebrates 6FB3D2D282761F25 970916191445 invertebrates 6FB3D2D282761F25 970916193702 little tikes 6FB3D2D282761F25 970916193704 little tikes 6FB3D2D282761F25 970916193748 little tikes 6FB3D2D282761F25 970916193751 little tikes 6FB3D2D282761F25 970916194005 little tikes 6FB3D2D282761F25 970916194026 little tikes 6FB3D2D282761F25 970916194028 little tikes 6FB3D2D282761F25 970916194103 6FB3D2D282761F25 970916194139 6FB3D2D282761F25 970916194244 little tykes toys 6FB3D2D282761F25 970916194335 toys r us 6FB3D2D282761F25 970916194518 toys r us 6FB3D2D282761F25 970916194520 little tykes toys 6FB3D2D282761F25 970916194603 mall of america in minnasota 6FB3D2D282761F25 970916195043 mall of america in minnasota 6FB3D2D282761F25 970916230356 invertebrates 6FB3D2D282761F25 970917000335 invertebrates 6FB3D2D282761F25 970917000345 6FB3D2D282761F25 970917000348 4A1C1951CEC8BA70 970916085140 marine midland 4A1C1951CEC8BA70 970916085234 778EBE06AC999541 970916120133 deaf history 778EBE06AC999541 970916120241 deaf history B144CE6F1EDAB0DE 970916080609 game B144CE6F1EDAB0DE 970916080737 car B144CE6F1EDAB0DE 970916080821 mercedes benz B144CE6F1EDAB0DE 970916090435 mercedes benz slk B144CE6F1EDAB0DE 970916090554 mercedes benz 259DC5DCBDBD4D4D 970916125331 7D1DD1781EDB79A0 970916173046 makeup products 7D1DD1781EDB79A0 970916173100 makeup products 7D1DD1781EDB79A0 970916174001 "lancom" cosmetic producrs 7D1DD1781EDB79A0 970916174500 "lancom" products 7D1DD1781EDB79A0 970916175124 lancome cosmetics 7D1DD1781EDB79A0 970916180008 lancome beauty products 7D1DD1781EDB79A0 970916180439 lancome paris 7D1DD1781EDB79A0 970916181253 7D1DD1781EDB79A0 970916181408 nordstrom DAA8C88C7DA0F0B9 970916181646 cancer and prevention DAA8C88C7DA0F0B9 970916182959 DAA8C88C7DA0F0B9 970916183616 cancer and prevention DAA8C88C7DA0F0B9 970916183708 060FCC14E09355CF 970916002916 vascular diseases + sleep disorders 060FCC14E09355CF 970916004422 vascular diseases + sleep disorders 060FCC14E09355CF 970916005808 sleep+disorders+high+blood+pressure 060FCC14E09355CF 970916005939 060FCC14E09355CF 970916010137 insomnia+high+blood+pressure 060FCC14E09355CF 970916010636 060FCC14E09355CF 970916010800 insomnia+high+blood+pressure 060FCC14E09355CF 970916011358 insomnia+high+blood+pressure 060FCC14E09355CF 970916011744 insomnia+high+blood+pressure BF76256C3A233A8A 970916101502 +proposals BF76256C3A233A8A 970916101525 +proposals 15BDF589C71C10CB 970916133051 15BDF589C71C10CB 970916211915 information drop shipeed 15BDF589C71C10CB 970916211916 information drop shipeed 15BDF589C71C10CB 970916211919 information drop shipeed 15BDF589C71C10CB 970916212003 information drop shipeed 15BDF589C71C10CB 970916212106 whole salers 15BDF589C71C10CB 970916212148 whole salers 15BDF589C71C10CB 970916212255 air filtration systems greg montoya 15BDF589C71C10CB 970916212402 air filtration systems greg montoya 15BDF589C71C10CB 970916212437 air filtration systems greg montoya 15BDF589C71C10CB 970916212646 greg montoya 15BDF589C71C10CB 970916214320 greg montoya 15BDF589C71C10CB 970916214400 greg montoya 15BDF589C71C10CB 970916214418 15BDF589C71C10CB 970916214946 greg montoya 15BDF589C71C10CB 970916215001 15BDF589C71C10CB 970916215550 15BDF589C71C10CB 970916215640 5539B128215E9A49 970916094717 cahuilla 5539B128215E9A49 970916094858 5539B128215E9A49 970916095455 chauilla 5539B128215E9A49 970916095642 chauilla 5539B128215E9A49 970916095711 candelaria 5539B128215E9A49 970916110728 candalaria 5539B128215E9A49 970916110936 5539B128215E9A49 970916111924 
5539B128215E9A49 970916112200 5539B128215E9A49 970916112220 5539B128215E9A49 970916112442 5539B128215E9A49 970916112851 B1E4391F6E6EFEF4 970916203551 using multimedia B1E4391F6E6EFEF4 970916205455 multimedia system B1E4391F6E6EFEF4 970916205504 multimedia system B1E4391F6E6EFEF4 970916205540 multimedia system B1E4391F6E6EFEF4 970916205606 multimedia system interactive B1E4391F6E6EFEF4 970916205659 multimedia system educational B1E4391F6E6EFEF4 970916205737 multimedia system educational computing B1E4391F6E6EFEF4 970916210015 multimedia system educational computing publications B1E4391F6E6EFEF4 970916210046 multimedia system educational computing publications AAE7D472AA45AB96 970916213339 crash photos AAE7D472AA45AB96 970916213418 di crash photos AAE7D472AA45AB96 970916213433 di crash photos AAE7D472AA45AB96 970916213459 di crash photos 790FC18760C238A6 970916083709 childbirth 790FC18760C238A6 970916083839 childbirth C9F4F61D48892F7B 970916201602 cal state northridge C9F4F61D48892F7B 970916202041 cal state northridge - home page 2F93931CCB13D662 970916144059 teen 99D8C7D14A864902 970916234643 reiten + western 99D8C7D14A864902 970916234803 99D8C7D14A864902 970917000740 westernreiten + braunschweig 99D8C7D14A864902 970917000831 reiten + western + braunschweig 99D8C7D14A864902 970917000923 reiten + western + niedersachsen 75C18D86685AAEAE 970916162606 shannon tweed 75C18D86685AAEAE 970916163303 shannon tweed 75C18D86685AAEAE 970916164545 shannon tweed 75C18D86685AAEAE 970916164555 shannon tweed 7D61F86F1732EDC6 970916055000 +baseball +collector +software 86EAEA913CC8D7C4 970916205516 cheerleaders 86EAEA913CC8D7C4 970916205654 cheerleaders 86EAEA913CC8D7C4 970916205722 cheerleaders 3DF52D5806E094F4 970916142206 19887D73626B5D55 970916200713 shakespeare 19887D73626B5D55 970916200816 shakespeare comedies 19887D73626B5D55 970916201237 the comedy of errors 19887D73626B5D55 970916201610 19887D73626B5D55 970916202213 19887D73626B5D55 970916202446 19887D73626B5D55 970916202616 19887D73626B5D55 970916202947 19887D73626B5D55 970916203256 19887D73626B5D55 970916203348 19887D73626B5D55 970916203457 19887D73626B5D55 970916203552 19887D73626B5D55 970916203648 19887D73626B5D55 970916203754 19887D73626B5D55 970916203854 19887D73626B5D55 970916203940 19887D73626B5D55 970916204022 19887D73626B5D55 970916210207 comedy of errors, the 19887D73626B5D55 970916211335 comedy of errors, the 19887D73626B5D55 970916211545 19887D73626B5D55 970916211639 19887D73626B5D55 970916211949 the comedy of errors "i to the world am like a drop of water" 19887D73626B5D55 970916212111 "i to the world am like a drop of water" 19887D73626B5D55 970916212319 the comedy of errors; important passages 19887D73626B5D55 970916212418 the comedy of errors; important passages 19887D73626B5D55 970916212506 19887D73626B5D55 970916212705 the comedy of errors 19887D73626B5D55 970916212801 19887D73626B5D55 970916215300 the comedy of errors 19887D73626B5D55 970916215614 B8E12AFC196C5FB7 970916081637 win32s B8E12AFC196C5FB7 970916081817 vxtreme 070A45F23275C279 970916195011 family ancestory 070A45F23275C279 970916195333 family ancestory immigrants 070A45F23275C279 970916195412 family ancestory german 070A45F23275C279 970916195444 family ancestory german 070A45F23275C279 970916195531 family ancestory german genealogy descendants ancestor 070A45F23275C279 970916195554 4E3114DABE39DDB6 970916224105 stock photoes 752FE259E734662C 970916090642 "jlamont@washblade.com" 752FE259E734662C 970916090707 "james lamont" 752FE259E734662C 970916090752 "washington 
blade"+"james lamont" 752FE259E734662C 970916091014 chechi 752FE259E734662C 970916091035 ceo of northwest airlines 752FE259E734662C 970916091220 "northwest airlines"+"chechi" 752FE259E734662C 970916091317 "northwest airlines"+"cheechi" 752FE259E734662C 970916091336 northwest airlines 752FE259E734662C 970916094054 jlamont@washblade.com 752FE259E734662C 970916113229 "caring kids" 752FE259E734662C 970916113306 "caring kids" 752FE259E734662C 970916113338 "caring kids" 752FE259E734662C 970916113458 "caring kids"+st. johns university 752FE259E734662C 970916113533 "caring kids"+st. johns university 752FE259E734662C 970916113630 "st. johns university" + kids B53A8E9C0F0A04B8 970916134559 student loans B53A8E9C0F0A04B8 970916134724 student loans, alberta F44CC3ECE5C1C448 970916072822 6D906622D87278E5 970916094911 youth +cult 6D906622D87278E5 970916094924 youth +cult 6D906622D87278E5 970916100502 youth +cult 0F6881A3768F6E29 970916151920 free email 0F6881A3768F6E29 970916152015 free email 0F6881A3768F6E29 970916152058 free email 054340E4B8F63E34 970916162603 medieval pics 054340E4B8F63E34 970916163016 medieval pics mapoff 6CB9B3573BCB5A72 970916081616 florida gators football 0338D63FFC24DC2F 970916152530 clothing catalogs 0338D63FFC24DC2F 970916154224 mens clothing catalog 0338D63FFC24DC2F 970916163825 freebies 810EFC647D40E4CB 970916182847 see you at the pole CF5AFAEC0B19A940 970916063953 animal bites CF5AFAEC0B19A940 970916064021 animal bites rabies CF5AFAEC0B19A940 970916064955 animal bites rabies CF5AFAEC0B19A940 970916065205 animal bites rabies C28C7C97640037C1 970916065817 ieee A1F547F916AD8A43 970916125757 grammar A1F547F916AD8A43 970916130038 english grammar 523E46AA9E20AB3E 970916163926 zork cheats 0C6EA7BA0D77B41A 970916060223 steve AND heater 0C6EA7BA0D77B41A 970916060414 genamation industries 0C6EA7BA0D77B41A 970916094017 dos AND gvc AND network AND card 0C6EA7BA0D77B41A 970916094116 gvc AND network AND card 79E7FA8E26F7349E 970916140438 low fat recipe books 49948224B156B2DF 970916212913 49948224B156B2DF 970916213117 9541D2047C5360F9 970916024016 jenny 9541D2047C5360F9 970916063530 aaa 9541D2047C5360F9 970916063614 aaa travel B163FAFD64AFAB18 970916134052 free downloadable pc games B163FAFD64AFAB18 970916134142 free downloadable pc games B163FAFD64AFAB18 970916134159 free downloadable pc wallpaper B163FAFD64AFAB18 970916134220 free downloadable pc wallpaper B163FAFD64AFAB18 970916134242 free downloadable pc wallpaper B163FAFD64AFAB18 970916134304 free downloadable pc wallpaper B163FAFD64AFAB18 970916134344 free downloadable pc wallpaper B163FAFD64AFAB18 970916135454 free pc screensavers B163FAFD64AFAB18 970916142957 free pc screensavers D2FFE38AFF1C358A 970916083539 methylmercury D2FFE38AFF1C358A 970916083634 cantel at&t 011ACA65C2BF70B2 970916175937 prime ministers of australia 011ACA65C2BF70B2 970916180131 011ACA65C2BF70B2 970916182815 death of robert menzies 011ACA65C2BF70B2 970916182831 dead robert menzies 011ACA65C2BF70B2 970916182917 233043A33AEF6A1D 970916204226 barbara ross 233043A33AEF6A1D 970916204330 barbara+ross+e 233043A33AEF6A1D 970916204848 barbara+ross+law 233043A33AEF6A1D 970916210506 robot+circuit+computer+interface 233043A33AEF6A1D 970916210814 robot+circuit+computer+interface E559AEBED8E9E078 970916113041 www.csaa.com E559AEBED8E9E078 970916113230 www.csaa.com home insuramce E559AEBED8E9E078 970916113232 www.csaa.com home insurance 6E2A4B3FED94E84D 970916173721 vtec and honda 6E2A4B3FED94E84D 970916173931 "what is 'vtec'?" 
6E2A4B3FED94E84D 970916174442 dodge "magnum" engines 6E2A4B3FED94E84D 970916174748 dodge 6E2A4B3FED94E84D 970916174817 dodge avenger A1CFAE0FF0E6CFDE 970916193254 freeware 62F017C7A74C51DC 970916190511 usenet 62F017C7A74C51DC 970916190616 usenet newsgroup 62F017C7A74C51DC 970916190739 usenet newsgroup and xenix 150EBA3F42F75143 970916191931 ravage 150EBA3F42F75143 970916191935 ravage 150EBA3F42F75143 970916191944 ravage 2B73EFE0F9FC9E0B 970916195501 http://educationalproducts.com 2B73EFE0F9FC9E0B 970916195507 http://educationalproducts.com 90B21F67EEA27FEA 970916105359 wolfenstein cheats 90B21F67EEA27FEA 970916105418 90B21F67EEA27FEA 970916105451 90B21F67EEA27FEA 970916105505 90B21F67EEA27FEA 970916105532 1830EAD9FEB54EB4 970916173423 pointilism 109FA25A577DCE53 970916182544 coffee+decor 109FA25A577DCE53 970916182630 coffee+decor 109FA25A577DCE53 970916182648 coffee+decor 109FA25A577DCE53 970916182744 coffee+decor 109FA25A577DCE53 970916182907 coffee+decor 109FA25A577DCE53 970916182923 coffee+decor 51BA997ACC88FAE3 970916230937 photodiode 51BA997ACC88FAE3 970916231040 photodiode circuit 51BA997ACC88FAE3 970916231143 photodiode photodiodes detectors photodetectors 14101EC6E7B07817 970916072051 r.e.m 14101EC6E7B07817 970916072150 rem and music B038389D403E4C43 970916200420 scarborough,ontario,canada B038389D403E4C43 970916203329 C89F34E15252E94A 970916201058 custom computer configuration C89F34E15252E94A 970916201238 custom computer configuration C89F34E15252E94A 970916201330 custom computer configuration C89F34E15252E94A 970916201352 custom computer configuration C89F34E15252E94A 970916201404 custom computer configuration EEF64006C7D47AC1 970916181047 army aviation center organizations EEF64006C7D47AC1 970916181052 army aviation center organizations EEF64006C7D47AC1 970916181208 'us army' EEF64006C7D47AC1 970916181222 army aviation center organizations 8F0ECFEDAB4A03DB 970916025956 fax 8F0ECFEDAB4A03DB 970916035934 free fax service 8F0ECFEDAB4A03DB 970916035936 free fax service 3887FA7C17106AF1 970916102956 courses+online+engin 3887FA7C17106AF1 970916103104 courses+online+engin 3887FA7C17106AF1 970916103151 courses+online+engin 3887FA7C17106AF1 970916103315 5210315A34C4E7A4 970916201905 www.pol.net 09EEE6585FB974ED 970916001014 urlaub ferienwohnungen appartements 09EEE6585FB974ED 970916001038 urlaub ferienwohnungen appartements A2800E21FDCEE2BF 970916102057 eclectus roratus A2800E21FDCEE2BF 970916102202 eclectus roratus A2800E21FDCEE2BF 970916102213 eclectus roratus A2800E21FDCEE2BF 970916102342 eclectus roratus aviary A2800E21FDCEE2BF 970916102415 A2800E21FDCEE2BF 970916102658 eclectus roratus aviary A2800E21FDCEE2BF 970916124816 parrots A2800E21FDCEE2BF 970916124834 parrots A2800E21FDCEE2BF 970916124903 eclectus roratus A2800E21FDCEE2BF 970916125218 rakets 31A203282F24E07C 970916111228 tablature 31A203282F24E07C 970916111420 tablature guitar BA449E5E59C384BB 970916135030 samuel de champlain BA449E5E59C384BB 970916135059 samuel de champlain BA449E5E59C384BB 970916135132 samuel de champlain BA449E5E59C384BB 970916135212 samuel de champlain BA449E5E59C384BB 970916135301 samuel de champlain BA449E5E59C384BB 970916140308 samuel de champlain BA449E5E59C384BB 970916140420 samuel de champlain BA449E5E59C384BB 970916140626 samuel de champlain BA449E5E59C384BB 970916140959 samuel de chnplain BA449E5E59C384BB 970916141012 samuel de champlain BA449E5E59C384BB 970916141048 samuel de champlain BA449E5E59C384BB 970916141714 etienne brule BA449E5E59C384BB 970916141756 etienne brule BA449E5E59C384BB 
970916141917 helene boulle BA449E5E59C384BB 970916142225 helene boule BA449E5E59C384BB 970916142750 champlain, samuel de 5BB5018A0A78B70D 970916061121 royal mutual fund ================================================ FILE: assemblies/samples/src/main/resources/jobs/hadoop/files/2008.log ================================================ 192.168.1.211 - - [01/Jan/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jan/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jan/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jan/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jan/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jan/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jan/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jan/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jan/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jan/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jan/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jan/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jan/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jan/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jan/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jan/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jan/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jan/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jan/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jan/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) 
Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jan/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jan/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jan/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jan/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jan/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Mar/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Mar/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Mar/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Mar/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Mar/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; 
rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 
5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jan/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jan/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jan/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jan/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jan/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jan/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 
(Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jan/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jan/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jan/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jan/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jan/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jan/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jan/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jan/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jan/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jan/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jan/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jan/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jan/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jan/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jan/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jan/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jan/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jan/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jan/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Mar/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Mar/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Mar/2008:00:00:00 -0800] "POST / 
HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Mar/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Mar/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - 
[01/Feb/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 
Firefox/2.0" 192.168.1.211 - - [01/Feb/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jan/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jan/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jan/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jan/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jan/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jan/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jan/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jan/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jan/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jan/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jan/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jan/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jan/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - 
[01/Jan/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jan/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jan/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jan/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jan/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jan/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jan/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jan/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jan/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jan/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jan/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jan/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Mar/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Mar/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Mar/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Mar/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Mar/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Feb/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Feb/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Feb/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Feb/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Feb/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Feb/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 
127.0.0.1 - - [01/Jan/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0"
127.0.0.1 - - [01/Feb/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0"
192.168.1.211 - - [01/Mar/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0"
192.168.1.211 - - [01/Apr/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0"
[... remainder of this sample web-server log omitted: the file repeats the same Apache combined-format entries many times, varying only the client IP (127.0.0.1 or 192.168.1.211), the method (GET or POST), and the month (Jan/Feb/Mar/Apr 2008); every entry requests "/", returns status 200 with 50 bytes, has no referer, and carries the identical Firefox 2.0 user-agent string ...]
HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Apr/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Apr/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Apr/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jun/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jun/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jun/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jun/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jun/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/May/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/May/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/May/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/May/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/May/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/May/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/May/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/May/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/May/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/May/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/May/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/May/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/May/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/May/2008:00:00:00 
-0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/May/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/May/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/May/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/May/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/May/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/May/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/May/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/May/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/May/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/May/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/May/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/May/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/May/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/May/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/May/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/May/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/May/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/May/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/May/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/May/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/May/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - 
[01/May/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/May/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/May/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/May/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/May/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/May/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/May/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/May/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/May/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/May/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/May/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/May/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/May/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/May/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/May/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Apr/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Apr/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Apr/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Apr/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Apr/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Apr/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Apr/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 
192.168.1.211 - - [01/Apr/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Apr/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Apr/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Apr/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Apr/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Apr/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Apr/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Apr/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Apr/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Apr/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Apr/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Apr/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Apr/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Apr/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Apr/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Apr/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Apr/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Apr/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jun/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jun/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jun/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jun/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; 
rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jun/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/May/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/May/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/May/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/May/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/May/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/May/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/May/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/May/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/May/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/May/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/May/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/May/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/May/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/May/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/May/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/May/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/May/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/May/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/May/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/May/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/May/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 
(Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/May/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/May/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/May/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/May/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/May/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/May/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/May/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/May/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/May/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/May/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/May/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/May/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/May/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/May/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/May/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/May/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/May/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/May/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/May/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/May/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/May/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/May/2008:00:00:00 -0800] "POST / 
HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/May/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/May/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/May/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/May/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/May/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/May/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/May/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Apr/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Apr/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Apr/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Apr/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Apr/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Apr/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Apr/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Apr/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Apr/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Apr/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Apr/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Apr/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Apr/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Apr/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Apr/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; 
U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Apr/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Apr/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Apr/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Apr/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Apr/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Apr/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Apr/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Apr/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Apr/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Apr/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jun/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jun/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jun/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jun/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jun/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - 
[01/May/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 
(Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Apr/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Apr/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 
127.0.0.1 - - [01/Apr/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Apr/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Apr/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Apr/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Apr/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Apr/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Apr/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Apr/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Apr/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Apr/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Apr/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Apr/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Apr/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Apr/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Apr/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Apr/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Apr/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Apr/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Apr/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Apr/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Apr/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Apr/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Apr/2008:00:00:00 -0800] "POST / 
HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jun/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jun/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jun/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jun/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jun/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows 
NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 
Sample web-server access-log data in Apache combined log format, from the Hadoop samples (jobs/hadoop/files/, 2008 entries). In the original file each record is on its own line; representative entries:

127.0.0.1 - - [01/May/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0"
192.168.1.211 - - [01/Jul/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0"
192.168.1.211 - - [01/Aug/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0"
192.168.1.211 - - [01/Sep/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0"
127.0.0.1 - - [01/Aug/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0"

The remainder of the file repeats records of this form, varying only the client IP (127.0.0.1 or 192.168.1.211), the HTTP method (GET or POST), and the month (May, Jul, Aug, or Sep 2008); the status (200), response size (50), referrer ("-"), and user-agent string are identical throughout.
Firefox/2.0" 192.168.1.211 - - [01/Aug/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; 
en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jul/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jul/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jul/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jul/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jul/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jul/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jul/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jul/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jul/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jul/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jul/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jul/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jul/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jul/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jul/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jul/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 
127.0.0.1 - - [01/Jul/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jul/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jul/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jul/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jul/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jul/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jul/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jul/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jul/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Sep/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Sep/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Sep/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Sep/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Sep/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" 
"Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 
Firefox/2.0" 127.0.0.1 - - [01/Aug/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jul/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jul/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jul/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jul/2008:00:00:00 -0800] "POST / HTTP/1.1" 
200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jul/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jul/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jul/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jul/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jul/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jul/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jul/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jul/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jul/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jul/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jul/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jul/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jul/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jul/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jul/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jul/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jul/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jul/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jul/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jul/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jul/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Sep/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; 
en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Sep/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Sep/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Sep/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Sep/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - 
- [01/Aug/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 
"-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Oct/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Oct/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Oct/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Oct/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Oct/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Oct/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Oct/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Oct/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Oct/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Oct/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Oct/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Oct/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Oct/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 
(Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Oct/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Oct/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Oct/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Oct/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Oct/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Oct/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Oct/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Oct/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Oct/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Oct/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Oct/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Oct/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jun/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jun/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jun/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jun/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jun/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" 
"Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2008:00:00:00 -0800] "GET / HTTP/1.1" 
200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2008:00:00:00 -0800] "GET / 
HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Oct/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Oct/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Oct/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Oct/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Oct/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Oct/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Oct/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Oct/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Oct/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Oct/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Oct/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Oct/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Oct/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Oct/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Oct/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Oct/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Oct/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Oct/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Oct/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Oct/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - 
[01/Oct/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Oct/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Oct/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Oct/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Oct/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jun/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jun/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jun/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jun/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jun/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 
Firefox/2.0" 192.168.1.211 - - [01/Nov/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; 
en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Oct/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Oct/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Oct/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Oct/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Oct/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Oct/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 
5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Oct/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Oct/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Oct/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Oct/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Oct/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Oct/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Oct/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Oct/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Oct/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Oct/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Oct/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Oct/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Oct/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Oct/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Oct/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Oct/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Oct/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Oct/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Oct/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jun/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jun/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jun/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - 
[01/Jun/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jun/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 
(Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 
127.0.0.1 - - [01/Nov/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2008:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Oct/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Oct/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Oct/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Oct/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Oct/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Oct/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Oct/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Oct/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Oct/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Oct/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Oct/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Oct/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Oct/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Oct/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Oct/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Oct/2008:00:00:00 -0800] "POST / HTTP/1.1" 
200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Oct/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Oct/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Oct/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Oct/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Oct/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Oct/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Oct/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Oct/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Oct/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jun/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jun/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jun/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jun/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jun/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; 
en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - 
- [01/Nov/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Dec/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Dec/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Dec/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 
"-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Dec/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Dec/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Dec/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Dec/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Dec/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Dec/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Dec/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Dec/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Dec/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Dec/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Dec/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Dec/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Dec/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Dec/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Dec/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Dec/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Dec/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Dec/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Dec/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Dec/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Dec/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Dec/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; 
rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Dec/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Dec/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Dec/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Dec/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Dec/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Dec/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Dec/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Dec/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Dec/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Dec/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Dec/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Dec/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Dec/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Dec/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Dec/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Dec/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Dec/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Dec/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Dec/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Dec/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Dec/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Dec/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - 
[01/Dec/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Dec/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Dec/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Dec/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Dec/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Dec/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Dec/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Dec/2008:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" ================================================ FILE: assemblies/samples/src/main/resources/jobs/hadoop/files/2009.log ================================================ 192.168.1.211 - - [01/Jan/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jan/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jan/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jan/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jan/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jan/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jan/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jan/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jan/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jan/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jan/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jan/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jan/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - 
[01/Jan/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jan/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jan/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jan/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jan/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jan/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jan/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jan/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jan/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jan/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jan/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jan/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Mar/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Mar/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Mar/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Mar/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Mar/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 
192.168.1.211 - - [01/Feb/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 
Firefox/2.0" 192.168.1.211 - - [01/Feb/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) 
Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jan/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jan/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jan/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jan/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jan/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jan/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jan/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jan/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jan/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jan/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jan/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jan/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jan/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jan/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jan/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jan/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jan/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jan/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jan/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jan/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jan/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; 
Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0"

[Sample access-log data: the remainder of the file is synthetic Apache combined-format entries repeated many times, varying only in client IP (127.0.0.1 or 192.168.1.211), request method (GET or POST against "/"), and date (01/Jan, 01/Feb, or 01/Mar 2009, all at 00:00:00 -0800); every entry returns status 200 with 50 bytes, an empty referrer, and the same Firefox 2.0 user agent. Representative entries:]

127.0.0.1 - - [01/Jan/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0"
192.168.1.211 - - [01/Feb/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0"
192.168.1.211 - - [01/Mar/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0"

127.0.0.1 - - [01/Jan/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-"
"Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jan/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jan/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jan/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jan/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jan/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jan/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jan/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jan/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jan/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jan/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jan/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jan/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jan/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jan/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jan/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jan/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jan/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jan/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jan/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Mar/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Mar/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Mar/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; 
rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Mar/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Mar/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Feb/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Feb/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Feb/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Feb/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Feb/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Feb/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Feb/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Feb/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Feb/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Feb/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Feb/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Feb/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Feb/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Feb/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Feb/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Feb/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Feb/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Feb/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Feb/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Feb/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - 
[01/Feb/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Feb/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Feb/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Feb/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Feb/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Feb/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Feb/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Feb/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Feb/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Feb/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Feb/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Feb/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Feb/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Feb/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Feb/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Feb/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Feb/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Feb/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Feb/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Feb/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Feb/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Feb/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Feb/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 
"-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Feb/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Feb/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Feb/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Feb/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Feb/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Feb/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Feb/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Apr/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Apr/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Apr/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Apr/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Apr/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Apr/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Apr/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Apr/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Apr/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Apr/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Apr/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Apr/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Apr/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Apr/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Apr/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" 
"Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Apr/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Apr/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Apr/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Apr/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Apr/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Apr/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Apr/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Apr/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Apr/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Apr/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jun/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jun/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jun/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jun/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jun/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/May/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/May/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/May/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/May/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/May/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/May/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/May/2009:00:00:00 -0800] "GET / HTTP/1.1" 
200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/May/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/May/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/May/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/May/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/May/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/May/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/May/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/May/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/May/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/May/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/May/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/May/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/May/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/May/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/May/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/May/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/May/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/May/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/May/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/May/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/May/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/May/2009:00:00:00 -0800] "GET / 
HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/May/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/May/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/May/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/May/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/May/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/May/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/May/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/May/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/May/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/May/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/May/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/May/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/May/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/May/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/May/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/May/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/May/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/May/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/May/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/May/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/May/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Apr/2009:00:00:00 
-0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Apr/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Apr/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Apr/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Apr/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Apr/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Apr/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Apr/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Apr/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Apr/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Apr/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Apr/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Apr/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Apr/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Apr/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Apr/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Apr/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Apr/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Apr/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Apr/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Apr/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Apr/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 
192.168.1.211 - - [01/Apr/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Apr/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Apr/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jun/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jun/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jun/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jun/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jun/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/May/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/May/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/May/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/May/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/May/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/May/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/May/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/May/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/May/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/May/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/May/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/May/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/May/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/May/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; 
rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/May/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/May/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/May/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/May/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/May/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/May/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/May/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/May/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/May/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/May/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/May/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/May/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/May/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/May/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/May/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/May/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/May/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/May/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/May/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/May/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/May/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/May/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 
(Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/May/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/May/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/May/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/May/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/May/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/May/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/May/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/May/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/May/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/May/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/May/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/May/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/May/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/May/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Apr/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Apr/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Apr/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Apr/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Apr/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Apr/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Apr/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Apr/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 
(Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Apr/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Apr/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Apr/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Apr/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Apr/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Apr/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Apr/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Apr/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Apr/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Apr/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Apr/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Apr/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Apr/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Apr/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Apr/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Apr/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Apr/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jun/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jun/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jun/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jun/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jun/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 
127.0.0.1 - - [01/May/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" 
"Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 
Firefox/2.0" 127.0.0.1 - - [01/May/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Apr/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Apr/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Apr/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Apr/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Apr/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Apr/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Apr/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Apr/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Apr/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Apr/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Apr/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Apr/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Apr/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Apr/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Apr/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Apr/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Apr/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Apr/2009:00:00:00 -0800] "POST 
/ HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Apr/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Apr/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Apr/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Apr/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Apr/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Apr/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Apr/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jun/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jun/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jun/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jun/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jun/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows 
NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 
127.0.0.1 - - [01/May/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jul/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jul/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jul/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jul/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jul/2009:00:00:00 
-0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jul/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jul/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jul/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jul/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jul/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jul/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jul/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jul/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jul/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jul/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jul/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jul/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jul/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jul/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jul/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jul/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jul/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jul/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jul/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jul/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Sep/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - 
[01/Sep/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Sep/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Sep/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Sep/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 
192.168.1.211 - - [01/Aug/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 
Firefox/2.0" 192.168.1.211 - - [01/Aug/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jul/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jul/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jul/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jul/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jul/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jul/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jul/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jul/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jul/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jul/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jul/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jul/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; 
rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jul/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jul/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jul/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jul/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jul/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jul/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jul/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jul/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jul/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jul/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jul/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jul/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jul/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Sep/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Sep/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Sep/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Sep/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Sep/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 
(Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2009:00:00:00 -0800] "POST / 
HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - 
[01/Aug/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jul/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jul/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jul/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jul/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jul/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jul/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jul/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jul/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jul/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jul/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jul/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jul/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jul/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jul/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jul/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jul/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jul/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jul/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jul/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jul/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" 
"Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jul/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jul/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jul/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jul/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jul/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Sep/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Sep/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Sep/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Sep/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Sep/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 
Firefox/2.0" 127.0.0.1 - - [01/Aug/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 
50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jul/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jul/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jul/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jul/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jul/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jul/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jul/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) 
Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jul/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jul/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jul/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jul/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jul/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jul/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jul/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jul/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jul/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jul/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jul/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jul/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jul/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jul/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jul/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jul/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jul/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jul/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Sep/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Sep/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Sep/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Sep/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - 
[01/Sep/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 
"-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; 
rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jul/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jul/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jul/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jul/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jul/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jul/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jul/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jul/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jul/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jul/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jul/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jul/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jul/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jul/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jul/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jul/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; 
rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jul/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jul/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jul/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jul/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jul/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jul/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jul/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jul/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jul/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Sep/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Sep/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Sep/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Sep/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Sep/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 
5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; 
Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jul/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jul/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 
(Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jul/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jul/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jul/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jul/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jul/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jul/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jul/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jul/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jul/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jul/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jul/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jul/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jul/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jul/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jul/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jul/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jul/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jul/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jul/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jul/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jul/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jul/2009:00:00:00 -0800] "POST / 
HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jul/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Sep/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Sep/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Sep/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Sep/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Sep/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - 
[01/Aug/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 
Firefox/2.0" 192.168.1.211 - - [01/Aug/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jul/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jul/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jul/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jul/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jul/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jul/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jul/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jul/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jul/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 
127.0.0.1 - - [01/Jul/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jul/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jul/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jul/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jul/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jul/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jul/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jul/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jul/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jul/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jul/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jul/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jul/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jul/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jul/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jul/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Sep/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Sep/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Sep/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Sep/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Sep/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" 
"Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 
Firefox/2.0" 127.0.0.1 - - [01/Aug/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 
50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jul/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jul/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jul/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jul/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jul/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jul/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jul/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jul/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jul/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jul/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jul/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jul/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jul/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jul/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jul/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jul/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jul/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jul/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jul/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; 
rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jul/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jul/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jul/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jul/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jul/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jul/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Sep/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Sep/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Sep/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Sep/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Sep/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - 
[01/Aug/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 
"-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Oct/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Oct/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Oct/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Oct/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Oct/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Oct/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows 
NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Oct/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Oct/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Oct/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Oct/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Oct/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Oct/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Oct/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Oct/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Oct/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Oct/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Oct/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Oct/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Oct/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Oct/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Oct/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Oct/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Oct/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Oct/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Oct/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jun/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jun/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jun/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; 
U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jun/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jun/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 
(Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" 
"Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Oct/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Oct/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Oct/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Oct/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Oct/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Oct/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Oct/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Oct/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Oct/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Oct/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Oct/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Oct/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Oct/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Oct/2009:00:00:00 -0800] "POST 
/ HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Oct/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Oct/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Oct/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Oct/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Oct/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Oct/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Oct/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Oct/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Oct/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Oct/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Oct/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jun/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jun/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jun/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jun/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jun/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - 
[01/Nov/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 
Firefox/2.0" 192.168.1.211 - - [01/Nov/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; 
en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Oct/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Oct/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Oct/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Oct/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Oct/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Oct/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Oct/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Oct/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Oct/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Oct/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Oct/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Oct/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Oct/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Oct/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Oct/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Oct/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Oct/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Oct/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Oct/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Oct/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Oct/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - 
[01/Oct/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Oct/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Oct/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Oct/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jun/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jun/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jun/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jun/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jun/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 
(Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 
127.0.0.1 - - [01/Nov/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2009:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Oct/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Oct/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Oct/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Oct/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Oct/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Oct/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Oct/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Oct/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Oct/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 
"-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Oct/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Oct/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Oct/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Oct/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Oct/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Oct/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Oct/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Oct/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Oct/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Oct/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Oct/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Oct/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Oct/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Oct/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Oct/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Oct/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jun/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jun/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jun/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jun/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jun/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; 
rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - 
[01/Nov/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 
"-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Dec/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Dec/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Dec/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Dec/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Dec/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Dec/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Dec/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Dec/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Dec/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Dec/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Dec/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Dec/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Dec/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Dec/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Dec/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Dec/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Dec/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Dec/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; 
rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Dec/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Dec/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Dec/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Dec/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Dec/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Dec/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Dec/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Dec/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Dec/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Dec/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Dec/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Dec/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Dec/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Dec/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Dec/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Dec/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Dec/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Dec/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Dec/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Dec/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Dec/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Dec/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - 
[01/Dec/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Dec/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Dec/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Dec/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Dec/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Dec/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Dec/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Dec/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Dec/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Dec/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Dec/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Dec/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Dec/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Dec/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Dec/2009:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" ================================================ FILE: assemblies/samples/src/main/resources/jobs/hadoop/files/2010.log ================================================ 192.168.1.211 - - [01/Jan/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jan/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jan/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jan/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jan/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jan/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jan/2010:00:00:00 
-0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jan/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jan/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jan/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jan/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jan/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jan/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jan/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jan/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jan/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jan/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jan/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jan/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jan/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jan/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jan/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jan/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jan/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jan/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Mar/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Mar/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Mar/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - 
[01/Mar/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Mar/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 
192.168.1.211 - - [01/Feb/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 
Firefox/2.0" 192.168.1.211 - - [01/Feb/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jan/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jan/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jan/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jan/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jan/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jan/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jan/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jan/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jan/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jan/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jan/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jan/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jan/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jan/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; 
rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jan/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jan/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jan/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jan/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jan/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jan/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jan/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jan/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jan/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jan/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jan/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Mar/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Mar/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Mar/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Mar/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Mar/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 
(Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2010:00:00:00 -0800] "POST / 
HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - 
[01/Feb/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jan/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jan/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jan/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jan/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jan/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jan/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jan/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jan/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jan/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jan/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jan/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jan/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jan/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jan/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jan/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jan/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jan/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jan/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jan/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jan/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jan/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jan/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 
(Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jan/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jan/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jan/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Mar/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Mar/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Mar/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Mar/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Mar/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Feb/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Feb/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Feb/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Feb/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Feb/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Feb/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Feb/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Feb/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Feb/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Feb/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Feb/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Feb/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Feb/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Feb/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 
127.0.0.1 - - [01/Feb/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Feb/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Feb/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Feb/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Feb/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Feb/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Feb/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Feb/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Feb/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Feb/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Feb/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Feb/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Feb/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Feb/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Feb/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Feb/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Feb/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Feb/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Feb/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Feb/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Feb/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Feb/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Feb/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" 
"Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Feb/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Feb/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Feb/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Feb/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Feb/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Feb/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Feb/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Feb/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Feb/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Feb/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Feb/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Feb/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Feb/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jan/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jan/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jan/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jan/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jan/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jan/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jan/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jan/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jan/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) 
Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jan/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jan/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jan/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jan/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jan/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jan/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jan/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jan/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jan/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jan/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jan/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jan/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jan/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jan/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jan/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jan/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Mar/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Mar/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Mar/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Mar/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Mar/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Feb/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - 
[01/Feb/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Feb/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Feb/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Feb/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Feb/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Feb/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Feb/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Feb/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Feb/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Feb/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Feb/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Feb/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Feb/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Feb/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Feb/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Feb/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Feb/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Feb/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Feb/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Feb/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Feb/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Feb/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Feb/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 
"-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Feb/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Feb/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Feb/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Feb/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Feb/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Feb/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Feb/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Feb/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Feb/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Feb/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Feb/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Feb/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Feb/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Feb/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Feb/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Feb/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Feb/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Feb/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Feb/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Feb/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Feb/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Feb/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; 
rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Feb/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Feb/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Feb/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Feb/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jan/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jan/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jan/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jan/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jan/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jan/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jan/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jan/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jan/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jan/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jan/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jan/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jan/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jan/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jan/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jan/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jan/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jan/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; 
rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jan/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jan/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jan/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jan/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jan/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jan/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jan/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Mar/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Mar/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Mar/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Mar/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Mar/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 
5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Feb/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; 
Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0"
[The remainder of this sample access log repeats the same combined-log-format entry many times, varying only the client IP (192.168.1.211 or 127.0.0.1), the request method (GET or POST), and the month (Jan through Jun 2010); every entry returns status 200 with 50 bytes and the same Firefox 2.0 user agent. Representative entries, one per line:]
192.168.1.211 - - [01/Feb/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0"
192.168.1.211 - - [01/Jan/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0"
192.168.1.211 - - [01/Mar/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0"
127.0.0.1 - - [01/Jan/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0"
127.0.0.1 - - [01/Feb/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0"
192.168.1.211 - - [01/Apr/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0"
192.168.1.211 - - [01/May/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0"
192.168.1.211 - - [01/Jun/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0"
127.0.0.1 - -
[01/Apr/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Apr/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jun/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jun/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jun/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jun/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jun/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 
(Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 
127.0.0.1 - - [01/May/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Apr/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Apr/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Apr/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Apr/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Apr/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Apr/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Apr/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Apr/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Apr/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Apr/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Apr/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 
"-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Apr/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Apr/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Apr/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Apr/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Apr/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Apr/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Apr/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Apr/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Apr/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Apr/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Apr/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Apr/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Apr/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Apr/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jun/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jun/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jun/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jun/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jun/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; 
rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - 
[01/May/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 
"-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/May/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jul/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jul/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jul/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jul/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jul/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jul/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jul/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jul/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jul/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jul/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jul/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jul/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jul/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jul/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jul/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jul/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jul/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jul/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jul/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jul/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 
50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jul/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jul/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jul/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jul/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jul/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Sep/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Sep/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Sep/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Sep/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Sep/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2010:00:00:00 -0800] "GET / 
HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2010:00:00:00 
-0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jul/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jul/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jul/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jul/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jul/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - 
[01/Jul/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jul/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jul/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jul/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jul/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jul/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jul/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jul/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jul/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jul/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jul/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jul/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jul/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jul/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jul/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jul/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jul/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jul/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jul/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jul/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Sep/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Sep/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 
Firefox/2.0" 192.168.1.211 - - [01/Sep/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Sep/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Sep/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; 
en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" 
"Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Aug/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jul/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jul/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jul/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jul/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jul/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jul/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jul/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jul/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jul/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jul/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jul/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jul/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jul/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows 
NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0"

[... sample weblog data continues: the same synthetic Apache combined-log-format entry repeated many times, varying only the client IP (127.0.0.1 or 192.168.1.211), the HTTP method (GET or POST), and the month (Jul, Aug, or Sep 2010); every entry requests "/", returns status 200 with 50 bytes, an empty referer, and the same Firefox 2.0 user agent, for example:

127.0.0.1 - - [01/Jul/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0"
192.168.1.211 - - [01/Aug/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0"
...]

127.0.0.1 - - [01/Aug/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50
"-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; 
rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Aug/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Oct/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Oct/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Oct/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Oct/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Oct/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Oct/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Oct/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Oct/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Oct/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Oct/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Oct/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Oct/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Oct/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Oct/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Oct/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Oct/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Oct/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Oct/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Oct/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Oct/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Oct/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; 
en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Oct/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Oct/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Oct/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Oct/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jun/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jun/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jun/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jun/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jun/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; 
Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 
(Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Oct/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Oct/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Oct/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Oct/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Oct/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Oct/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Oct/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 
"-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Oct/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Oct/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Oct/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Oct/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Oct/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Oct/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Oct/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Oct/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Oct/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Oct/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Oct/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Oct/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Oct/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Oct/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Oct/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Oct/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Oct/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Oct/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jun/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jun/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jun/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jun/2010:00:00:00 
-0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Jun/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 
192.168.1.211 - - [01/Nov/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; 
rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 192.168.1.211 - - [01/Nov/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Oct/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Oct/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Oct/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Oct/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Oct/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Oct/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Oct/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Oct/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Oct/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Oct/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Oct/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Oct/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Oct/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Oct/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 
127.0.0.1 - - [01/Oct/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Oct/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Oct/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Oct/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Oct/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Oct/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Oct/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Oct/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Oct/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Oct/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Oct/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jun/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jun/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jun/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jun/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jun/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" 
"Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 
Firefox/2.0" 127.0.0.1 - - [01/Nov/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2010:00:00:00 -0800] "GET / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Oct/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Oct/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 
50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Oct/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Oct/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Oct/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Oct/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Oct/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Oct/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Oct/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Oct/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Oct/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Oct/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Oct/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Oct/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Oct/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Oct/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Oct/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Oct/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Oct/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Oct/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Oct/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Oct/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Oct/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Oct/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; 
rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Oct/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jun/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jun/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jun/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jun/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Jun/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - 
[01/Nov/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 
"-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Nov/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Dec/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Dec/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Dec/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Dec/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Dec/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Dec/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Dec/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Dec/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Dec/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Dec/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Dec/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; 
rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Dec/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Dec/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Dec/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Dec/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Dec/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Dec/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Dec/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Dec/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Dec/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Dec/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Dec/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Dec/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Dec/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Dec/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Dec/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Dec/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Dec/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Dec/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Dec/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Dec/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Dec/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Dec/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - 
[01/Dec/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Dec/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Dec/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Dec/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Dec/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Dec/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Dec/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Dec/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Dec/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Dec/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Dec/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Dec/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Dec/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Dec/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Dec/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Dec/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Dec/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Dec/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Dec/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Dec/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Dec/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" 127.0.0.1 - - [01/Dec/2010:00:00:00 -0800] "POST / HTTP/1.1" 200 50 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0" ================================================ FILE: 
assemblies/samples/src/main/resources/jobs/hadoop/files/readme.txt ================================================ Files in this folder can be copied to your HDFS to be used with the provided samples. ================================================ FILE: assemblies/samples/src/main/resources/jobs/hadoop/pentaho-mapreduce-sample-src/README.TXT ================================================ The pentaho-mapreduce-sample-src contains code that can be built and jar'd for use by the Hadoop Job Executor. The sample is a WordCount example where 4 command line arguments can be passed in to override the defaults. --input=DIR The directory containing the input files for the WordCount Hadoop job --output=DIR The directory where the results of the WordCount Hadoop job will be stored --hdfsHost=HOST The host<:port> of the HDFS service e.g.- localhost:9000 --jobTrackerHost=HOST The host<:port> of the job tracker service e.g.- localhost:9001 ================================================ FILE: assemblies/samples/src/main/resources/jobs/hadoop/pentaho-mapreduce-sample-src/src/org/pentaho/hadoop/sample/wordcount/WordCount.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.hadoop.sample.wordcount; import org.apache.hadoop.fs.Path; import org.apache.hadoop.io.IntWritable; import org.apache.hadoop.io.Text; import org.apache.hadoop.mapred.FileInputFormat; import org.apache.hadoop.mapred.FileOutputFormat; import org.apache.hadoop.mapred.JobClient; import org.apache.hadoop.mapred.JobConf; public class WordCount { public static void main(String[] args) throws Exception { String hdfsHost = "localhost:9000"; String jobTrackerHost = "localhost:9001"; String fsPrefix = "hdfs"; String dirInput = "/wordcount/input"; String dirOutput = "/wordcount/output"; if (args.length == 1 && ( args[0].equals("--help") || args[0].equals("-h") || args[0].equals("/?") )) { System.out.println("Usage: WordCount "); System.out.println(); System.out.println("Options:"); System.out.println(); System.out.println("--input=DIR The directory containing the input files for the"); System.out.println(" WordCount Hadoop job"); System.out.println("--output=DIR The directory where the results of the WordCount"); System.out.println(" Hadoop job will be stored"); System.out.println("--hdfsHost=HOST The host<:port> of the HDFS service"); System.out.println(" e.g.- localhost:9000"); System.out.println("--jobTrackerHost=HOST The host<:port> of the job tracker service"); System.out.println(" e.g.- localhost:9001"); System.out.println("--fsPrefix=PREFIX The prefix to use for for the filesystem"); System.out.println(" e.g.- hdfs"); System.out.println(); System.out.println(); System.out.println("If an option is not provided through the command prompt the following defaults"); System.out.println("will be used:"); System.out.println("--input='/wordcount/input'"); System.out.println("--output='/wordcount/output'"); System.out.println("--hdfsHost=localhost:9000"); System.out.println("--jobTrackerHost=localhost:9001"); System.out.println("--fsPrefix=hdfs"); } else { if(args.length > 0){ for(String arg : args) { if(arg.startsWith("--input=")) { dirInput = 
WordCount.getArgValue(arg); } else if(arg.startsWith("--output=")) { dirOutput = WordCount.getArgValue(arg); } else if(arg.startsWith("--hdfsHost=")) { hdfsHost = WordCount.getArgValue(arg); } else if(arg.startsWith("--jobTrackerHost=")) { jobTrackerHost = WordCount.getArgValue(arg); } else if(arg.startsWith("--fsPrefix=")) { fsPrefix = WordCount.getArgValue(arg); } } } JobConf conf = new JobConf(WordCount.class); conf.setJobName("WordCount"); String hdfsBaseUrl = fsPrefix + "://" + hdfsHost; conf.set("fs.default.name", hdfsBaseUrl + "/"); if (jobTrackerHost != null && jobTrackerHost.length() > 0) { conf.set("mapred.job.tracker", jobTrackerHost); } FileInputFormat.setInputPaths(conf, new Path[] { new Path(hdfsBaseUrl + dirInput) }); FileOutputFormat.setOutputPath(conf, new Path(hdfsBaseUrl + dirOutput)); conf.setMapperClass(WordCountMapper.class); conf.setReducerClass(WordCountReducer.class); conf.setMapOutputKeyClass(Text.class); conf.setMapOutputValueClass(IntWritable.class); conf.setOutputKeyClass(Text.class); conf.setOutputValueClass(IntWritable.class); JobClient.runJob(conf); } } private static String getArgValue(String arg) { String result = null; String[] tokens = arg.split("="); if(tokens.length > 1) { result = tokens[1].replace("'", "").replace("\"", ""); } System.out.println(arg + " parses to " + result); return result; } } ================================================ FILE: assemblies/samples/src/main/resources/jobs/hadoop/pentaho-mapreduce-sample-src/src/org/pentaho/hadoop/sample/wordcount/WordCountMapper.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.hadoop.sample.wordcount; import java.io.IOException; import java.util.StringTokenizer; import org.apache.hadoop.io.IntWritable; import org.apache.hadoop.io.Text; import org.apache.hadoop.mapred.MapReduceBase; import org.apache.hadoop.mapred.Mapper; import org.apache.hadoop.mapred.OutputCollector; import org.apache.hadoop.mapred.Reporter; public class WordCountMapper extends MapReduceBase implements Mapper { private Text word = new Text(); private static final IntWritable ONE = new IntWritable(1); public void map(Object key, Text value, OutputCollector output, Reporter reporter) throws IOException { StringTokenizer wordList = new StringTokenizer(value.toString()); while (wordList.hasMoreTokens()) { this.word.set(wordList.nextToken()); output.collect(this.word, ONE); } } } ================================================ FILE: assemblies/samples/src/main/resources/jobs/hadoop/pentaho-mapreduce-sample-src/src/org/pentaho/hadoop/sample/wordcount/WordCountReducer.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. 
* * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.hadoop.sample.wordcount; import java.io.IOException; import java.util.Iterator; import org.apache.hadoop.io.IntWritable; import org.apache.hadoop.io.Text; import org.apache.hadoop.mapred.MapReduceBase; import org.apache.hadoop.mapred.OutputCollector; import org.apache.hadoop.mapred.Reducer; import org.apache.hadoop.mapred.Reporter; public class WordCountReducer extends MapReduceBase implements Reducer { private IntWritable totalWordCount = new IntWritable(); public void reduce(Text key, Iterator values, OutputCollector output, Reporter reporter) throws IOException { int wordCount = 0; while (values.hasNext()) { wordCount += ((IntWritable) values.next()).get(); } this.totalWordCount.set(wordCount); output.collect(key, this.totalWordCount); } } ================================================ FILE: assemblies/samples/src/main/resources/jobs/hadoop/pentaho-mapreduce2-sample-src/README.TXT ================================================ The pentaho-mapreduce-sample2-src contains code that can be built and jar'd for use by the Hadoop Job Executor. The sample is a WordCount2 example where 2 command line arguments should be passed in. input The directory containing the input files for the WordCount Hadoop job output The directory where the results of the WordCount2 Hadoop job will be stored ================================================ FILE: assemblies/samples/src/main/resources/jobs/hadoop/pentaho-mapreduce2-sample-src/src/org/pentaho/hadoop/sample/wordcount/WordCount2.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.hadoop.sample.wordcount; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configured; import org.apache.hadoop.fs.Path; import org.apache.hadoop.io.IntWritable; import org.apache.hadoop.io.LongWritable; import org.apache.hadoop.io.Text; import org.apache.hadoop.mapreduce.Job; import org.apache.hadoop.mapreduce.Mapper; import org.apache.hadoop.mapreduce.Reducer; import org.apache.hadoop.mapreduce.lib.input.FileInputFormat; import org.apache.hadoop.mapreduce.lib.input.TextInputFormat; import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat; import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat; import org.apache.hadoop.util.Tool; import org.apache.hadoop.util.ToolRunner; import java.io.IOException; import java.util.StringTokenizer; public class WordCount2 extends Configured implements Tool { public int run( String[] strings ) throws Exception { Configuration conf = getConf(); Job job = Job.getInstance( conf, "wordcount2" ); job.setJarByClass( WordCount2.class ); job.setOutputKeyClass( Text.class ); job.setOutputValueClass( IntWritable.class ); job.setMapperClass( Map.class ); job.setReducerClass( Reduce.class ); job.setInputFormatClass( TextInputFormat.class ); job.setOutputFormatClass( TextOutputFormat.class ); FileInputFormat.addInputPath( job, new Path( strings[ 0 ] ) ); FileOutputFormat.setOutputPath( job, new Path( strings[ 1 ] ) ); return job.waitForCompletion( true ) ? 
0 : 1; } public static class Map extends Mapper { private static final IntWritable one = new IntWritable( 1 ); private Text word = new Text(); public void map( LongWritable key, Text value, Context context ) throws IOException, InterruptedException { String line = value.toString(); StringTokenizer tokenizer = new StringTokenizer( line ); while ( tokenizer.hasMoreTokens() ) { this.word.set( tokenizer.nextToken() ); context.write( this.word, one ); } } } public static class Reduce extends Reducer { public void reduce( Text key, Iterable values, Context context ) throws IOException, InterruptedException { int sum = 0; for ( IntWritable val : values ) { sum += val.get(); } context.write( key, new IntWritable( sum ) ); } } public static void main( String[] args ) throws Exception { int exitCode = ToolRunner.run( new Configuration(), new WordCount2(), args ); System.exit( exitCode ); } } ================================================ FILE: assemblies/samples/src/main/resources/jobs/hadoop/script1-hadoop-mod.pig ================================================ /* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ -- Query Phrase Popularity (Hadoop cluster) -- This script processes a search query log file from the Excite search engine and finds search phrases that occur with particular high frequency during certain times of the day. -- Register the tutorial JAR file so that the included UDFs can be called in the script. REGISTER $udf_jar; -- Use the PigStorage function to load the excite log file into the "raw" bag as an array of records. -- Input: (user,time,query) raw = LOAD '/excite.log.bz2' USING PigStorage('\t') AS (user, time, query); -- Call the NonURLDetector UDF to remove records if the query field is empty or a URL. clean1 = FILTER raw BY org.apache.pig.tutorial.NonURLDetector(query); -- Call the ToLower UDF to change the query field to lowercase. clean2 = FOREACH clean1 GENERATE user, time, org.apache.pig.tutorial.ToLower(query) as query; -- Because the log file only contains queries for a single day, we are only interested in the hour. -- The excite query log timestamp format is YYMMDDHHMMSS. -- Call the ExtractHour UDF to extract the hour (HH) from the time field. houred = FOREACH clean2 GENERATE user, org.apache.pig.tutorial.ExtractHour(time) as hour, query; -- Call the NGramGenerator UDF to compose the n-grams of the query. ngramed1 = FOREACH houred GENERATE user, hour, flatten(org.apache.pig.tutorial.NGramGenerator(query)) as ngram; -- Use the DISTINCT command to get the unique n-grams for all records. ngramed2 = DISTINCT ngramed1; -- Use the GROUP command to group records by n-gram and hour. hour_frequency1 = GROUP ngramed2 BY (ngram, hour); -- Use the COUNT function to get the count (occurrences) of each n-gram. 
hour_frequency2 = FOREACH hour_frequency1 GENERATE flatten($0), COUNT($1) as count; -- Use the GROUP command to group records by n-gram only. -- Each group now corresponds to a distinct n-gram and has the count for each hour. uniq_frequency1 = GROUP hour_frequency2 BY group::ngram; -- For each group, identify the hour in which this n-gram is used with a particularly high frequency. -- Call the ScoreGenerator UDF to calculate a "popularity" score for the n-gram. uniq_frequency2 = FOREACH uniq_frequency1 GENERATE flatten($0), flatten(org.apache.pig.tutorial.ScoreGenerator($1)); -- Use the FOREACH-GENERATE command to assign names to the fields. uniq_frequency3 = FOREACH uniq_frequency2 GENERATE $1 as hour, $0 as ngram, $2 as score, $3 as count, $4 as mean; -- Use the FILTER command to move all records with a score less than or equal to 2.0. filtered_uniq_frequency = FILTER uniq_frequency3 BY score > 2.0; -- Use the ORDER command to sort the remaining records by hour and score. ordered_uniq_frequency = ORDER filtered_uniq_frequency BY hour, score; -- Use the PigStorage function to store the results. -- Output: (hour, n-gram, score, count, average_counts_among_all_hours) STORE ordered_uniq_frequency INTO '/script1-hadoop-results' USING PigStorage(); ================================================ FILE: assemblies/samples/src/main/resources/jobs/hadoop/script1-local-mod.pig ================================================ /* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ -- Query Phrase Popularity (local mode) -- This script processes a search query log file from the Excite search engine and finds search phrases that occur with particular high frequency during certain times of the day. -- Register the tutorial JAR file so that the included UDFs can be called in the script. REGISTER $udf_jar; -- Use the PigStorage function to load the excite log file into the raw bag as an array of records. -- Input: (user,time,query) raw = LOAD '$excite_small' USING PigStorage('\t') AS (user, time, query); -- Call the NonURLDetector UDF to remove records if the query field is empty or a URL. clean1 = FILTER raw BY org.apache.pig.tutorial.NonURLDetector(query); -- Call the ToLower UDF to change the query field to lowercase. clean2 = FOREACH clean1 GENERATE user, time, org.apache.pig.tutorial.ToLower(query) as query; -- Because the log file only contains queries for a single day, we are only interested in the hour. -- The excite query log timestamp format is YYMMDDHHMMSS. -- Call the ExtractHour UDF to extract the hour (HH) from the time field. houred = FOREACH clean2 GENERATE user, org.apache.pig.tutorial.ExtractHour(time) as hour, query; -- Call the NGramGenerator UDF to compose the n-grams of the query. 
ngramed1 = FOREACH houred GENERATE user, hour, flatten(org.apache.pig.tutorial.NGramGenerator(query)) as ngram; -- Use the DISTINCT command to get the unique n-grams for all records. ngramed2 = DISTINCT ngramed1; -- Use the GROUP command to group records by n-gram and hour. hour_frequency1 = GROUP ngramed2 BY (ngram, hour); -- Use the COUNT function to get the count (occurrences) of each n-gram. hour_frequency2 = FOREACH hour_frequency1 GENERATE flatten($0), COUNT($1) as count; -- Use the GROUP command to group records by n-gram only. -- Each group now corresponds to a distinct n-gram and has the count for each hour. uniq_frequency1 = GROUP hour_frequency2 BY group::ngram; -- For each group, identify the hour in which this n-gram is used with a particularly high frequency. -- Call the ScoreGenerator UDF to calculate a "popularity" score for the n-gram. uniq_frequency2 = FOREACH uniq_frequency1 GENERATE flatten($0), flatten(org.apache.pig.tutorial.ScoreGenerator($1)); -- Use the FOREACH-GENERATE command to assign names to the fields. uniq_frequency3 = FOREACH uniq_frequency2 GENERATE $1 as hour, $0 as ngram, $2 as score, $3 as count, $4 as mean; -- Use the FILTER command to move all records with a score less than or equal to 2.0. filtered_uniq_frequency = FILTER uniq_frequency3 BY score > 2.0; -- Use the ORDER command to sort the remaining records by hour and score. ordered_uniq_frequency = ORDER filtered_uniq_frequency BY hour, score; -- Use the PigStorage function to store the results. -- Output: (hour, n-gram, score, count, average_counts_among_all_hours) STORE ordered_uniq_frequency INTO 'script1-local-results.txt' USING PigStorage(); ================================================ FILE: assemblies/samples/src/main/resources/jobs/hadoop/weblogs-mapper.ktr ================================================ weblogs-mapper Normal 0 /
ID_BATCH Y ID_BATCH CHANNEL_ID Y CHANNEL_ID TRANSNAME Y TRANSNAME STATUS Y STATUS LINES_READ Y LINES_READ LINES_WRITTEN Y LINES_WRITTEN LINES_UPDATED Y LINES_UPDATED LINES_INPUT Y LINES_INPUT LINES_OUTPUT Y LINES_OUTPUT LINES_REJECTED Y LINES_REJECTED ERRORS Y ERRORS STARTDATE Y STARTDATE ENDDATE Y ENDDATE LOGDATE Y LOGDATE DEPDATE Y DEPDATE REPLAYDATE Y REPLAYDATE LOG_FIELD Y LOG_FIELD EXECUTING_SERVER N EXECUTING_SERVER EXECUTING_USER N EXECUTING_USER CLIENT N CLIENT
ID_BATCH Y ID_BATCH SEQ_NR Y SEQ_NR LOGDATE Y LOGDATE TRANSNAME Y TRANSNAME STEPNAME Y STEPNAME STEP_COPY Y STEP_COPY LINES_READ Y LINES_READ LINES_WRITTEN Y LINES_WRITTEN LINES_UPDATED Y LINES_UPDATED LINES_INPUT Y LINES_INPUT LINES_OUTPUT Y LINES_OUTPUT LINES_REJECTED Y LINES_REJECTED ERRORS Y ERRORS INPUT_BUFFER_ROWS Y INPUT_BUFFER_ROWS OUTPUT_BUFFER_ROWS Y OUTPUT_BUFFER_ROWS
ID_BATCH Y ID_BATCH CHANNEL_ID Y CHANNEL_ID LOG_DATE Y LOG_DATE LOGGING_OBJECT_TYPE Y LOGGING_OBJECT_TYPE OBJECT_NAME Y OBJECT_NAME OBJECT_COPY Y OBJECT_COPY REPOSITORY_DIRECTORY Y REPOSITORY_DIRECTORY FILENAME Y FILENAME OBJECT_ID Y OBJECT_ID OBJECT_REVISION Y OBJECT_REVISION PARENT_CHANNEL_ID Y PARENT_CHANNEL_ID ROOT_CHANNEL_ID Y ROOT_CHANNEL_ID
ID_BATCH Y ID_BATCH CHANNEL_ID Y CHANNEL_ID LOG_DATE Y LOG_DATE TRANSNAME Y TRANSNAME STEPNAME Y STEPNAME STEP_COPY Y STEP_COPY LINES_READ Y LINES_READ LINES_WRITTEN Y LINES_WRITTEN LINES_UPDATED Y LINES_UPDATED LINES_INPUT Y LINES_INPUT LINES_OUTPUT Y LINES_OUTPUT LINES_REJECTED Y LINES_REJECTED ERRORS Y ERRORS LOG_FIELD N LOG_FIELD
ID_BATCH Y ID_BATCH CHANNEL_ID Y CHANNEL_ID LOG_DATE Y LOG_DATE METRICS_DATE Y METRICS_DATE METRICS_CODE Y METRICS_CODE METRICS_DESCRIPTION Y METRICS_DESCRIPTION METRICS_SUBJECT Y METRICS_SUBJECT METRICS_TYPE Y METRICS_TYPE METRICS_VALUE Y METRICS_VALUE
0.0 0.0 10000 50 50 N Y 50000 Y N 1000 100 clusterTest 40000 2000 5000 Y Y 2010/10/26 15:38:33.201 - 2010/07/15 10:12:26.133 H4sIAAAAAAAAAAMAAAAAAAAAAAA= N Hadoop will pass data into this step based on the format defined in the Pentaho MapReduce entry used to run this mapper. For this example we assume the input will be: (hadoop-generated key, line from log file) 11 36 302 98 Ariel 8 N N 0 0 0 255 255 0 100 100 100 Y Parse the log line into identifiable fields (client ip, http request, day, month, etc). 196 378 242 39 Ariel 8 N N 0 0 0 255 255 0 100 100 100 Y Combine the month and year of the log line to create the output key (how the output will be ultimately grouped). 540 376 257 54 Ariel 8 N N 0 0 0 255 255 0 100 100 100 Y Define the output of this Transformation as the output key previously generated and the client_ip that made the request. These key-value pairs are passed to the reducer where they will be grouped by the output key and tallied up. 658 56 401 69 Ariel 8 N N 0 0 0 255 255 0 100 100 100 Y Parse Log Combine Year and Month into output key Y Hadoop Input Parse Log Y Combine Year and Month into output key Hadoop Output Y Combine Year and Month into output key Calculator Y 1 none outKey ADD year month String -1 -1 N 645 297 Y Hadoop Input HadoopEnterPlugin Y 1 none key String 0 0 value String 0 0 120 155 Y Hadoop Output HadoopExitPlugin Y 1 none outKey client_ip 824 155 Y Parse Log RegexEval Y 2 none value is_match Y Y N N N Y N N N N client_ip String -1 -1 none full_request_date Date dd/MMM/yyyy:HH:mm:ss Z -1 -1 none day Integer -1 -1 none month String -1 -1 none year String -1 -1 none hour Integer -1 -1 none minute Integer -1 -1 none second Integer -1 -1 none tz String -1 -1 none http_verb String -1 -1 none URI String -1 -1 none http_status_code Integer -1 -1 none bytes_returned Integer -1 -1 none referrer String -1 -1 - none user_agent String -1 -1 - Unknown none firefox_gecko_version String -1 -1 none firefox_gecko_version_major String -1 -1 none firefox_gecko_version_minor String -1 -1 none firefox_gecko_version_a String -1 -1 none firefox_gecko_version_b String -1 -1 none 299 297 Y N ================================================ FILE: assemblies/samples/src/main/resources/jobs/hadoop/weblogs-reducer.ktr ================================================ weblogs-reducer Normal 0 /
ID_BATCH Y ID_BATCH CHANNEL_ID Y CHANNEL_ID TRANSNAME Y TRANSNAME STATUS Y STATUS LINES_READ Y LINES_READ LINES_WRITTEN Y LINES_WRITTEN LINES_UPDATED Y LINES_UPDATED LINES_INPUT Y LINES_INPUT LINES_OUTPUT Y LINES_OUTPUT LINES_REJECTED Y LINES_REJECTED ERRORS Y ERRORS STARTDATE Y STARTDATE ENDDATE Y ENDDATE LOGDATE Y LOGDATE DEPDATE Y DEPDATE REPLAYDATE Y REPLAYDATE LOG_FIELD Y LOG_FIELD EXECUTING_SERVER N EXECUTING_SERVER EXECUTING_USER N EXECUTING_USER CLIENT N CLIENT
ID_BATCH Y ID_BATCH SEQ_NR Y SEQ_NR LOGDATE Y LOGDATE TRANSNAME Y TRANSNAME STEPNAME Y STEPNAME STEP_COPY Y STEP_COPY LINES_READ Y LINES_READ LINES_WRITTEN Y LINES_WRITTEN LINES_UPDATED Y LINES_UPDATED LINES_INPUT Y LINES_INPUT LINES_OUTPUT Y LINES_OUTPUT LINES_REJECTED Y LINES_REJECTED ERRORS Y ERRORS INPUT_BUFFER_ROWS Y INPUT_BUFFER_ROWS OUTPUT_BUFFER_ROWS Y OUTPUT_BUFFER_ROWS
ID_BATCH Y ID_BATCH CHANNEL_ID Y CHANNEL_ID LOG_DATE Y LOG_DATE LOGGING_OBJECT_TYPE Y LOGGING_OBJECT_TYPE OBJECT_NAME Y OBJECT_NAME OBJECT_COPY Y OBJECT_COPY REPOSITORY_DIRECTORY Y REPOSITORY_DIRECTORY FILENAME Y FILENAME OBJECT_ID Y OBJECT_ID OBJECT_REVISION Y OBJECT_REVISION PARENT_CHANNEL_ID Y PARENT_CHANNEL_ID ROOT_CHANNEL_ID Y ROOT_CHANNEL_ID
ID_BATCH Y ID_BATCH CHANNEL_ID Y CHANNEL_ID LOG_DATE Y LOG_DATE TRANSNAME Y TRANSNAME STEPNAME Y STEPNAME STEP_COPY Y STEP_COPY LINES_READ Y LINES_READ LINES_WRITTEN Y LINES_WRITTEN LINES_UPDATED Y LINES_UPDATED LINES_INPUT Y LINES_INPUT LINES_OUTPUT Y LINES_OUTPUT LINES_REJECTED Y LINES_REJECTED ERRORS Y ERRORS LOG_FIELD N LOG_FIELD
ID_BATCH Y ID_BATCH CHANNEL_ID Y CHANNEL_ID LOG_DATE Y LOG_DATE METRICS_DATE Y METRICS_DATE METRICS_CODE Y METRICS_CODE METRICS_DESCRIPTION Y METRICS_DESCRIPTION METRICS_SUBJECT Y METRICS_SUBJECT METRICS_TYPE Y METRICS_TYPE METRICS_VALUE Y METRICS_VALUE
0.0 0.0 10000 50 50 N Y 50000 Y N 1000 100 clusterTest 40000 2000 5000 Y Y 2010/08/16 14:33:33.360 - 2010/07/16 09:23:42.406 H4sIAAAAAAAAAAMAAAAAAAAAAAA= N Hadoop will pass data into this step based on the input format defined in the Pentaho MapReduce entry used to run this reducer. We are expecting: (monthYear, client_ip) from the previously parsed log line. 10 45 332 113 Microsoft Sans Serif 8 N N 0 0 0 255 255 0 100 100 100 Y This step performs the counting of HTTP methods. 341 266 178 49 Microsoft Sans Serif 8 N N 0 0 0 255 255 0 100 100 100 Y Data that flows out of this step will become the output of the reducer running this transformation. The output here is considered reduced (every occurrence of a key (yearMonth) will be tallied up and passed on). 475 80 377 69 Microsoft Sans Serif 8 N N 0 0 0 255 255 0 100 100 100 Y Hadoop Input Group by Y Group by Hadoop Output Y Group by GroupBy N 1 none N N %%java.io.tmpdir%% grp N N key sum value COUNT_ALL 409 181 Y Hadoop Input HadoopEnterPlugin Y 1 none key String 0 0 value String 0 0 169 181 Y Hadoop Output HadoopExitPlugin Y 1 none key sum 649 181 Y N ================================================ FILE: assemblies/samples/src/main/resources/jobs/hadoop/wordcount-mapper.ktr ================================================ wordcount-mapper Normal 0 /
ID_BATCH Y ID_BATCH CHANNEL_ID Y CHANNEL_ID TRANSNAME Y TRANSNAME STATUS Y STATUS LINES_READ Y LINES_READ LINES_WRITTEN Y LINES_WRITTEN LINES_UPDATED Y LINES_UPDATED LINES_INPUT Y LINES_INPUT LINES_OUTPUT Y LINES_OUTPUT LINES_REJECTED Y LINES_REJECTED ERRORS Y ERRORS STARTDATE Y STARTDATE ENDDATE Y ENDDATE LOGDATE Y LOGDATE DEPDATE Y DEPDATE REPLAYDATE Y REPLAYDATE LOG_FIELD Y LOG_FIELD EXECUTING_SERVER N EXECUTING_SERVER EXECUTING_USER N EXECUTING_USER CLIENT N CLIENT
ID_BATCH Y ID_BATCH SEQ_NR Y SEQ_NR LOGDATE Y LOGDATE TRANSNAME Y TRANSNAME STEPNAME Y STEPNAME STEP_COPY Y STEP_COPY LINES_READ Y LINES_READ LINES_WRITTEN Y LINES_WRITTEN LINES_UPDATED Y LINES_UPDATED LINES_INPUT Y LINES_INPUT LINES_OUTPUT Y LINES_OUTPUT LINES_REJECTED Y LINES_REJECTED ERRORS Y ERRORS INPUT_BUFFER_ROWS Y INPUT_BUFFER_ROWS OUTPUT_BUFFER_ROWS Y OUTPUT_BUFFER_ROWS
ID_BATCH Y ID_BATCH CHANNEL_ID Y CHANNEL_ID LOG_DATE Y LOG_DATE LOGGING_OBJECT_TYPE Y LOGGING_OBJECT_TYPE OBJECT_NAME Y OBJECT_NAME OBJECT_COPY Y OBJECT_COPY REPOSITORY_DIRECTORY Y REPOSITORY_DIRECTORY FILENAME Y FILENAME OBJECT_ID Y OBJECT_ID OBJECT_REVISION Y OBJECT_REVISION PARENT_CHANNEL_ID Y PARENT_CHANNEL_ID ROOT_CHANNEL_ID Y ROOT_CHANNEL_ID
ID_BATCH Y ID_BATCH CHANNEL_ID Y CHANNEL_ID LOG_DATE Y LOG_DATE TRANSNAME Y TRANSNAME STEPNAME Y STEPNAME STEP_COPY Y STEP_COPY LINES_READ Y LINES_READ LINES_WRITTEN Y LINES_WRITTEN LINES_UPDATED Y LINES_UPDATED LINES_INPUT Y LINES_INPUT LINES_OUTPUT Y LINES_OUTPUT LINES_REJECTED Y LINES_REJECTED ERRORS Y ERRORS LOG_FIELD N LOG_FIELD
ID_BATCH Y ID_BATCH CHANNEL_ID Y CHANNEL_ID LOG_DATE Y LOG_DATE METRICS_DATE Y METRICS_DATE METRICS_CODE Y METRICS_CODE METRICS_DESCRIPTION Y METRICS_DESCRIPTION METRICS_SUBJECT Y METRICS_SUBJECT METRICS_TYPE Y METRICS_TYPE METRICS_VALUE Y METRICS_VALUE
0.0 0.0 10000 50 50 N Y 50000 Y N 1000 100 clusterTest 40000 2000 5000 Y Y 2010/10/25 08:45:28.015 - 2010/07/15 10:12:26.133 H4sIAAAAAAAAAAMAAAAAAAAAAAA= N Hadoop will pass data into this step based on the format defined in the Pentaho MapReduce entry used to run this mapper. For this example we assume the input will be: (hadoop-generated key, line from text file) 10 50 302 98 Microsoft Sans Serif 8 N N 0 0 0 255 255 0 100 100 100 Y In this particular example, each "row" contains several words which we want to split so they may be individually counted. 37 335 286 54 Microsoft Sans Serif 8 N N 0 0 0 255 255 0 100 100 100 Y Each word that we split will have a default count value of 1. 481 342 207 39 Microsoft Sans Serif 8 N N 0 0 0 255 255 0 100 100 100 Y Define the output of this Transformation as the word we've split from the row and the count of 1 (it's only 1 word after all). These key-value pairs are passed to the reducer where they are grouped by the output key and tallied up. 562 67 384 69 Microsoft Sans Serif 8 N N 0 0 0 255 255 0 100 100 100 Y Split words to rows Add value Y Hadoop Input Split words to rows Y Add value Hadoop Output Y Add value Constant Y 1 none count Integer 1 -1 -1 N 490 266 Y Hadoop Input HadoopEnterPlugin Y 1 none key String 0 0 value String 0 0 100 170 Y Hadoop Output HadoopExitPlugin Y 1 none word count 685 170 Y Split words to rows SplitFieldToRows3 Y 1 none value word N Y N 295 266 Y N ================================================ FILE: assemblies/samples/src/main/resources/jobs/hadoop/wordcount-reducer.ktr ================================================ wordcount-reducer Normal 0 /
ID_BATCH Y ID_BATCH CHANNEL_ID Y CHANNEL_ID TRANSNAME Y TRANSNAME STATUS Y STATUS LINES_READ Y LINES_READ LINES_WRITTEN Y LINES_WRITTEN LINES_UPDATED Y LINES_UPDATED LINES_INPUT Y LINES_INPUT LINES_OUTPUT Y LINES_OUTPUT LINES_REJECTED Y LINES_REJECTED ERRORS Y ERRORS STARTDATE Y STARTDATE ENDDATE Y ENDDATE LOGDATE Y LOGDATE DEPDATE Y DEPDATE REPLAYDATE Y REPLAYDATE LOG_FIELD Y LOG_FIELD EXECUTING_SERVER N EXECUTING_SERVER EXECUTING_USER N EXECUTING_USER CLIENT N CLIENT
ID_BATCH Y ID_BATCH SEQ_NR Y SEQ_NR LOGDATE Y LOGDATE TRANSNAME Y TRANSNAME STEPNAME Y STEPNAME STEP_COPY Y STEP_COPY LINES_READ Y LINES_READ LINES_WRITTEN Y LINES_WRITTEN LINES_UPDATED Y LINES_UPDATED LINES_INPUT Y LINES_INPUT LINES_OUTPUT Y LINES_OUTPUT LINES_REJECTED Y LINES_REJECTED ERRORS Y ERRORS INPUT_BUFFER_ROWS Y INPUT_BUFFER_ROWS OUTPUT_BUFFER_ROWS Y OUTPUT_BUFFER_ROWS
ID_BATCH Y ID_BATCH CHANNEL_ID Y CHANNEL_ID LOG_DATE Y LOG_DATE LOGGING_OBJECT_TYPE Y LOGGING_OBJECT_TYPE OBJECT_NAME Y OBJECT_NAME OBJECT_COPY Y OBJECT_COPY REPOSITORY_DIRECTORY Y REPOSITORY_DIRECTORY FILENAME Y FILENAME OBJECT_ID Y OBJECT_ID OBJECT_REVISION Y OBJECT_REVISION PARENT_CHANNEL_ID Y PARENT_CHANNEL_ID ROOT_CHANNEL_ID Y ROOT_CHANNEL_ID
ID_BATCH Y ID_BATCH CHANNEL_ID Y CHANNEL_ID LOG_DATE Y LOG_DATE TRANSNAME Y TRANSNAME STEPNAME Y STEPNAME STEP_COPY Y STEP_COPY LINES_READ Y LINES_READ LINES_WRITTEN Y LINES_WRITTEN LINES_UPDATED Y LINES_UPDATED LINES_INPUT Y LINES_INPUT LINES_OUTPUT Y LINES_OUTPUT LINES_REJECTED Y LINES_REJECTED ERRORS Y ERRORS LOG_FIELD N LOG_FIELD
ID_BATCH Y ID_BATCH CHANNEL_ID Y CHANNEL_ID LOG_DATE Y LOG_DATE METRICS_DATE Y METRICS_DATE METRICS_CODE Y METRICS_CODE METRICS_DESCRIPTION Y METRICS_DESCRIPTION METRICS_SUBJECT Y METRICS_SUBJECT METRICS_TYPE Y METRICS_TYPE METRICS_VALUE Y METRICS_VALUE
0.0 0.0 10000 50 50 N Y 50000 Y N 1000 100 2010/08/12 12:40:09.759 - 2010/07/16 09:23:42.406 H4sIAAAAAAAAAAMAAAAAAAAAAAA= N This step serves as an injection point for our GenericTransReducer to add rows of data to the transformation. 30 58 255 54 Microsoft Sans Serif 8 N N 0 0 0 255 255 0 100 100 100 Y This step performs the actual counting for WordCount. 218 222 188 39 Microsoft Sans Serif 8 N N 0 0 0 255 255 0 100 100 100 Y A simple "Dummy" step used to listen to rows as they are generated by the transformation for the GenericTransReducer. Output here is considered reduced, as every occurance of a word will be tallied up and passed on. 518 26 312 84 Microsoft Sans Serif 8 N N 0 0 0 255 255 0 100 100 100 Y Hadoop Input Group by Y Group by Hadoop Output Y Group by GroupBy N 1 none N N %%java.io.tmpdir%% grp N N key sum value SUM 302 145 Y Hadoop Input HadoopEnterPlugin Y 1 none key String 0 2 value Integer 0 5 155 143 Y Hadoop Output HadoopExitPlugin Y 1 none key sum 655 146 Y N ================================================ FILE: authentication-mapper/api/pom.xml ================================================ 4.0.0 pentaho pentaho-authentication-mapper-parent 11.1.0.0-SNAPSHOT pentaho-authentication-mapper-api 11.1.0.0-SNAPSHOT jar maven-jar-plugin test-jar package test-jar ================================================ FILE: authentication-mapper/api/src/main/java/org/pentaho/authentication/mapper/api/AuthenticationMappingManager.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.authentication.mapper.api; /** * @author bryan */ public interface AuthenticationMappingManager { String RANKING_CONFIG = "service.ranking"; OutputType getMapping( Class inputType, InputType input, Class outputType ) throws MappingException; } ================================================ FILE: authentication-mapper/api/src/main/java/org/pentaho/authentication/mapper/api/AuthenticationMappingService.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.authentication.mapper.api; import java.util.Map; /** * @author bryan */ public interface AuthenticationMappingService { String getId(); Class getInputType(); Class getOutputType(); boolean accepts( Object input ); OutputType getMapping( InputType input, Map config ) throws MappingException; } ================================================ FILE: authentication-mapper/api/src/main/java/org/pentaho/authentication/mapper/api/MappingException.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. 
* * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.authentication.mapper.api; /** * Created by bryan on 3/18/16. */ public class MappingException extends Exception { public MappingException() { } public MappingException( String message ) { super( message ); } public MappingException( String message, Throwable cause ) { super( message, cause ); } public MappingException( Throwable cause ) { super( cause ); } @FunctionalInterface public interface Function { R apply( T t ) throws MappingException; } @FunctionalInterface public interface Supplier { R get() throws MappingException; } } ================================================ FILE: authentication-mapper/impl/pom.xml ================================================ 4.0.0 pentaho pentaho-authentication-mapper-parent 11.1.0.0-SNAPSHOT pentaho-authentication-mapper-impl 11.1.0.0-SNAPSHOT jar pentaho pentaho-authentication-mapper-api ${project.version} org.slf4j slf4j-api ${project.artifactId} maven-jar-plugin test-jar package test-jar ================================================ FILE: authentication-mapper/impl/src/main/java/org/pentaho/authentication/mapper/impl/AuthenticationMappingManagerImpl.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.authentication.mapper.impl; import java.io.IOException; import java.util.Comparator; import java.util.HashMap; import java.util.Map; import java.util.Objects; import java.util.Optional; import java.util.TreeSet; import org.pentaho.authentication.mapper.api.AuthenticationMappingManager; import org.pentaho.authentication.mapper.api.AuthenticationMappingService; import org.pentaho.authentication.mapper.api.MappingException; import com.google.common.collect.Multimaps; import com.google.common.collect.SortedSetMultimap; /** * @author bryan */ public class AuthenticationMappingManagerImpl implements AuthenticationMappingManager { private final SortedSetMultimap serviceMap = Multimaps.synchronizedSortedSetMultimap( Multimaps.newSortedSetMultimap( new HashMap<>(), TreeSet::new ) ); public AuthenticationMappingManagerImpl() throws IOException { } public AuthenticationMappingManagerImpl( AuthenticationMappingService service ) throws IOException { serviceMap.put( new TypePair( service ), new RankedAuthService( 50, service ) ); } @Override @SuppressWarnings( "unchecked" ) public OutputType getMapping( Class inputType, InputType input, Class outputType ) throws MappingException { AuthenticationMappingService service; synchronized ( serviceMap ) { service = serviceMap.get( new TypePair( inputType, outputType ) ).stream() .filter( ( rankedService ) -> rankedService.getService().accepts( input ) ) .findFirst() .map( RankedAuthService::getService ) .orElse( null ); } return service != null ? 
service.getMapping( input, null ) : null; } public void onMappingServiceAdded( AuthenticationMappingService service, Map config ) { if ( service == null ) { return; } int ranking = Optional.ofNullable( config.get( RANKING_CONFIG ) ) .map( String::valueOf ).map( Integer::parseInt ).orElse( 50 ); serviceMap.put( new TypePair( service ), new RankedAuthService( ranking, service ) ); } public void onMappingServiceRemoved( AuthenticationMappingService service ) { if ( service == null ) { return; } synchronized ( serviceMap ) { serviceMap.get( new TypePair( service ) ) .removeIf( rankedAuthService -> rankedAuthService.service.equals( service ) ); } } private static class TypePair { final Class input, output; TypePair( AuthenticationMappingService service ) { this( service.getInputType(), service.getOutputType() ); } TypePair( Class input, Class output ) { this.input = Objects.requireNonNull( input ); this.output = Objects.requireNonNull( output ); } @Override public boolean equals( Object o ) { if ( this == o ) { return true; } if ( !( o instanceof TypePair ) ) { return false; } TypePair typePair = (TypePair) o; return Objects.equals( input, typePair.input ) && Objects.equals( output, typePair.output ); } @Override public int hashCode() { return Objects.hash( input, output ); } @Override public String toString() { return input + " -> " + output; } } private static class RankedAuthService implements Comparable { final int rank; final AuthenticationMappingService service; RankedAuthService( int rank, AuthenticationMappingService service ) { this.rank = rank; this.service = service; } private String getId() { return getService().getId(); } int getRank() { return rank; } AuthenticationMappingService getService() { return service; } @Override public String toString() { return "(" + rank + ") " + service; } @Override public int compareTo( RankedAuthService o ) { return Comparator .comparingInt( RankedAuthService::getRank ).reversed() .thenComparing( RankedAuthService::getId ) .compare( this, o ); } } } ================================================ FILE: authentication-mapper/impl/src/test/java/org/pentaho/authentication/mapper/impl/AuthenticationMappingManagerImplTest.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. 
* * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.authentication.mapper.impl; import com.google.common.collect.ImmutableMap; import org.junit.Before; import org.junit.Rule; import org.junit.Test; import org.junit.rules.ExpectedException; import org.junit.rules.TemporaryFolder; import org.junit.runner.RunWith; import org.mockito.ArgumentCaptor; import org.mockito.Captor; import org.mockito.junit.MockitoJUnitRunner; import org.pentaho.authentication.mapper.api.AuthenticationMappingManager; import org.pentaho.authentication.mapper.api.AuthenticationMappingService; import org.pentaho.authentication.mapper.api.MappingException; import java.util.List; import java.util.Map; import static org.hamcrest.Matchers.allOf; import static org.hamcrest.Matchers.hasEntry; import static org.hamcrest.Matchers.nullValue; import static org.junit.Assert.assertThat; /** * @author nhudak */ @RunWith( MockitoJUnitRunner.class ) public class AuthenticationMappingManagerImplTest { @Rule public TemporaryFolder etc = new TemporaryFolder(); @Rule public ExpectedException exception = ExpectedException.none(); @Captor ArgumentCaptor> mapArgumentCaptor; private AuthenticationMappingManagerImpl manager; @Before public void setUp() throws Exception { manager = new AuthenticationMappingManagerImpl(); } @Test @SuppressWarnings( "unchecked" ) public void mappingService() throws Exception { // Add mock service TestService service = new TestService( "cluster_security_mapping_configuration" ); manager.onMappingServiceAdded( service, ImmutableMap.of( AuthenticationMappingManager.RANKING_CONFIG, 200 ) ); // Also add decoy services with lower (default) priority an invalid input TestService defaultService = new TestService( "default" ); manager.onMappingServiceAdded( defaultService, ImmutableMap.of() ); TestService unused = new TestService( "unused" ) { @Override public boolean accepts( Object input ) { return false; } }; manager.onMappingServiceAdded( unused, ImmutableMap.of( AuthenticationMappingManager.RANKING_CONFIG, 1000 ) ); // Service called if input/output match Map result = manager.getMapping( String.class, "map this", Map.class ); assertThat( result, allOf( hasEntry( "id", "cluster_security_mapping_configuration" ), hasEntry( "input", "map this" ) ) ); // Remove service, default will be used manager.onMappingServiceRemoved( service ); result = manager.getMapping( String.class, "use the default", Map.class ); assertThat( result, hasEntry( "id", "default" ) ); } @Test public void noMappingAvailable() throws Exception { assertThat( manager.getMapping( String.class, "some value", List.class ), nullValue() ); } class TestService implements AuthenticationMappingService { final String id; TestService( String id ) { this.id = id; } @Override public String getId() { return id; } @Override public Class getInputType() { return String.class; } @Override public Class getOutputType() { return Map.class; } @Override public boolean accepts( Object input ) { return true; } @Override public Map getMapping( String input, Map config ) throws MappingException { return ImmutableMap.of( "id", id, "input", input ); } } } ================================================ FILE: authentication-mapper/impl/src/test/resources/invalid_mapping.json ================================================ { invalid : # } ================================================ FILE: authentication-mapper/impl/src/test/resources/mapping.json ================================================ { 
"cluster_security_mapping_configuration": { "default": { "pentaho_server_credentials": { "kerberos": { "principal": "oozie@PENTAHOQA.COM", "keytab": "/home/bryan/platform-codebase/kerb-integration-2/7.0-SNAPSHOT-218/data-integration-server/oozie.keytab" } }, "user_impersonation_mapping": { "type": "simple_mapping" } } } } ================================================ FILE: authentication-mapper/pom.xml ================================================ 4.0.0 pentaho pentaho-big-data-parent 11.1.0.0-SNAPSHOT pentaho-authentication-mapper-parent 11.1.0.0-SNAPSHOT pom Matt Campbell mcampbell@pentaho.com developer Nick Hudak nhudak@pentaho.com developer Bryan Rosander brosander@pentaho.com developer api impl com.google.guava guava ${guava.version} org.hamcrest hamcrest-all ${hamcrest.version} test org.mockito mockito-core ${mockito.version} test junit junit ${junit.version} test ================================================ FILE: dev-doc/multishim/MultiShimHBase.sd ================================================ kettlePlugin:KP "Kettle Plugin" namedClusterServiceLocator:NCSL "Named Cluster Service" clusterInitializer:CI "Cluster Initializer" hadoopConfigurationBootstrap:HCB "Hadoop Configuration Bootstrap" hbaseServiceFactory:HSF "HBase Service Factory" kettlePlugin:namedClusterServiceLocator.getService(cdh55unsec, HBaseService.class) namedClusterServiceLocator:clusterInitializer.initialize(cdh55unsec) clusterInitializer:hadoopConfigurationBootstrap.getProvider(cdh55) namedClusterServiceLocator:hbaseServiceFactory.canHandle(cdh55unsec) namedClusterServiceLocator:hbaseServiceFactory.create(cdh55unsec) ================================================ FILE: dev-doc/multishim/README.md ================================================ This outlines the anticipated changes needed to support multiple shims. Standard services (accessed via steps/job entries) -------------------------------------------------- Currently, the service location has unused hooks for the parts we think will be needed. The current flow is as follows: ![Single shim hbase sequence diagram](SingleShimHBase.png) The adjusted flow is mostly the same: ![Multi shim hbase sequence diagram](MultiShimHBase.png) First we need to add an attribute to the named cluster specifying the shim. The getProvider() call to HadoopConfigurationBootstrap is what initializes a shim. We need to change it to account for the shim selected in the named cluster so that it can initialize the right shim (or no-op if the shim has already been initialized). The other major change isn't visible in the sequence diagram. The HBaseServiceFactory's canHandle method needs to account for the selected shim in the named cluster (falling back to the default or "active" configuration if it is null) so ```java @Override public boolean canHandle( NamedCluster namedCluster ) { return isActiveConfiguration; } ``` becomes ```java @Override public boolean canHandle( NamedCluster namedCluster ) { String shim = namedCluster.getShim(); if ( shim == null ) { return isActiveConfiguration; } return shim.equals( hadoopConfiguration.getIdentifier() ); } ``` Limited context services (vfs, jdbc) ------------------------------------ We need a reference to the active MetaStore and the named cluster name in order to load a NamedCluster. As far as locating the active metastore, [metastore locator plugin](https://github.com/pentaho/pentaho-kettle/tree/master/plugins/metastore-locator) will allow us to determine the right one. 
We will probably need to flesh out several more scenarios and implementations, though. We will somehow need to embed the named cluster name in the URL, both to determine which shim to use and to associate other named cluster settings with VFS and JDBC connections once we add them. Tools ----- [sdedit4.0.1](https://sourceforge.net/projects/sdedit/files/sdedit/4.0/) was used to transform the .sd files into .pngs ``` java -jar ~/Downloads/sdedit-4.01.jar -o SingleShimHBase.png -t png SingleShimHBase.sd java -jar ~/Downloads/sdedit-4.01.jar -o MultiShimHBase.png -t png MultiShimHBase.sd ``` ================================================ FILE: dev-doc/multishim/SingleShimHBase.sd ================================================ kettlePlugin:KP "Kettle Plugin" namedClusterServiceLocator:NCSL "Named Cluster Service" clusterInitializer:CI "Cluster Initializer" hadoopConfigurationBootstrap:HCB "Hadoop Configuration Bootstrap" hbaseServiceFactory:HSF "HBase Service Factory" kettlePlugin:namedClusterServiceLocator.getService(cdh55unsec, HBaseService.class) namedClusterServiceLocator:clusterInitializer.initialize(cdh55unsec) clusterInitializer:hadoopConfigurationBootstrap.getProvider() namedClusterServiceLocator:hbaseServiceFactory.canHandle(cdh55unsec) namedClusterServiceLocator:hbaseServiceFactory.create(cdh55unsec) ================================================ FILE: dev-doc/shim-bridge-classloading.graphml ================================================ Load Class Attempt to load from bundle wiring Class already loaded? Success? Profit! Attempt to load from shim classloader Success? Don't profit Class was from System classloader? Attempt to load from big data plugin classloader Success? Attempt to load from System classloader Success? No Yes Yes Yes No Yes No Yes No Yes No No ================================================ FILE: dev-doc/shim-bridging-classloading.md ================================================ As a step towards a more flexible architecture less constrained by the shims, we have been refactoring the steps and job entries to use higher level services. They follow this pattern: api --- A higher level api that exposes big data capabilities as services with locators that take a NamedCluster as their argument. These should only rely on kettle-core, the metastore, and other api bundles. They will typically be made up of [a service interface](https://github.com/pentaho/big-data-plugin/blob/master/api/pig/src/main/java/org/pentaho/bigdata/api/pig/PigService.java). The Service (and any supporting classes for arguments and/or return types) is responsible for performing operations against the cluster. impl/shim --------- An initial implementation of the api that delegates to the shim. These are OSGi bundles that bridge over to the legacy plugin as well as the shim. A [factory loader](https://github.com/pentaho/big-data-plugin/blob/master/impl/shim/pig/src/main/java/org/pentaho/big/data/impl/shim/pig/PigServiceFactoryLoader.java), a [factory](https://github.com/pentaho/big-data-plugin/blob/master/impl/shim/pig/src/main/java/org/pentaho/big/data/impl/shim/pig/PigServiceFactoryImpl.java), and a [service](https://github.com/pentaho/big-data-plugin/blob/master/impl/shim/pig/src/main/java/org/pentaho/big/data/impl/shim/pig/PigServiceImpl.java) need to be implemented.
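Schematically, the factory and factory loader pieces take roughly the following shape (a compact sketch under the assumption that the factory's `canHandle`/`create` pair mirrors the calls shown in the multishim notes; the linked Pig classes are the real reference, and all names here are simplified):

```java
// Compact sketch of the impl/shim pieces; all types and method names are simplified stand-ins.
public class ShimBridgeSketch {

  /** Stand-in for the api-bundle cluster type. */
  public interface NamedCluster {
    String getShim();
  }

  /** Stand-in for an api-level service, e.g. a Pig service. */
  public interface PigService {
    // executeScript( ... ) and friends would live here
  }

  /** The factory: decides whether its shim matches a named cluster and builds the service. */
  public static class PigServiceFactorySketch {
    private final boolean activeConfiguration;

    public PigServiceFactorySketch( boolean activeConfiguration ) {
      this.activeConfiguration = activeConfiguration;
    }

    public boolean canHandle( NamedCluster namedCluster ) {
      // single-shim behaviour; a multi-shim version would compare namedCluster.getShim()
      // against this factory's shim identifier, as outlined in the multishim README
      return activeConfiguration;
    }

    public PigService create( NamedCluster namedCluster ) {
      // the returned service uses shim classes (loaded via the shim classloader) to do the work
      return new PigService() { };
    }
  }

  /** The factory loader: builds and registers a factory whenever a shim configuration opens. */
  public static class PigServiceFactoryLoaderSketch {
    public void onConfigurationOpen( Object hadoopConfiguration, boolean defaultConfiguration ) {
      PigServiceFactorySketch factory = new PigServiceFactorySketch( defaultConfiguration );
      // ...register "factory" with the service locator so kettle-plugins bundles can find it...
    }
  }
}
```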
The factory loader implements the [HadoopConfigurationListener](https://github.com/pentaho/big-data-plugin/blob/master/legacy/src/main/java/org/pentaho/di/core/hadoop/HadoopConfigurationListener.java) interface. Its job is [to instantiate a new factory and register it with the service locator](https://github.com/pentaho/big-data-plugin/blob/master/impl/shim/pig/src/main/java/org/pentaho/big/data/impl/shim/pig/PigServiceFactoryLoader.java) for each HadoopConfiguration that opens, and to unregister the factory when the HadoopConfiguration is closed. The factory has two parent classloaders: the OSGi Bundle Context Classloader and the Shim's classloader. This way it is able to implement the Factory interface, and the Service it instantiates can use the shim classes to do the work. ![Logic flow chart](shim-bridge-classloading.png) The Service interface is able to reference anything in the shim to do its job, but sticking with the hadoop shim api classes is preferable as they are less likely to change from shim to shim. [Example blueprint](https://github.com/pentaho/big-data-plugin/blob/master/impl/shim/pig/src/main/resources/OSGI-INF/blueprint/blueprint.xml) kettle-plugins -------------- The step and job entry logic and dialog code. These are able to depend on Kettle artifacts as well as api artifacts (above) but should NOT depend on the legacy plugin, the hadoop api, or any shim artifacts to do their job. They are OSGi bundles that provide Kettle plugins via blueprint. They can use the [NamedClusterServiceLocator](https://github.com/pentaho/big-data-plugin/blob/master/api/clusterServiceLocator/src/main/java/org/pentaho/big/data/api/cluster/service/locator/NamedClusterServiceLocator.java) interface to get services for a given NamedCluster. [Example blueprint](https://github.com/pentaho/big-data-plugin/blob/master/kettle-plugins/pig/src/main/resources/OSGI-INF/blueprint/blueprint.xml) ================================================ FILE: dev-doc/shimprovements.md ================================================ Big Data Plugin in 6.1 ====================== OSGi ---- As of 6.1, all the main Hadoop functionality (HDFS, MapReduce, PMR, HBase, Pig, Sqoop, Oozie, YARN) is accessible via OSGi services. HDFS, Pig, and YARN were moved to OSGi services in the 6.0 version of the software. For 6.1, MapReduce, PMR, HBase, Oozie, and Sqoop were moved to OSGi services. This doesn't introduce a new paradigm; it just completes the migration of the functionality to OSGi. This won't impact the user experience. Shims and configuration files are in the same place, and all saved jobs and transformations will continue to work. However, the steps and services themselves are now in OSGi. This change allows any OSGi plugin to leverage OSGi services in the future; these services are no longer limited to the Big Data Plugin. It also paves the way for the eventual addition of more advanced authentication/authorization as well as multi-shim support. JDBC ---- Hive and Impala Drivers have not been migrated to OSGi, and are still part of the Big Data Plugin. Files Being Moved/Modified -------------------------- The individual Kettle Plugins from the old Big Data Plugin have been split into an API that exposes shim capability as a series of OSGi services, an implementation using the shim, and the Kettle Plugin that consumes the API. The shims themselves and the configuration of the Big Data Plugin have not changed for this release.
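As an illustration of the consumer side of that split, a step or job entry obtains its shim-backed service through the locator roughly as sketched below. The class and field names here are made up for the example, and the real locator call declares checked exceptions that the sketch collapses into `throws Exception`; only the `getService( namedCluster, PigService.class )` shape is taken from the sequence diagrams above.

```java
// Sketch only: how a kettle-plugins consumer gets a service for a NamedCluster.
// Names are illustrative; project-type imports and real exception handling omitted.
public class ExamplePigStepLogic {
  private final NamedClusterServiceLocator serviceLocator; // typically injected via blueprint

  public ExamplePigStepLogic( NamedClusterServiceLocator serviceLocator ) {
    this.serviceLocator = serviceLocator;
  }

  public void run( NamedCluster namedCluster ) throws Exception {
    // The locator finds a registered factory whose canHandle() accepts this
    // cluster and returns the service it creates; the step never touches shim classes.
    PigService pigService = serviceLocator.getService( namedCluster, PigService.class );
    // ... use pigService to execute the script against the cluster ...
  }
}
```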
Affected Products ----------------- This affects all parts of the stack capable of using the Kettle Big Data Plugin steps and job entries. License Impact -------------- There should be no change in licensing driven by these changes. Kerberos support and the YARN service and job entries remain EE features while the rest is still open. Deployment Impact ----------------- Updates to the legacy Big Data Plugin should be the same as before. Either drop a new big-data-plugin folder into the plugins directory and configure it, or unzip a new shim in the hadoop-configurations directory. Updates to the OSGi bundles currently can be accomplished most easily by building the same version as the release and overwriting the bundle in the Karaf system repository. After this, stop the tool, remove the Karaf cache, and restart the tool. The bundle updating process will be improved after 6.1 and we will be aiming for a much easier deployment scenario. ================================================ FILE: impl/cluster/pom.xml ================================================ 4.0.0 pentaho pentaho-big-data-impl 11.1.0.0-SNAPSHOT pentaho-big-data-impl-cluster 11.1.0.0-SNAPSHOT jar Pentaho Community Edition Project: ${project.artifactId} a Pentaho open source project http://www.pentaho.com site 3.12.4 5.17.0 org.pentaho shim-api ${pentaho-hadoop-shims.version} pentaho metastore ${metastore.version} provided pentaho-kettle kettle-core ${pdi.version} provided commons-beanutils commons-beanutils ${commons-beanutils.version} junit junit ${dependency.junit.revision} test org.mockito mockito-core ${mockito-core.version} test org.mockito mockito-inline ${mockito-inline.version} test com.google.code.bean-matchers bean-matchers ${dependency.bean-matchers.revision} test org.slf4j slf4j-api org.osgi osgi.core org.osgi osgi.cmpn pentaho-kettle kettle-engine ${pdi.version} provided pentaho pentaho-big-data-legacy ${project.version} provided pentaho-kettle kettle-core ${pdi.version} tests test ================================================ FILE: impl/cluster/src/it/resources/core-site.xml ================================================ fs.defaultFS hdfs://CDH61Secure fs.trash.interval 1 io.compression.codecs org.apache.hadoop.io.compress.DefaultCodec,org.apache.hadoop.io.compress.GzipCodec,org.apache.hadoop.io.compress.BZip2Codec,org.apache.hadoop.io.compress.DeflateCodec,org.apache.hadoop.io.compress.SnappyCodec,org.apache.hadoop.io.compress.Lz4Codec hadoop.security.authentication kerberos hadoop.security.authorization true hadoop.rpc.protection privacy hadoop.security.auth_to_local DEFAULT hadoop.proxyuser.oozie.hosts * hadoop.proxyuser.oozie.groups * hadoop.proxyuser.flume.hosts * hadoop.proxyuser.flume.groups * hadoop.proxyuser.HTTP.hosts * hadoop.proxyuser.HTTP.groups * hadoop.proxyuser.hive.hosts * hadoop.proxyuser.hive.groups * hadoop.proxyuser.hue.hosts * hadoop.proxyuser.hue.groups * hadoop.proxyuser.httpfs.hosts * hadoop.proxyuser.httpfs.groups * hadoop.proxyuser.hdfs.groups * hadoop.proxyuser.hdfs.hosts * hadoop.proxyuser.yarn.hosts * hadoop.proxyuser.yarn.groups * hadoop.security.group.mapping org.apache.hadoop.security.ShellBasedUnixGroupsMapping hadoop.security.instrumentation.requires.admin false net.topology.script.file.name /etc/hadoop/conf.cloudera.yarn/topology.py io.file.buffer.size 65536 hadoop.ssl.enabled true hadoop.ssl.require.client.cert false true hadoop.ssl.keystores.factory.class org.apache.hadoop.security.ssl.FileBasedKeyStoresFactory true hadoop.ssl.server.conf ssl-server.xml true 
hadoop.ssl.client.conf ssl-client.xml true hadoop.security.key.provider.path kms://https@svqxobcdh61secn1.pentaho.net:16000/kms ================================================ FILE: impl/cluster/src/main/java/org/pentaho/big/data/impl/cluster/NamedClusterImpl.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.impl.cluster; import java.io.ByteArrayInputStream; import java.io.InputStream; import java.io.StringWriter; import java.lang.reflect.InvocationTargetException; import java.util.ArrayList; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.regex.Matcher; import java.util.regex.Pattern; import javax.xml.XMLConstants; import javax.xml.parsers.DocumentBuilder; import javax.xml.parsers.DocumentBuilderFactory; import javax.xml.parsers.ParserConfigurationException; import javax.xml.transform.Transformer; import javax.xml.transform.TransformerException; import javax.xml.transform.TransformerFactory; import javax.xml.transform.dom.DOMSource; import javax.xml.transform.stream.StreamResult; import org.apache.commons.beanutils.BeanMap; import org.apache.commons.beanutils.BeanUtils; import org.apache.commons.vfs2.FileName; import org.apache.commons.vfs2.provider.url.UrlFileName; import org.apache.commons.vfs2.provider.url.UrlFileNameParser; import org.pentaho.di.core.encryption.Encr; import org.pentaho.di.core.exception.KettleValueException; import org.pentaho.di.core.osgi.api.NamedClusterOsgi; import org.pentaho.di.core.osgi.api.NamedClusterSiteFile; import org.pentaho.di.core.osgi.impl.NamedClusterSiteFileImpl; import org.pentaho.di.core.row.RowMetaInterface; import org.pentaho.di.core.row.value.ValueMetaBase; import org.pentaho.di.core.util.StringUtil; import org.pentaho.di.core.util.Utils; import org.pentaho.di.core.variables.VariableSpace; import org.pentaho.di.core.variables.Variables; import org.pentaho.di.core.xml.XMLHandler; import org.pentaho.hadoop.shim.api.cluster.NamedCluster; import org.pentaho.metastore.api.IMetaStore; import org.pentaho.metastore.api.security.Base64TwoWayPasswordEncoder; import org.pentaho.metastore.api.security.ITwoWayPasswordEncoder; import org.pentaho.metastore.persist.MetaStoreAttribute; import org.pentaho.metastore.persist.MetaStoreElementType; import org.w3c.dom.Document; import org.w3c.dom.Element; import org.w3c.dom.Node; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import com.google.common.annotations.VisibleForTesting; @MetaStoreElementType( name = "NamedCluster", description = "A NamedCluster" ) public class NamedClusterImpl implements NamedCluster, NamedClusterOsgi { public static final String HDFS_SCHEME = "hdfs"; public static final String MAPRFS_SCHEME = "maprfs"; public static final String WASB_SCHEME = "wasb"; public static final String NC_SCHEME = "hc"; public static final String ID = "id"; public static final String CHILD = "child"; public static final String CHILDREN = "children"; public static final String STRING = "string"; public static final String VALUE = "value"; public static final String UPPER_STRING = "String"; private static final Logger LOGGER = 
LogManager.getLogger( NamedClusterImpl.class ); private VariableSpace variables = new Variables(); @MetaStoreAttribute private String name; @MetaStoreAttribute private String shimIdentifier; @MetaStoreAttribute private String storageScheme; @MetaStoreAttribute private String hdfsHost; @MetaStoreAttribute private String hdfsPort; @MetaStoreAttribute private String hdfsUsername; @MetaStoreAttribute private String hdfsPassword; //encrypted @MetaStoreAttribute private String jobTrackerHost; @MetaStoreAttribute private String jobTrackerPort; @MetaStoreAttribute private String zooKeeperHost; @MetaStoreAttribute private String zooKeeperPort; @MetaStoreAttribute private String oozieUrl; @MetaStoreAttribute @Deprecated private boolean mapr; @MetaStoreAttribute private String gatewayUrl; @MetaStoreAttribute private String gatewayUsername; @MetaStoreAttribute private String gatewayPassword; //encrypted @MetaStoreAttribute private boolean useGateway; @MetaStoreAttribute private String kafkaBootstrapServers; @MetaStoreAttribute private long lastModifiedDate = System.currentTimeMillis(); @MetaStoreAttribute private List siteFiles; private ITwoWayPasswordEncoder passwordEncoder = new Base64TwoWayPasswordEncoder(); private static String hadoopActiveConfiguration = null; public NamedClusterImpl() { siteFiles = new ArrayList<>(); initializeVariablesFrom( null ); } public NamedClusterImpl( NamedCluster namedCluster ) { this(); replaceMeta( namedCluster ); } public void setName( String name ) { this.name = name; } public String getName() { return name; } public String getShimIdentifier() { return this.shimIdentifier; } public void setShimIdentifier( String shimIdentifier ) { this.shimIdentifier = shimIdentifier; } public String getStorageScheme() { if ( storageScheme == null ) { if ( isMapr() ) { storageScheme = MAPRFS_SCHEME; } else { storageScheme = HDFS_SCHEME; } } return storageScheme; } public void setStorageScheme( String storageScheme ) { this.storageScheme = storageScheme; } public void copyVariablesFrom( VariableSpace space ) { variables.copyVariablesFrom( space ); } public String environmentSubstitute( String aString ) { return variables.environmentSubstitute( aString ); } public String[] environmentSubstitute( String[] aString ) { return variables.environmentSubstitute( aString ); } public String fieldSubstitute( String aString, RowMetaInterface rowMeta, Object[] rowData ) throws KettleValueException { return variables.fieldSubstitute( aString, rowMeta, rowData ); } public VariableSpace getParentVariableSpace() { return variables.getParentVariableSpace(); } public void setParentVariableSpace( VariableSpace parent ) { variables.setParentVariableSpace( parent ); } public String getVariable( String variableName, String defaultValue ) { return variables.getVariable( variableName, defaultValue ); } public String getVariable( String variableName ) { return variables.getVariable( variableName ); } public boolean getBooleanValueOfVariable( String variableName, boolean defaultValue ) { if ( !Utils.isEmpty( variableName ) ) { String value = environmentSubstitute( variableName ); if ( !Utils.isEmpty( value ) ) { return ValueMetaBase.convertStringToBoolean( value ); } } return defaultValue; } public void initializeVariablesFrom( VariableSpace parent ) { variables.initializeVariablesFrom( parent ); } public String[] listVariables() { return variables.listVariables(); } public void setVariable( String variableName, String variableValue ) { variables.setVariable( variableName, variableValue ); } public void 
shareVariablesWith( VariableSpace space ) { variables = space; } public void injectVariables( Map prop ) { variables.injectVariables( prop ); } public void replaceMeta( NamedCluster nc ) { this.setName( nc.getName() ); this.setShimIdentifier( nc.getShimIdentifier() ); this.setStorageScheme( nc.getStorageScheme() ); this.setHdfsHost( nc.getHdfsHost() ); this.setHdfsPort( nc.getHdfsPort() ); this.setHdfsUsername( nc.getHdfsUsername() ); this.setHdfsPassword( nc.getHdfsPassword() ); this.setJobTrackerHost( nc.getJobTrackerHost() ); this.setJobTrackerPort( nc.getJobTrackerPort() ); this.setZooKeeperHost( nc.getZooKeeperHost() ); this.setZooKeeperPort( nc.getZooKeeperPort() ); this.setOozieUrl( nc.getOozieUrl() ); this.setMapr( nc.isMapr() ); this.setGatewayUrl( nc.getGatewayUrl() ); this.setGatewayUsername( nc.getGatewayUsername() ); this.setGatewayPassword( nc.getGatewayPassword() ); this.setUseGateway( nc.isUseGateway() ); this.setKafkaBootstrapServers( nc.getKafkaBootstrapServers() ); this.lastModifiedDate = System.currentTimeMillis(); for ( NamedClusterSiteFile ncsf : nc.getSiteFiles() ) { this.siteFiles.add( ncsf.copy() ); } } public NamedClusterImpl clone() { return new NamedClusterImpl( this ); } @Override public String processURLsubstitution( String incomingURL, IMetaStore metastore, VariableSpace variableSpace ) { if ( isUseGateway() ) { if ( incomingURL.startsWith( NC_SCHEME ) ) { return incomingURL; } StringBuilder builder = new StringBuilder( NC_SCHEME + "://" ); builder.append( getName() ); builder.append( incomingURL.startsWith( "/" ) ? incomingURL : "/" + incomingURL ); return builder.toString(); } else if ( isMapr() ) { String url = processURLsubstitution( incomingURL, MAPRFS_SCHEME, metastore, variableSpace ); if ( url != null && !url.startsWith( MAPRFS_SCHEME ) ) { url = MAPRFS_SCHEME + "://" + url; } return url; } else { return processURLsubstitution( incomingURL, getStorageScheme(), metastore, variableSpace ); } } private String processURLsubstitution( String incomingURL, String hdfsScheme, IMetaStore metastore, VariableSpace variableSpace ) { String outgoingURL = null; String clusterURL = null; if ( !hdfsScheme.equals( MAPRFS_SCHEME ) ) { clusterURL = generateURL( hdfsScheme, metastore, variableSpace ); } try { if ( clusterURL == null || isHdfsHostEmpty( variableSpace ) ) { outgoingURL = incomingURL; } else if ( incomingURL.equals( "/" ) ) { outgoingURL = clusterURL; } else if ( clusterURL != null ) { String noVariablesURL = incomingURL.replaceAll( "[${}]", "/" ); String fullyQualifiedIncomingURL = incomingURL; if ( !incomingURL.startsWith( hdfsScheme ) && !incomingURL.startsWith( NC_SCHEME ) ) { fullyQualifiedIncomingURL = clusterURL + incomingURL; noVariablesURL = clusterURL + incomingURL.replaceAll( "[${}]", "/" ); } UrlFileNameParser parser = new UrlFileNameParser(); FileName fileName = parser.parseUri( null, null, noVariablesURL ); String root = fileName.getRootURI(); String path = fullyQualifiedIncomingURL.substring( root.length() - 1 ); StringBuilder buffer = new StringBuilder(); // Check for a special case where a fully qualified path (one that has the protocol in it). // This can only happen through variable replacement. See BACKLOG-15849. When this scenario // occurs we do not prepend the cluster uri to the url. 
boolean prependCluster = true; if ( variableSpace != null ) { String filePath = variableSpace.environmentSubstitute( path ); StringBuilder pattern = new StringBuilder(); pattern.append( "^(" ).append( HDFS_SCHEME ).append( "|" ).append( WASB_SCHEME ).append( "|" ).append( MAPRFS_SCHEME ).append( "|" ).append( NC_SCHEME ).append( "):\\/\\/" ); Pattern r = Pattern.compile( pattern.toString() ); Matcher m = r.matcher( filePath ); prependCluster = !m.find(); } if ( prependCluster ) { buffer.append( clusterURL ); } buffer.append( path ); outgoingURL = buffer.toString(); } } catch ( Exception e ) { outgoingURL = null; } return outgoingURL; } @VisibleForTesting boolean isHdfsHostEmpty( VariableSpace variableSpace ) { String hostNameParsed = getHostNameParsed( variableSpace ); return hostNameParsed == null || hostNameParsed.trim().isEmpty(); } public String getHostNameParsed( VariableSpace variableSpace ) { if ( StringUtil.isVariable( hdfsHost ) ) { if ( variableSpace == null ) { return null; } return variableSpace.getVariable( StringUtil.getVariableName( getHdfsHost() ) ); } return hdfsHost != null ? hdfsHost.trim() : null; } /** * This method generates the URL from the specific NamedCluster using the specified scheme. * * @param scheme the name of the scheme to use to create the URL * @return the generated URL from the specific NamedCluster or null if an error occurs */ @VisibleForTesting String generateURL( String scheme, IMetaStore metastore, VariableSpace variableSpace ) { String clusterURL = null; try { if ( !Utils.isEmpty( scheme ) ) { String ncHostname = getHdfsHost() != null ? getHdfsHost() : ""; String ncPort = getHdfsPort() != null ? getHdfsPort() : ""; String ncUsername = getHdfsUsername() != null ? getHdfsUsername() : ""; String ncPassword = getHdfsPassword() != null ? decodePassword( getHdfsPassword() ) : ""; if ( variableSpace != null ) { variableSpace.initializeVariablesFrom( getParentVariableSpace() ); if ( StringUtil.isVariable( scheme ) ) { scheme = variableSpace.getVariable( StringUtil.getVariableName( scheme ) ) != null ? variableSpace .environmentSubstitute( scheme ) : null; } if ( StringUtil.isVariable( ncHostname ) ) { ncHostname = variableSpace.getVariable( StringUtil.getVariableName( ncHostname ) ) != null ? variableSpace .environmentSubstitute( ncHostname ) : null; } if ( StringUtil.isVariable( ncPort ) ) { ncPort = variableSpace.getVariable( StringUtil.getVariableName( ncPort ) ) != null ? variableSpace .environmentSubstitute( ncPort ) : null; } if ( StringUtil.isVariable( ncUsername ) ) { ncUsername = variableSpace.getVariable( StringUtil.getVariableName( ncUsername ) ) != null ? variableSpace .environmentSubstitute( ncUsername ) : null; } if ( StringUtil.isVariable( ncPassword ) ) { ncPassword = variableSpace.getVariable( StringUtil.getVariableName( ncPassword ) ) != null ? variableSpace .environmentSubstitute( ncPassword ) : null; } } ncHostname = ncHostname != null ? ncHostname.trim() : ""; if ( ncPort == null ) { ncPort = "-1"; } else { ncPort = ncPort.trim(); if ( Utils.isEmpty( ncPort ) ) { ncPort = "-1"; } } ncUsername = ncUsername != null ? ncUsername.trim() : ""; ncPassword = ncPassword != null ? 
ncPassword.trim() : ""; UrlFileName file = new UrlFileName( scheme, ncHostname, Integer.parseInt( ncPort ), -1, ncUsername, ncPassword, null, null, null ); clusterURL = file.getURI(); if ( clusterURL.endsWith( "/" ) ) { clusterURL = clusterURL.substring( 0, clusterURL.lastIndexOf( '/' ) ); } } } catch ( Exception e ) { clusterURL = null; } return clusterURL; } /* (non-Javadoc) * @see java.lang.Object#equals(java.lang.Object) */ @Override public boolean equals( Object obj ) { if ( this == obj ) { return true; } if ( obj == null ) { return false; } if ( getClass() != obj.getClass() ) { return false; } NamedCluster other = (NamedCluster) obj; if ( name == null ) { if ( other.getName() != null ) { return false; } } else if ( !name.equals( other.getName() ) ) { return false; } return true; } public String getHdfsHost() { return hdfsHost; } public void setHdfsHost( String hdfsHost ) { this.hdfsHost = hdfsHost; } public String getHdfsPort() { return hdfsPort; } public void setHdfsPort( String hdfsPort ) { this.hdfsPort = hdfsPort; } public String getHdfsUsername() { return hdfsUsername; } public void setHdfsUsername( String hdfsUsername ) { this.hdfsUsername = hdfsUsername; } public String getHdfsPassword() { return hdfsPassword; } public void setHdfsPassword( String hdfsPassword ) { this.hdfsPassword = hdfsPassword; } public String getJobTrackerHost() { return jobTrackerHost; } public void setJobTrackerHost( String jobTrackerHost ) { this.jobTrackerHost = jobTrackerHost; } public String getJobTrackerPort() { return jobTrackerPort; } public void setJobTrackerPort( String jobTrackerPort ) { this.jobTrackerPort = jobTrackerPort; } public String getZooKeeperHost() { return zooKeeperHost; } public void setZooKeeperHost( String zooKeeperHost ) { this.zooKeeperHost = zooKeeperHost; } public String getZooKeeperPort() { return zooKeeperPort; } public void setZooKeeperPort( String zooKeeperPort ) { this.zooKeeperPort = zooKeeperPort; } public String getOozieUrl() { return oozieUrl; } public void setOozieUrl( String oozieUrl ) { this.oozieUrl = oozieUrl; } public long getLastModifiedDate() { return lastModifiedDate; } public void setLastModifiedDate( long lastModifiedDate ) { this.lastModifiedDate = lastModifiedDate; } public void setMapr( boolean mapr ) { if ( mapr ) { setStorageScheme( MAPRFS_SCHEME ); } } @Deprecated public boolean isMapr() { if ( storageScheme == null ) { return mapr; } else { return storageScheme.equals( MAPRFS_SCHEME ); } } @Override public String toString() { return "Named cluster: " + getName(); } public String toXmlForEmbed( String rootTag ) { BeanMap m = new BeanMap( this ); DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance(); DocumentBuilder builder = null; Document doc = null; try { builder = dbf.newDocumentBuilder(); doc = builder.newDocument(); Element rootNode = doc.createElement( rootTag ); doc.appendChild( rootNode ); Iterator> i = m.entryIterator(); while ( i.hasNext() ) { Map.Entry entry = i.next(); String elementName = (String) entry.getKey(); if ( !"class".equals( elementName ) && !"parentVariableSpace".equals( elementName ) ) { String value = ""; String type = UPPER_STRING; Element children = null; Object o = entry.getValue(); if ( o != null ) { if ( o instanceof ArrayList ) { value = NamedClusterSiteFileImpl.class.getName(); children = createSiteFileChildren( doc, ( (ArrayList) o ) ); } else if ( o instanceof Long ) { value = Long.toString( (Long) o ); } else if ( o instanceof Boolean ) { value = Boolean.toString( (Boolean) o ); } else { try { value = 
(String) entry.getValue(); if ( elementName.toLowerCase().contains( "password" ) ) { value = encodePassword( value ); } } catch ( Exception e ) { LOGGER.error( "Error encoding password", e ); } } } rootNode.appendChild( createChildElement( doc, elementName, type, value, children ) ); } } DOMSource domSource = new DOMSource( doc ); StringWriter writer = new StringWriter(); StreamResult result = new StreamResult( writer ); TransformerFactory tf = TransformerFactory.newInstance(); tf.setFeature( XMLConstants.FEATURE_SECURE_PROCESSING, true ); tf.setAttribute( XMLConstants.ACCESS_EXTERNAL_DTD, "" ); tf.setAttribute( XMLConstants.ACCESS_EXTERNAL_STYLESHEET, "" ); Transformer transformer = tf.newTransformer(); transformer.transform( domSource, result ); String s = writer.toString(); // Remove header from the XML s = s.substring( s.indexOf( '>' ) + 1 ); return s; } catch ( ParserConfigurationException | TransformerException e1 ) { LOGGER.error( "Could not parse embedded cluster xml", e1 ); return ""; } } private Element createSiteFileChildren( Document doc, ArrayList siteFiles ) { Element children = doc.createElement( CHILDREN ); int index = 0; for ( NamedClusterSiteFile sitefile : siteFiles ) { Element siteChildren = doc.createElement( CHILDREN ); siteChildren .appendChild( createChildElement( doc, "siteFileContents", UPPER_STRING, sitefile.getSiteFileContents(), null ) ); siteChildren .appendChild( createChildElement( doc, "siteFileName", UPPER_STRING, sitefile.getSiteFileName(), null ) ); children.appendChild( createChildElement( doc, String.valueOf( index++ ), UPPER_STRING, "", siteChildren ) ); } return children; } public NamedCluster fromXmlForEmbed( Node node ) { NamedClusterImpl returnCluster = this.clone(); List fields = XMLHandler.getNodes( node, CHILD ); for ( Node field: fields ) { String fieldName = XMLHandler.getTagValue( field, ID ); Object fieldValue = null; if ( "siteFiles".equals( fieldName ) ) { fieldValue = unmarshallSiteFileNode( field ); } else { String stringValue = XMLHandler.getTagValue( field, VALUE ); if ( fieldName.toLowerCase().contains( "password" ) ) { stringValue = decodePassword( stringValue ); } fieldValue = stringValue; } try { BeanUtils.setProperty( returnCluster, fieldName, fieldValue ); } catch ( IllegalAccessException | InvocationTargetException e ) { LOGGER.error( "Could not set field " + fieldName + " in NamedCluster", e ); } } return returnCluster; } private Object unmarshallSiteFileNode( Node field ) { ArrayList namedClusterSiteFiles = new ArrayList<>(); Node siteFileWrapper = XMLHandler.getSubNode( field, CHILDREN ); if ( siteFileWrapper != null ) { unmarshallSiteFiles( namedClusterSiteFiles, XMLHandler.getNodes( siteFileWrapper, CHILD ) ); } return namedClusterSiteFiles; } private void unmarshallSiteFiles( ArrayList namedClusterSiteFiles, List siteFileNodes ) { for ( Node siteFile : siteFileNodes ) { namedClusterSiteFiles.add( unmarshallSiteFields( XMLHandler.getNodes( XMLHandler.getSubNode( siteFile, CHILDREN ), CHILD ) ) ); } } private NamedClusterSiteFileImpl unmarshallSiteFields( List siteFields ) { NamedClusterSiteFileImpl namedClusterSiteFile = new NamedClusterSiteFileImpl(); for ( Node siteField : siteFields ) { String id = XMLHandler.getTagValue( siteField, ID ); if ( id != null && !id.isEmpty() ) { try { BeanUtils.setProperty( namedClusterSiteFile, id, XMLHandler.getTagValue( siteField, VALUE ) ); } catch ( IllegalAccessException | InvocationTargetException e ) { LOGGER.error( "Could not set field " + id + " in NamedClusterSiteFile", e ); } } 
} return namedClusterSiteFile; } private Node createChildElement( Document doc, String elementName, String elementType, String elementValue, Element children ) { Element childNode = doc.createElement( CHILD ); childNode.appendChild( createTextNode( doc, ID, elementName ) ); childNode.appendChild( createTextNode( doc, VALUE, elementValue ) ); childNode.appendChild( createTextNode( doc, "type", elementType ) ); if ( children != null ) { childNode.appendChild( children ); } return childNode; } private Node createTextNode( Document doc, String tagName, String value ) { Node node = doc.createElement( tagName ); node.appendChild( doc.createTextNode( value ) ); return node; } @Override public String getGatewayUrl() { return gatewayUrl; } @Override public void setGatewayUrl( String gatewayUrl ) { this.gatewayUrl = gatewayUrl; } @Override public String getGatewayUsername() { return gatewayUsername; } @Override public void setGatewayUsername( String gatewayUsername ) { this.gatewayUsername = gatewayUsername; } @Override public String getGatewayPassword() { return decodePassword( gatewayPassword ); } @Override public void setGatewayPassword( String gatewayPassword ) { this.gatewayPassword = encodePassword( gatewayPassword ); } @Override public boolean isUseGateway() { return useGateway; } @Override public void setUseGateway( boolean useGateway ) { this.useGateway = useGateway; } @Override public String getKafkaBootstrapServers() { return kafkaBootstrapServers; } @Override public void setKafkaBootstrapServers( String kafkaBootstrapServers ) { this.kafkaBootstrapServers = kafkaBootstrapServers; } @Override public NamedClusterOsgi nonOsgiFromXmlForEmbed( Node node ) { return (NamedClusterOsgi) fromXmlForEmbed( node ); } public String decodePassword( String password ) { if ( password == null || password.startsWith( Encr.PASSWORD_ENCRYPTED_PREFIX ) ) { return Encr.decryptPasswordOptionallyEncrypted( password ); } else { //Password is likely stored encrypted with legacy Base64TwoWayPasswordEncoder if ( !StringUtil.isVariable( password ) ) { return passwordEncoder.decode( password ); } } return password; } public String encodePassword( String password ) { return Encr.encryptPasswordIfNotUsingVariables( password ); } @Override public List getSiteFiles() { return siteFiles; } @Override public void setSiteFiles( List siteFiles ) { this.siteFiles = siteFiles; } @Override public void addSiteFile( String fileName, String content ) { siteFiles.add( new NamedClusterSiteFileImpl( fileName, content ) ); } @Override public void addSiteFile( NamedClusterSiteFile namedClusterSiteFile ) { siteFiles.add( namedClusterSiteFile ); } @Override public InputStream getSiteFileInputStream( String siteFileName ) { NamedClusterSiteFile n = siteFiles.stream().filter( sf -> sf.getSiteFileName().equals( siteFileName ) ) .findFirst().orElse( null ); return n == null ? null : new ByteArrayInputStream( n.getSiteFileContents().getBytes() ); } } ================================================ FILE: impl/cluster/src/main/java/org/pentaho/big/data/impl/cluster/NamedClusterManager.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. 
* * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.impl.cluster; import com.google.common.annotations.VisibleForTesting; import org.apache.commons.io.FileUtils; import org.apache.commons.vfs2.FileObject; import org.apache.commons.vfs2.FileSystemException; import org.apache.commons.vfs2.FileType; import org.osgi.framework.BundleContext; import org.pentaho.di.core.Const; import org.pentaho.di.core.KettleClientEnvironment; import org.pentaho.di.core.attributes.metastore.EmbeddedMetaStore; import org.pentaho.di.core.bowl.DefaultBowl; import org.pentaho.di.core.exception.KettleFileException; import org.pentaho.di.core.logging.LogChannel; import org.pentaho.di.core.osgi.api.NamedClusterSiteFile; import org.pentaho.di.core.osgi.impl.NamedClusterSiteFileImpl; import org.pentaho.di.core.plugins.LifecyclePluginType; import org.pentaho.di.core.plugins.PluginInterface; import org.pentaho.di.core.plugins.PluginRegistry; import org.pentaho.di.core.vfs.KettleVFS; import org.pentaho.di.i18n.BaseMessages; import org.pentaho.di.trans.steps.named.cluster.NamedClusterEmbedManager; import org.pentaho.hadoop.shim.api.cluster.NamedCluster; import org.pentaho.hadoop.shim.api.cluster.NamedClusterService; import org.pentaho.metastore.api.IMetaStore; import org.pentaho.metastore.api.exceptions.MetaStoreException; import org.pentaho.metastore.persist.MetaStoreFactory; import org.pentaho.metastore.stores.xml.XmlMetaStore; import org.pentaho.metastore.stores.xml.XmlUtil; import org.pentaho.metastore.util.PentahoDefaults; import java.io.File; import java.io.FileInputStream; import java.io.FileNotFoundException; import java.io.IOException; import java.nio.charset.StandardCharsets; import java.util.ArrayList; import java.util.Arrays; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Properties; import java.util.stream.Collectors; public class NamedClusterManager implements NamedClusterService { private static NamedClusterManager instance = new NamedClusterManager(); public static final String BIG_DATA_SLAVE_METASTORE_DIR = "hadoop.configurations.path"; private static final Class PKG = NamedClusterManager.class; private Map> factoryMap = new HashMap<>(); private NamedCluster clusterTemplate; private LogChannel log = new LogChannel( this ); private Map properties = new HashMap<>(); private static final String LOCALHOST = "localhost"; private static final List siteFileNames = Arrays.asList( "hdfs-site.xml", "core-site.xml", "mapred-site.xml", "yarn-site.xml", "hbase-site.xml", "hive-site.xml" ); public static NamedClusterManager getInstance() { return instance; } /** * returns a NamedClusterMetaStoreFactory for a given MetaStore instance. NOTE: This method caches and returns a * factory for Embedded MetaStores. For all other MetaStores, a new instance of MetaStoreFactory will always be * returned. * * @param metastore - the MetaStore for which to to get a MetaStoreFactory. * @return a MetaStoreFactory for the given MetaStore. */ @VisibleForTesting MetaStoreFactory getMetaStoreFactory( IMetaStore metastore ) { MetaStoreFactory namedClusterMetaStoreFactory = null; // Only MetaStoreFactories for EmbeddedMetaStores are cached. 
For all other MetaStore types, create a new // MetaStoreFactory if ( !( metastore instanceof EmbeddedMetaStore ) ) { return new MetaStoreFactory<>( NamedClusterImpl.class, metastore, PentahoDefaults.NAMESPACE ); } // cache MetaStoreFactories for Embedded MetaStores namedClusterMetaStoreFactory = factoryMap.computeIfAbsent( metastore, m -> ( new MetaStoreFactory<>( NamedClusterImpl.class, m, NamedClusterEmbedManager.NAMESPACE ) ) ); return namedClusterMetaStoreFactory; } @VisibleForTesting void putMetaStoreFactory( IMetaStore metastore, MetaStoreFactory metaStoreFactory ) { factoryMap.put( metastore, metaStoreFactory ); } @Override public void close( IMetaStore metastore ) { factoryMap.remove( metastore ); } @Override public NamedCluster getClusterTemplate() { if ( clusterTemplate == null ) { clusterTemplate = new NamedClusterImpl(); clusterTemplate.setName( "" ); clusterTemplate.setHdfsHost( LOCALHOST ); clusterTemplate.setHdfsPort( "8020" ); clusterTemplate.setHdfsUsername( "user" ); clusterTemplate.setHdfsPassword( clusterTemplate.encodePassword( "password" ) ); clusterTemplate.setJobTrackerHost( LOCALHOST ); clusterTemplate.setJobTrackerPort( "8032" ); clusterTemplate.setZooKeeperHost( LOCALHOST ); clusterTemplate.setZooKeeperPort( "2181" ); clusterTemplate.setOozieUrl( "http://localhost:8080/oozie" ); } return clusterTemplate.clone(); } @Override public void setClusterTemplate( NamedCluster clusterTemplate ) { this.clusterTemplate = clusterTemplate; } @Override public void create( NamedCluster namedCluster, IMetaStore metastore ) throws MetaStoreException { getMetaStoreFactory( metastore ).saveElement( new NamedClusterImpl( namedCluster ) ); } @Override public NamedCluster read( String clusterName, IMetaStore metastore ) throws MetaStoreException { MetaStoreFactory factory = getMetaStoreFactory( metastore ); if ( metastore == null || !listNames( metastore ).contains( clusterName ) ) { // only try the slave metastore if the given one fails IMetaStore slaveMetastore = getSlaveServerMetastore(); if ( slaveMetastore != null && listNames( slaveMetastore ).contains( clusterName ) ) { factory = getMetaStoreFactory( slaveMetastore ); } } NamedCluster namedCluster = null; try { namedCluster = factory.loadElement( clusterName ); } catch ( MetaStoreException e ) { // While executing Pentaho MapReduce on a secure cluster, the .lock file // might not be able to be created due to permissions. // In this case, try and read the MetaStore without locking. 
namedCluster = factory.loadElement( clusterName, false ); } return namedCluster; } @Override public void update( NamedCluster namedCluster, IMetaStore metastore ) throws MetaStoreException { MetaStoreFactory factory = getMetaStoreFactory( metastore ); List namedClusters = list( metastore ); for ( NamedCluster nc : namedClusters ) { if ( namedCluster.getName().equals( nc.getName() ) ) { factory.deleteElement( nc.getName() ); factory.saveElement( new NamedClusterImpl( namedCluster ) ); } } } @Override public void delete( String clusterName, IMetaStore metastore ) throws MetaStoreException { getMetaStoreFactory( metastore ).deleteElement( clusterName ); } @Override public List list( IMetaStore metastore ) throws MetaStoreException { MetaStoreFactory factory = getMetaStoreFactory( metastore ); List namedClusters; List exceptionList = new ArrayList<>(); try { namedClusters = new ArrayList<>( factory.getElements( true, exceptionList ) ); } catch ( MetaStoreException ex ) { // While executing Pentaho MapReduce on a secure cluster, the .lock file // might not be able to be created due to permissions. // In this case, try and read the MetaStore without locking. namedClusters = new ArrayList<>( factory.getElements( false, exceptionList ) ); } return namedClusters; } /** * This method lists the NamedClusters in the given IMetaStore. If an exception is thrown when parsing the data for a * given NamedCluster. The exception will be added to the exceptionList, but list generation will continue. * * @param metastore the IMetaStore to operate with * @param exceptionList As list to hold any exceptions that occur * @return the list of NamedClusters in the provided IMetaStore * @throws MetaStoreException */ @Override public List list( IMetaStore metastore, List exceptionList ) throws MetaStoreException { MetaStoreFactory factory = getMetaStoreFactory( metastore ); return new ArrayList<>( factory.getElements( false, exceptionList ) ); } @Override public List listNames( IMetaStore metastore ) throws MetaStoreException { return getMetaStoreFactory( metastore ).getElementNames( false ); } @Override public boolean contains( String clusterName, IMetaStore metastore ) throws MetaStoreException { boolean found = false; if ( metastore != null ) { found = listNames( metastore ).contains( clusterName ); } if ( !found ) { IMetaStore slaveMetastore = getSlaveServerMetastore(); if ( slaveMetastore != null ) { found = listNames( slaveMetastore ).contains( clusterName ); } } return found; } @Override public NamedCluster getNamedClusterByName( String namedClusterName, IMetaStore metastore ) { NamedCluster namedCluster = null; if ( metastore != null ) { namedCluster = searchMetastoreByName( namedClusterName, metastore ); } if ( namedCluster == null ) { IMetaStore slaveMetastore = getSlaveServerMetastore(); if ( slaveMetastore != null ) { namedCluster = searchMetastoreByName( namedClusterName, slaveMetastore ); } if ( namedCluster != null ) { metastore = slaveMetastore; } } loadSiteFilesIfNecessary( namedCluster, metastore ); return namedCluster; } private NamedCluster searchMetastoreByName( String namedCluster, IMetaStore metastore ) { try { List namedClusters = list( metastore ); for ( NamedCluster nc : namedClusters ) { if ( nc.getName().equals( namedCluster ) ) { return nc; } } } catch ( MetaStoreException e ) { return null; } return null; } public Map getProperties() { return properties; } @Override public NamedCluster getNamedClusterByHost( String hostName, IMetaStore metastore ) { NamedCluster namedCluster = null; if ( 
hostName == null ) { return null; } if ( metastore != null ) { namedCluster = searchMetastoreByHost( hostName, metastore ); } if ( namedCluster == null ) { IMetaStore slaveMetastore = getSlaveServerMetastore(); if ( slaveMetastore != null ) { namedCluster = searchMetastoreByHost( hostName, slaveMetastore ); } } return namedCluster; } private NamedCluster searchMetastoreByHost( String hostName, IMetaStore metastore ) { try { List namedClusters = list( metastore ); for ( NamedCluster nc : namedClusters ) { if ( hostName.equals( nc.getHdfsHost() ) ) { loadSiteFilesIfNecessary( nc, metastore ); return nc; } } } catch ( MetaStoreException e ) { return null; } return null; } @Override public void updateNamedClusterTemplate( String hostName, int port, boolean isMapr ) { if ( clusterTemplate == null ) { getClusterTemplate(); } clusterTemplate.setHdfsHost( hostName ); if ( port > 0 ) { clusterTemplate.setHdfsPort( String.valueOf( port ) ); } else { clusterTemplate.setHdfsPort( "" ); } clusterTemplate.setMapr( isMapr ); } private String getSlaveServerMetastoreDir() throws IOException { PluginInterface pluginInterface = PluginRegistry.getInstance().findPluginWithId( LifecyclePluginType.class, "HadoopSpoonPlugin" ); Properties legacyProperties; try { legacyProperties = loadProperties( pluginInterface, "plugin.properties" ); String slaveMetaStorePath = legacyProperties.getProperty( BIG_DATA_SLAVE_METASTORE_DIR ); FileObject slaveMetastoreDir; // check for user-specified metastore directory if ( useSlaveMetastorePathFromProperties( slaveMetaStorePath ) ) { return slaveMetaStorePath; } // see if metastore was copied to the big data plugin folder (yarn kettle cluster job) slaveMetaStorePath = pluginInterface.getPluginDirectory().getPath(); slaveMetastoreDir = KettleVFS.getInstance( DefaultBowl.getInstance() ) .getFileObject( slaveMetaStorePath + File.separator + XmlUtil.META_FOLDER_NAME ); if ( null != slaveMetastoreDir && slaveMetastoreDir.exists() && slaveMetastoreDir.getType().equals( FileType.FOLDER ) // last condition exists to ensure that this path doesn't get used if two jobs are running on a slave instance // at once, and one of them is packaging up the install for a yarn carte job && KettleClientEnvironment.getInstance().getClient().equals( KettleClientEnvironment.ClientType.CARTE ) ) { return slaveMetaStorePath; } slaveMetaStorePath = System.getProperty( "user.home" ) + File.separator + ".pentaho"; slaveMetastoreDir = KettleVFS.getInstance( DefaultBowl.getInstance() ).getFileObject( slaveMetaStorePath ); if ( null != slaveMetastoreDir && slaveMetastoreDir.exists() && slaveMetastoreDir.getType().equals( FileType.FOLDER ) ) { return slaveMetaStorePath; } else { return null; } } catch ( KettleFileException | NullPointerException e ) { log.logError( BaseMessages.getString( PKG, "NamedClusterManager.ErrorFindingUserMetastore" ), e ); throw new IOException( e ); } } private boolean useSlaveMetastorePathFromProperties( String slaveMetaStorePath ) throws FileSystemException { FileObject slaveMetastoreDir; try { slaveMetastoreDir = KettleVFS.getInstance( DefaultBowl.getInstance() ) .getFileObject( slaveMetaStorePath + File.separator + XmlUtil.META_FOLDER_NAME ); return null != slaveMetaStorePath && !slaveMetaStorePath.equals( "" ) && null != slaveMetastoreDir && slaveMetastoreDir.exists(); } catch ( KettleFileException e ) { log.logError( BaseMessages.getString( PKG, "NamedClusterManager.ErrorFindingUserMetastore" ), e ); } return false; } @VisibleForTesting IMetaStore getSlaveServerMetastore() { try { 
String metastoreDir = getSlaveServerMetastoreDir(); if ( null != metastoreDir ) { return new XmlMetaStore( getSlaveServerMetastoreDir() ); } else { // it is essential that this method returns a null value if no slave metastore directory exists return null; } } catch ( IOException | MetaStoreException e ) { log.logError( BaseMessages.getString( PKG, "NamedClusterManager.ErrorReadingMetastore" ), e ); return null; } } /** * Loads a properties file from the plugin directory for the plugin interface provided * * @param plugin * @return * @throws KettleFileException * @throws IOException */ private Properties loadProperties( PluginInterface plugin, String relativeName ) throws KettleFileException, IOException { if ( plugin == null ) { throw new NullPointerException(); } FileObject propFile = KettleVFS.getInstance( DefaultBowl.getInstance() ) .getFileObject( plugin.getPluginDirectory().getPath() + Const.FILE_SEPARATOR + relativeName ); if ( !propFile.exists() ) { throw new FileNotFoundException( propFile.toString() ); } try { Properties pluginProperties = new Properties(); pluginProperties.load( new FileInputStream( propFile.getName().getPath() ) ); return pluginProperties; } catch ( Exception e ) { // Do not catch ConfigurationException. Different shims will use different // packages for this exception. throw new IOException( e ); } } private void loadSiteFilesIfNecessary( NamedCluster namedCluster, IMetaStore metaStore ) { if ( namedCluster == null ) { return; //Can't do anything without a cluster } if ( namedCluster.getSiteFiles().isEmpty() ) { // This seeds the site files once if not already present - standard behavior unconditionalAddOfSiteFiles( namedCluster, metaStore ); return; } if ( Boolean.parseBoolean( System.getProperties().getProperty( Const.KETTLE_AUTO_UPDATE_SITE_FILE ) ) ) { // Special mode that tries to update site files by checking modification time of the file against what // is stored in the named cluster semiIntelligentSiteFileUpdate( namedCluster, metaStore ); } } private void unconditionalAddOfSiteFiles( NamedCluster namedCluster, IMetaStore metaStore ) { String rootDir = getNamedClusterConfigsRootDir( metaStore ); for ( String siteFileName : siteFileNames ) { String path = rootDir + File.separator + namedCluster.getName() + File.separator + siteFileName; File file = new File( path ); if ( file.exists() ) { try { namedCluster.addSiteFile( new NamedClusterSiteFileImpl( siteFileName, file.lastModified(), FileUtils.readFileToString( file, StandardCharsets.UTF_8.toString() ) ) ); } catch ( IOException e ) { log.logError( "An error occurred importing " + path + " into HadoopCluster " + namedCluster.getName(), e ); } } } if ( !namedCluster.getSiteFiles().isEmpty() ) { autoUpdateMetastoreWithSiteFiles( namedCluster, metaStore ); } } private void semiIntelligentSiteFileUpdate( NamedCluster namedCluster, IMetaStore metaStore ) { String rootDir = getNamedClusterConfigsRootDir( metaStore ); Map map = namedCluster.getSiteFiles().stream().collect( Collectors.toMap( NamedClusterSiteFile::getSiteFileName, namedClusterSiteFile -> namedClusterSiteFile ) ); List newSiteFiles = new ArrayList<>(); List missingFiles = new ArrayList<>(); for ( String siteFileName : siteFileNames ) { String path = rootDir + File.separator + namedCluster.getName() + File.separator + siteFileName; File file = new File( path ); if ( file.exists() && ( map.get( siteFileName ) == null || file.lastModified() != map.get( siteFileName ) .getSourceFileModificationTime() ) ) { try { newSiteFiles.add( new 
NamedClusterSiteFileImpl( siteFileName, file.lastModified(), FileUtils.readFileToString( file, StandardCharsets.UTF_8.toString() ) ) ); } catch ( IOException e ) { log.logError( "An error occurred importing " + path + " into HadoopCluster " + namedCluster.getName(), e ); } } else { //List of files where we need to retain the old site file if it exists missingFiles.add( siteFileName ); } } // If there is nothing new then we don't need to change anything if ( !newSiteFiles.isEmpty() ) { //Bring in the old files not present for ( String siteFile : missingFiles ) { if ( map.get( siteFile ) != null ) { newSiteFiles.add( map.get( siteFile ) ); } } //newSiteFiles is complete, update the named cluster and write the metastore entry namedCluster.setSiteFiles( newSiteFiles ); autoUpdateMetastoreWithSiteFiles( namedCluster, metaStore ); } } private void autoUpdateMetastoreWithSiteFiles( NamedCluster namedCluster, IMetaStore metaStore ) { boolean recoverOriginal = false; try { update( namedCluster, metaStore ); } catch ( MetaStoreException e ) { log.logError( "An error occurred trying to save HadoopCluster " + namedCluster.getName() + " with embedded site files in the metastore. Recovering original HadoopCluster.", e ); recoverOriginal = true; } //As a safeguard make sure we can read the metastore if ( !recoverOriginal ) { try { getNamedClusterByName( namedCluster.getName(), metaStore ); } catch ( Exception e ) { log.logError( "Could not successfully read back Hadoop Cluster " + namedCluster.getName() + " after embedding site files. Recovering original HadoopCluster." ); recoverOriginal = true; } } if ( recoverOriginal ) { // We can't read the metastore or could store the new one. Try to put the old hadoop cluster back namedCluster.setSiteFiles( new ArrayList() ); try { update( namedCluster, metaStore ); } catch ( MetaStoreException e ) { log.logError( "An error occurred trying to recover the old HadoopCluster" + namedCluster.getName(), e ); } } } private String getNamedClusterConfigsRootDir( IMetaStore metaStore ) { String rootDir = metaStore instanceof XmlMetaStore ? ( (XmlMetaStore) metaStore ).getRootFolder() : System.getProperty( "user.home" ) + File.separator + ".pentaho" + File.separator + "metastore"; return rootDir + File.separator + "pentaho" + File.separator + "NamedCluster" + File.separator + "Configs"; } } ================================================ FILE: impl/cluster/src/main/resources/org/pentaho/big/data/impl/cluster/messages/messages_en_US.properties ================================================ NamedClusterManager.ErrorFindingUserMetastore=No metastore found and exception encountered looking for user-specified or legacy metastore NamedClusterManager.ErrorReadingMetastore=Error loading user-specified metastore ================================================ FILE: impl/cluster/src/test/java/org/pentaho/big/data/impl/cluster/NamedClusterImplTest.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. 
* * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.impl.cluster; import org.apache.commons.io.FileUtils; import org.apache.commons.vfs2.VFS; import org.apache.commons.vfs2.impl.StandardFileSystemManager; import org.apache.commons.vfs2.provider.UriParser; import org.junit.After; import org.junit.Before; import org.junit.Ignore; import org.junit.Test; import org.junit.runner.RunWith; import org.mockito.MockedStatic; import org.mockito.Mockito; import org.mockito.junit.MockitoJUnitRunner; import org.mockito.stubbing.Answer; import org.pentaho.di.core.encryption.Encr; import org.pentaho.di.core.encryption.TwoWayPasswordEncoderPluginType; import org.pentaho.di.core.exception.KettleValueException; import org.pentaho.di.core.osgi.api.NamedClusterSiteFile; import org.pentaho.di.core.osgi.impl.NamedClusterSiteFileImpl; import org.pentaho.di.core.plugins.PluginRegistry; import org.pentaho.di.core.row.RowMetaInterface; import org.pentaho.di.core.variables.VariableSpace; import org.pentaho.hadoop.shim.api.cluster.NamedCluster; import org.pentaho.metastore.api.IMetaStore; import org.pentaho.metastore.api.security.Base64TwoWayPasswordEncoder; import org.pentaho.metastore.api.security.ITwoWayPasswordEncoder; import org.w3c.dom.Element; import org.w3c.dom.Node; import javax.xml.parsers.DocumentBuilderFactory; import javax.xml.xpath.XPath; import javax.xml.xpath.XPathConstants; import javax.xml.xpath.XPathFactory; import java.io.ByteArrayInputStream; import java.io.File; import java.util.Map; import static com.google.code.beanmatchers.BeanMatchers.hasValidBeanConstructor; import static com.google.code.beanmatchers.BeanMatchers.hasValidBeanEqualsFor; import static com.google.code.beanmatchers.BeanMatchers.hasValidGettersAndSetters; import static org.hamcrest.MatcherAssert.assertThat; import static org.junit.Assert.assertArrayEquals; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNull; import static org.junit.Assert.assertTrue; import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.anyString; import static org.mockito.AdditionalMatchers.or; import static org.mockito.ArgumentMatchers.isNull; import static org.mockito.ArgumentMatchers.eq; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.validateMockitoUsage; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; /** * Created by bryan on 7/14/15. 
*/ @RunWith( MockitoJUnitRunner.class ) public class NamedClusterImplTest { private static final String HDFS_PREFIX = "hdfs"; private VariableSpace variableSpace; private NamedClusterImpl namedCluster; private String namedClusterName; private String namedClusterHdfsHost; private String namedClusterHdfsPort; private String namedClusterHdfsUsername; private String namedClusterHdfsPassword; private String namedClusterJobTrackerPort; private String namedClusterJobTrackerHost; private String namedClusterZookeeperHost; private String namedClusterZookeeperPort; private String namedClusterOozieUrl; private String namedClusterStorageScheme; private String namedClusterKafkaBootstrapServers; private boolean isMapr; private IMetaStore metaStore; private StandardFileSystemManager fsm; private String fileContents1; private String fileContents2; private MockedStatic vfsMockedStatic; private MockedStatic uriParserMockedStatic; @Before public void setup() throws Exception { PluginRegistry.addPluginType( TwoWayPasswordEncoderPluginType.getInstance() ); PluginRegistry.init( false ); Encr.init( "Kettle" ); vfsMockedStatic = Mockito.mockStatic( VFS.class ); uriParserMockedStatic = Mockito.mockStatic( UriParser.class ); uriParserMockedStatic.when( () -> UriParser.encode( anyString(), any( char[].class ) ) ).thenCallRealMethod(); uriParserMockedStatic.when( () -> UriParser.decode( anyString() ) ).thenCallRealMethod(); uriParserMockedStatic.when( () -> UriParser.appendEncoded( any( StringBuilder.class ), anyString(), any( char[].class ) ) ).thenCallRealMethod(); metaStore = mock( IMetaStore.class ); variableSpace = mock( VariableSpace.class ); namedCluster = new NamedClusterImpl(); namedCluster.shareVariablesWith( variableSpace ); namedClusterName = "namedClusterName"; namedClusterHdfsHost = "namedClusterHdfsHost"; namedClusterHdfsPort = "12345"; namedClusterHdfsUsername = "namedClusterHdfsUsername"; namedClusterHdfsPassword = "namedClusterHdfsPassword"; namedClusterJobTrackerHost = "namedClusterJobTrackerHost"; namedClusterJobTrackerPort = "namedClusterJobTrackerPort"; namedClusterZookeeperHost = "namedClusterZookeeperHost"; namedClusterZookeeperPort = "namedClusterZookeeperPort"; namedClusterOozieUrl = "namedClusterOozieUrl"; namedClusterStorageScheme = "hdfs"; namedClusterKafkaBootstrapServers = "kafkaBootstrapServers"; isMapr = true; fileContents1 = FileUtils.readFileToString( new File( getClass().getResource( "/core-site.xml" ).getFile() ), "UTF-8" ); fileContents2 = "some printable contents"; namedCluster.setName( namedClusterName ); namedCluster.setHdfsHost( namedClusterHdfsHost ); namedCluster.setHdfsPort( namedClusterHdfsPort ); namedCluster.setHdfsUsername( namedClusterHdfsUsername ); namedCluster.setHdfsPassword( namedCluster.encodePassword( namedClusterHdfsPassword ) ); namedCluster.setJobTrackerHost( namedClusterJobTrackerHost ); namedCluster.setJobTrackerPort( namedClusterJobTrackerPort ); namedCluster.setZooKeeperHost( namedClusterZookeeperHost ); namedCluster.setZooKeeperPort( namedClusterZookeeperPort ); namedCluster.setOozieUrl( namedClusterOozieUrl ); namedCluster.setMapr( isMapr ); namedCluster.setStorageScheme( namedClusterStorageScheme ); namedCluster.setKafkaBootstrapServers( namedClusterKafkaBootstrapServers ); namedCluster.addSiteFile( "core-site.xml", fileContents1 ); namedCluster.addSiteFile( new NamedClusterSiteFileImpl( "hbase-site.xml", 11111L, fileContents2 ) ); fsm = mock( StandardFileSystemManager.class ); vfsMockedStatic.when( VFS::getManager ).thenReturn( fsm ); } @After public 
void cleanupMocks() { vfsMockedStatic.close(); uriParserMockedStatic.close(); validateMockitoUsage(); } @Test public void testBean() { assertThat( NamedClusterImpl.class, hasValidBeanConstructor() ); assertThat( NamedClusterImpl.class, hasValidGettersAndSetters() ); assertThat( NamedClusterImpl.class, hasValidBeanEqualsFor( "name" ) ); } @Test public void testClone() { long before = System.currentTimeMillis(); NamedClusterImpl newNamedCluster = namedCluster.clone(); assertEquals( namedClusterStorageScheme, newNamedCluster.getStorageScheme() ); assertEquals( namedClusterName, newNamedCluster.getName() ); assertEquals( namedClusterHdfsHost, newNamedCluster.getHdfsHost() ); assertEquals( namedClusterHdfsPort, newNamedCluster.getHdfsPort() ); assertEquals( namedClusterHdfsUsername, newNamedCluster.getHdfsUsername() ); assertEquals( namedClusterHdfsPassword, newNamedCluster.decodePassword( newNamedCluster.getHdfsPassword() ) ); assertEquals( namedClusterJobTrackerHost, newNamedCluster.getJobTrackerHost() ); assertEquals( namedClusterJobTrackerPort, newNamedCluster.getJobTrackerPort() ); assertEquals( namedClusterZookeeperHost, newNamedCluster.getZooKeeperHost() ); assertEquals( namedClusterZookeeperPort, newNamedCluster.getZooKeeperPort() ); assertEquals( namedClusterOozieUrl, newNamedCluster.getOozieUrl() ); assertEquals( namedClusterKafkaBootstrapServers, newNamedCluster.getKafkaBootstrapServers() ); assertTrue( before <= newNamedCluster.getLastModifiedDate() ); assertTrue( newNamedCluster.getLastModifiedDate() <= System.currentTimeMillis() ); } @Test public void testCopyVariablesFrom() { VariableSpace from = mock( VariableSpace.class ); namedCluster.copyVariablesFrom( from ); verify( variableSpace ).copyVariablesFrom( from ); } @Test public void testEnvironmentSubstitute() { String testVar = "testVar"; String testVal = "testVal"; when( variableSpace.environmentSubstitute( testVar ) ).thenReturn( testVal ); assertEquals( testVal, namedCluster.environmentSubstitute( testVar ) ); } @Test public void testArrayEnvironmentSubstitute() { String[] testVars = { "testVar" }; String[] testVals = { "testVal" }; Mockito.when( variableSpace.environmentSubstitute( testVars ) ).thenReturn( testVals ); assertArrayEquals( testVals, namedCluster.environmentSubstitute( testVars ) ); } @Test public void testFieldSubstitute() throws KettleValueException { String testString = "testString"; RowMetaInterface rowMetaInterface = mock( RowMetaInterface.class ); Object[] rowData = new Object[] {}; String testVal = "testVal"; when( variableSpace.fieldSubstitute( testString, rowMetaInterface, rowData ) ).thenReturn( testVal ); assertEquals( testVal, namedCluster.fieldSubstitute( testString, rowMetaInterface, rowData ) ); } @Test public void testGetVariableDefault() { String name = "name"; String defaultValue = "default"; String val = "val"; when( variableSpace.getVariable( name, defaultValue ) ).thenReturn( val ); assertEquals( val, namedCluster.getVariable( name, defaultValue ) ); } @Test public void testGetVariable() { String name = "name"; String val = "val"; when( variableSpace.getVariable( name ) ).thenReturn( val ); assertEquals( val, namedCluster.getVariable( name ) ); } @Test public void testGetBooleanValueOfVariable() { String var = "var"; String val1 = "Y"; String val2 = "N"; assertTrue( namedCluster.getBooleanValueOfVariable( null, true ) ); assertFalse( namedCluster.getBooleanValueOfVariable( null, false ) ); when( variableSpace.environmentSubstitute( var ) ).thenReturn( val1 ).thenReturn( val2 ).thenReturn( 
null ); assertTrue( namedCluster.getBooleanValueOfVariable( var, false ) ); assertFalse( namedCluster.getBooleanValueOfVariable( var, true ) ); assertTrue( namedCluster.getBooleanValueOfVariable( var, true ) ); assertFalse( namedCluster.getBooleanValueOfVariable( var, false ) ); } @Test public void testListVariables() { String[] vars = new String[] { "vars" }; when( variableSpace.listVariables() ).thenReturn( vars ); assertArrayEquals( vars, namedCluster.listVariables() ); } @Test public void testSetVariable() { String var = "var"; String val = "val"; namedCluster.setVariable( var, val ); verify( variableSpace ).setVariable( var, val ); } @Test @SuppressWarnings( "unchecked" ) public void testInjectVariables() { Map prop = mock( Map.class ); namedCluster.injectVariables( prop ); verify( variableSpace ).injectVariables( prop ); } @Test public void testComparator() { NamedClusterImpl other = new NamedClusterImpl(); other.setName( "a" ); assertTrue( NamedClusterImpl.comparator.compare( namedCluster, other ) > 0 ); other.setName( "z" ); assertTrue( NamedClusterImpl.comparator.compare( namedCluster, other ) < 0 ); other.setName( namedClusterName ); assertTrue( NamedClusterImpl.comparator.compare( namedCluster, other ) == 0 ); } @Test public void testToString() { NamedClusterImpl other = new NamedClusterImpl(); assertEquals( "Named cluster: null", other.toString() ); other.setName( "a" ); assertEquals( "Named cluster: a", other.toString() ); } @Ignore @Test public void testGenerateURLNullParameters() { namedCluster.setName( null ); String scheme = "testScheme"; buildAppendEncodedUserPassMocks( namedClusterHdfsUsername, namedClusterHdfsPassword ); assertEquals( scheme + "://" + namedClusterHdfsUsername + ":" + namedClusterHdfsPassword + "@" + namedClusterHdfsHost + ":" + namedClusterHdfsPort, namedCluster.generateURL( "testScheme", metaStore, null ) ); assertNull( namedCluster.generateURL( null, metaStore, null ) ); assertEquals( scheme + "://" + namedClusterHdfsUsername + ":" + namedClusterHdfsPassword + "@" + namedClusterHdfsHost + ":" + namedClusterHdfsPort, namedCluster.generateURL( "testScheme", null, null ) ); } @Ignore @Test public void testGenerateURLHDFS() { String scheme = "hdfs"; String testHost = "testHost"; String testPort = "9333"; String testUsername = "testUsername"; String testPassword = "testPassword"; namedCluster.setHdfsHost( " " + testHost + " " ); namedCluster.setHdfsPort( " " + testPort + " " ); namedCluster.setHdfsUsername( " " + testUsername + " " ); namedCluster.setHdfsPassword( namedCluster.encodePassword( testPassword ) ); buildAppendEncodedUserPassMocks( testUsername, namedCluster.encodePassword( testPassword ) ); assertEquals( scheme + "://" + testUsername + ":" + testPassword + "@" + testHost + ":" + testPort, namedCluster.generateURL( scheme, metaStore, null ) ); } @Test public void testGenerateURLHDFSPort() { String scheme = "hdfs"; String testHost = "testHost"; String testPort = "9333"; namedCluster.setHdfsHost( " " + testHost + " " ); namedCluster.setHdfsPort( " " + testPort + " " ); namedCluster.setHdfsUsername( null ); namedCluster.setHdfsPassword( null ); assertEquals( scheme + "://" + testHost + ":" + testPort, namedCluster.generateURL( scheme, metaStore, null ) ); } @Test public void testCheckHdfsNameEmpty() { String testHost = ""; namedCluster.setHdfsHost( " " + testHost + " " ); assertEquals( true, namedCluster.isHdfsHostEmpty( null ) ); } @Test public void testGetHdfsNameParsed() { String testHost = "test"; namedCluster.setHdfsHost( " " + testHost + " " 
); assertEquals( "test", namedCluster.getHostNameParsed( null ) ); } @Test public void testGetHdfsNameParsedFromVariable() { String testHost = "${hdfsHost}"; namedCluster.setHdfsHost( " " + testHost + " " ); when( variableSpace.getVariable( "hdfsHost" ) ).thenReturn( "test" ); assertEquals( "test", namedCluster.getHostNameParsed( variableSpace ) ); } @Test public void testGetHdfsNameParsedFromVariableNoVariableInSpace() { String testHost = "${hdfsHost}"; namedCluster.setHdfsHost( " " + testHost + " " ); assertEquals( null, namedCluster.getHostNameParsed( variableSpace ) ); } @Test public void testCheckHdfsNameNotEmpty() { String testHost = "test"; namedCluster.setHdfsHost( " " + testHost + " " ); assertEquals( false, namedCluster.isHdfsHostEmpty( null ) ); } @Test public void testCheckHdfsNameNull() { namedCluster.setHdfsHost( null ); assertEquals( true, namedCluster.isHdfsHostEmpty( null ) ); } @Test public void testCheckHdfsNameVariableNull() { namedCluster.setHdfsHost( "${hdfsHost}" ); assertEquals( true, namedCluster.isHdfsHostEmpty( null ) ); } @Test public void testCheckHdfsNameVariableNotNull() { namedCluster.setHdfsHost( "${hdfsHost}" ); when( variableSpace.getVariable( "hdfsHost" ) ).thenReturn( "test" ); assertEquals( false, namedCluster.isHdfsHostEmpty( variableSpace ) ); } @Test public void testProcessURLHostEmpty() { namedCluster.setHdfsHost( null ); namedCluster.setStorageScheme( "hdfs" ); String incomingURL = "${hdfsUrl}/test"; assertEquals( incomingURL, namedCluster.processURLsubstitution( incomingURL, metaStore, null ) ); } @Ignore @Test public void testProcessURLhdfsFullSubstitution() { String pathBase = "//namedClusterHdfsUsername:namedClusterHdfsPassword@hostname:12340"; String filePathInFileSystem = "/tmp/hdsfDemo.txt"; namedCluster.setHdfsHost( "hostname" ); namedCluster.setHdfsPort( "12340" ); namedCluster.setStorageScheme( HDFS_PREFIX ); String incomingURL = HDFS_PREFIX + ":" + pathBase + filePathInFileSystem; buildExtractSchemeMocks( HDFS_PREFIX, incomingURL, pathBase + filePathInFileSystem ); assertEquals( incomingURL, namedCluster.processURLsubstitution( incomingURL, metaStore, null ) ); } @Test public void testProcessURLSubstitution_Gateway() { namedCluster.setUseGateway( true ); String incomingURL = "/path"; String expected = "hc://" + namedCluster.getName() + incomingURL; String actual = namedCluster.processURLsubstitution( incomingURL, metaStore, null ); assertTrue( "Expected " + expected + " actual " + actual, expected.equalsIgnoreCase( actual ) ); } @Ignore @Test public void testProcessURLWASBFullSubstitution() { String prefix = "wasb"; String pathBase = "//namedClusterHdfsUsername:namedClusterHdfsPassword@hostname:12340"; String filePathInFileSystem = "/tmp/hdsfDemo.txt"; namedCluster.setHdfsHost( "hostname" ); namedCluster.setHdfsPort( "12340" ); namedCluster.setStorageScheme( prefix ); String incomingURL = prefix + ":" + pathBase + filePathInFileSystem; buildAppendEncodedUserPassMocks( namedClusterHdfsUsername, namedClusterHdfsPassword ); buildExtractSchemeMocks( prefix, incomingURL, pathBase + filePathInFileSystem ); assertEquals( incomingURL, namedCluster.processURLsubstitution( incomingURL, metaStore, null ) ); } @Test public void testProcessURLHostVariableNull() { namedCluster.setHdfsHost( "${hostUrl}" ); namedCluster.setStorageScheme( "hdfs" ); String incomingURL = "${hdfsUrl}/test"; assertEquals( incomingURL, namedCluster.processURLsubstitution( incomingURL, metaStore, null ) ); } @Test public void testProcessURLHostVariableNotNull() { 
namedCluster.setHdfsHost( "${hostUrl}" ); namedCluster.setStorageScheme( HDFS_PREFIX ); String hostPort = "1000"; namedCluster.setHdfsPort( hostPort ); namedCluster.setHdfsUsername( "" ); namedCluster.setHdfsPassword( "" ); String incomingURL = "${hdfsUrl}/test"; String hostName = "test"; when( variableSpace.getVariable( "hostUrl" ) ).thenReturn( hostName ); when( variableSpace.environmentSubstitute( namedCluster.getHdfsHost() ) ).thenReturn( hostName ); when( variableSpace.environmentSubstitute( incomingURL ) ).thenReturn( hostName + "/test" ); String pathWithoutPrefix = "//" + hostName + ":" + hostPort + "//hdfsUrl//test"; String pathWithPrefix = HDFS_PREFIX + ":" + pathWithoutPrefix; buildExtractSchemeMocks( HDFS_PREFIX, pathWithPrefix, pathWithoutPrefix ); assertEquals( "hdfs://" + hostName + ":" + hostPort + incomingURL, namedCluster.processURLsubstitution( incomingURL, metaStore, variableSpace ) ); } @Test public void testProcessCompleteClusterVariableReplacement() { String hostname = "hostname"; String hostPort = "1000"; String variableName = "hdfsUrl"; // special case to allow legacy fully qualified urls to work namedCluster.setHdfsHost( hostname ); namedCluster.setStorageScheme( HDFS_PREFIX ); namedCluster.setHdfsPort( hostPort ); namedCluster.setHdfsUsername( "" ); namedCluster.setHdfsPassword( "" ); String incomingURL = "${" + variableName + "}/test"; String pathWithoutPrefix = "//" + hostname + ":" + hostPort + "//" + variableName + "//test"; String pathWithPrefix = HDFS_PREFIX + ":" + pathWithoutPrefix; when( variableSpace.environmentSubstitute( incomingURL ) ).thenReturn( "hdfs://FullyQualifiedPath/test" ); buildExtractSchemeMocks( HDFS_PREFIX, pathWithPrefix, pathWithoutPrefix ); assertEquals( incomingURL, namedCluster.processURLsubstitution( incomingURL, metaStore, variableSpace ) ); } @Test public void testProcessURLsubstitutionMaprFS_startsWithMaprfs() { String incomingURL = "maprfs"; namedCluster.setMapr( true ); assertEquals( incomingURL, namedCluster.processURLsubstitution( incomingURL, metaStore, null ) ); } @Test public void testProcessURLsubstitutionMaprFS_startsWithNoMaprfs() { String incomingURL = "path"; namedCluster.setMapr( true ); assertEquals( "maprfs://" + incomingURL, namedCluster.processURLsubstitution( incomingURL, metaStore, null ) ); } @Ignore @Test public void testProcessURLsubstitutionNC() { String prefix = "hc"; String pathWithoutPrefix = "//cluster/input/file.txt"; String pathWithPrefix = prefix + ":" + pathWithoutPrefix; buildAppendEncodedUserPassMocks( namedClusterHdfsUsername, namedClusterHdfsPassword ); buildExtractSchemeMocks( prefix, pathWithPrefix, pathWithoutPrefix ); assertEquals( "hdfs://namedClusterHdfsUsername:namedClusterHdfsPassword@namedClusterHdfsHost:12345/input/file.txt", namedCluster.processURLsubstitution( "hc://cluster/input/file.txt", metaStore, null ) ); } @Ignore @Test public void testProcessURLSubstitutionNC_variable() { String pathWithoutPrefix = "//" + namedClusterHdfsUsername + ":" + namedClusterHdfsPassword + "@" + namedClusterHdfsHost + ":" + namedClusterHdfsPort + "//ncUrl//test"; String pathWithPrefix = HDFS_PREFIX + ":" + pathWithoutPrefix; String incomingURL = "${ncUrl}/test"; when( variableSpace.environmentSubstitute( incomingURL ) ).thenReturn( "hc://cluster/test" ); buildAppendEncodedUserPassMocks( namedClusterHdfsUsername, namedClusterHdfsPassword ); buildExtractSchemeMocks( HDFS_PREFIX, pathWithPrefix, pathWithoutPrefix ); assertEquals( incomingURL, namedCluster.processURLsubstitution( incomingURL, metaStore, 
variableSpace ) ); } @Test public void testGenerateURLHDFSNoPort() { String scheme = "hdfs"; String testHost = "testHost"; namedCluster.setHdfsHost( " " + testHost + " " ); namedCluster.setHdfsPort( null ); namedCluster.setHdfsUsername( null ); namedCluster.setHdfsPassword( null ); assertEquals( scheme + "://" + testHost, namedCluster.generateURL( scheme, metaStore, null ) ); } @Ignore @Test public void testGenerateURLHDFSVariableSpace() { String schemeVar = "schemeVar"; String testScheme = "hdfs"; String hostVar = "hostVar"; String testHost = "testHost"; String portVar = "portVar"; String testPort = "9333"; String usernameVar = "usernameVar"; String testUsername = "testUsername"; String passwordVar = "passwordVar"; String testPassword = "testPassword"; namedCluster.setStorageScheme( "${" + schemeVar + "}" ); namedCluster.setHdfsHost( "${" + hostVar + "}" ); namedCluster.setHdfsPort( "${" + portVar + "}" ); namedCluster.setHdfsUsername( "${" + usernameVar + "}" ); namedCluster.setHdfsPassword( "${" + passwordVar + "}" ); when( variableSpace.getVariable( schemeVar ) ).thenReturn( testScheme ); when( variableSpace.getVariable( hostVar ) ).thenReturn( testHost ); when( variableSpace.getVariable( portVar ) ).thenReturn( testPort ); when( variableSpace.getVariable( usernameVar ) ).thenReturn( testUsername ); when( variableSpace.getVariable( passwordVar ) ).thenReturn( testPassword ); when( variableSpace.environmentSubstitute( namedCluster.getStorageScheme() ) ).thenReturn( testScheme ); when( variableSpace.environmentSubstitute( namedCluster.getHdfsHost() ) ).thenReturn( testHost ); when( variableSpace.environmentSubstitute( namedCluster.getHdfsPort() ) ).thenReturn( testPort ); when( variableSpace.environmentSubstitute( namedCluster.getHdfsUsername() ) ).thenReturn( testUsername ); when( variableSpace.environmentSubstitute( namedCluster.getHdfsPassword() ) ).thenReturn( testPassword ); buildAppendEncodedUserPassMocks( testUsername, testPassword ); assertEquals( testScheme + "://" + testUsername + ":" + testPassword + "@" + testHost + ":" + testPort, namedCluster.generateURL( "${" + schemeVar + "}", metaStore, variableSpace ) ); } @Test public void testGenerateURLHDFSVariableSpace_noVariable() { String scheme = "hdfs"; String hostVar = "hostVar"; String portVar = "portVar"; String usernameVar = "usernameVar"; String passwordVar = "passwordVar"; namedCluster.setStorageScheme( "${" + scheme + "}" ); namedCluster.setHdfsHost( "${" + hostVar + "}" ); namedCluster.setHdfsPort( "${" + portVar + "}" ); namedCluster.setHdfsUsername( "${" + usernameVar + "}" ); namedCluster.setHdfsPassword( "${" + passwordVar + "}" ); assertEquals( scheme + ":", namedCluster.generateURL( scheme, metaStore, variableSpace ) ); } @Test public void testXMLEmbedding() throws Exception { Element node = createNodeFromNamedCluster(); NamedCluster nc = new NamedClusterImpl(); nc = nc.fromXmlForEmbed( node ); assertNamedClusterEquality( nc ); } @Test public void testLegacyXMLEmbedding() throws Exception { Element node = createNodeFromNamedCluster(); XPath xPath = XPathFactory.newInstance().newXPath(); //Find the node containing the hdfsPassword Node n = ( (Node) xPath.evaluate( "/NamedCluster/child/id[text()='hdfsPassword']", node, XPathConstants.NODE ) ) .getNextSibling(); //Set the password value to what it would be if we were still encoding the legacy way ITwoWayPasswordEncoder passwordEncoder = new Base64TwoWayPasswordEncoder(); n.setTextContent( passwordEncoder.encode( namedCluster.getHdfsPassword() ) ); //Now check that we 
can still decode it NamedCluster nc = new NamedClusterImpl(); nc = nc.fromXmlForEmbed( node ); assertNamedClusterEquality( nc ); } private Element createNodeFromNamedCluster() throws Exception { String clusterXml = namedCluster.toXmlForEmbed( "NamedCluster" ); System.out.println( clusterXml ); return DocumentBuilderFactory.newInstance().newDocumentBuilder().parse( new ByteArrayInputStream( clusterXml.getBytes() ) ) .getDocumentElement(); } private void assertNamedClusterEquality( NamedCluster nc ) { assertEquals( namedCluster.getHdfsHost(), nc.getHdfsHost() ); assertEquals( namedCluster.getHdfsPort(), nc.getHdfsPort() ); assertEquals( namedCluster.getHdfsUsername(), nc.getHdfsUsername() ); assertEquals( namedCluster.getHdfsPassword(), nc.getHdfsPassword() ); assertEquals( namedCluster.getName(), nc.getName() ); assertEquals( namedCluster.getShimIdentifier(), nc.getShimIdentifier() ); assertEquals( namedCluster.getStorageScheme(), nc.getStorageScheme() ); assertEquals( namedCluster.getJobTrackerHost(), nc.getJobTrackerHost() ); assertEquals( namedCluster.getJobTrackerPort(), nc.getJobTrackerPort() ); assertEquals( namedCluster.getZooKeeperHost(), nc.getZooKeeperHost() ); assertEquals( namedCluster.getZooKeeperPort(), nc.getZooKeeperPort() ); assertEquals( namedCluster.getOozieUrl(), nc.getOozieUrl() ); assertEquals( namedCluster.getKafkaBootstrapServers(), nc.getKafkaBootstrapServers() ); assertEquals( namedCluster.getLastModifiedDate(), nc.getLastModifiedDate() ); assertEquals( namedCluster.getSiteFiles().size(), nc.getSiteFiles().size() ); for ( NamedClusterSiteFile siteFile : namedCluster.getSiteFiles() ) { String contents = getSiteFileContents( nc, siteFile.getSiteFileName() ); assertEquals( siteFile.getSiteFileContents(), contents ); if ( "hbase-site.xml".equals( siteFile.getSiteFileName() ) ) { assertEquals( 11111L, siteFile.getSourceFileModificationTime() ); } } } private Answer buildSchemeAnswer( String prefix, String buildPath ) { return invocation -> { Object[] args = invocation.getArguments(); ( (StringBuilder) args[2] ).append( buildPath ); return prefix; }; } private Answer buildUrlEncodeAnswer( String value ) { return invocation -> { Object[] args = invocation.getArguments(); ( (StringBuilder) args[0] ).append( (String) args[1] ); return null; }; } private void buildExtractSchemeMocks( String prefix, String fullPath, String pathWithoutPrefix ) { String[] schemes = { "hc", "hdfs", "maprfs", "wasb" }; when( fsm.getSchemes() ).thenReturn( schemes ); uriParserMockedStatic.when( () -> UriParser.extractScheme( eq( schemes ), eq( fullPath ), or( isNull(), any( StringBuilder.class ) ) ) ) .thenAnswer( buildSchemeAnswer( prefix, pathWithoutPrefix ) ); } private void buildAppendEncodedUserPassMocks( String username, String password ) { uriParserMockedStatic.when( () -> UriParser.appendEncoded( or( isNull(), any( StringBuilder.class ) ), eq( username ), any( char[].class ) ) ) .thenAnswer( buildUrlEncodeAnswer( username ) ); uriParserMockedStatic.when( () -> UriParser.appendEncoded( or( isNull(), any( StringBuilder.class ) ), eq( password ), any( char[].class ) ) ) .thenAnswer( buildUrlEncodeAnswer( password ) ); } private String getSiteFileContents( NamedCluster nc, String siteFileName ) { NamedClusterSiteFile n = nc.getSiteFiles().stream().filter( sf -> sf.getSiteFileName().equals( siteFileName ) ) .findFirst().orElse( null ); return n == null ? 
null : n.getSiteFileContents(); } } ================================================ FILE: impl/cluster/src/test/java/org/pentaho/big/data/impl/cluster/NamedClusterManagerTest.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.impl.cluster; import org.junit.Before; import org.junit.Test; import org.pentaho.di.core.attributes.metastore.EmbeddedMetaStore; import org.pentaho.di.core.encryption.Encr; import org.pentaho.di.core.encryption.TwoWayPasswordEncoderPluginType; import org.pentaho.di.core.exception.KettleException; import org.pentaho.di.core.logging.KettleLogStore; import org.pentaho.di.core.plugins.LifecyclePluginType; import org.pentaho.di.core.plugins.PluginInterface; import org.pentaho.di.core.plugins.PluginRegistry; import org.pentaho.hadoop.shim.api.cluster.NamedCluster; import org.pentaho.metastore.api.IMetaStore; import org.pentaho.metastore.api.exceptions.MetaStoreException; import org.pentaho.metastore.persist.MetaStoreFactory; import org.pentaho.metastore.stores.delegate.DelegatingMetaStore; import java.io.File; import java.io.IOException; import java.net.MalformedURLException; import java.net.URL; import java.nio.file.Path; import java.util.ArrayList; import java.util.Arrays; import java.util.List; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNotEquals; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertNull; import static org.junit.Assert.assertTrue; import static org.mockito.ArgumentMatchers.*; import static org.mockito.Mockito.doReturn; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.never; import static org.mockito.Mockito.spy; import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; /** * Created by bryan on 7/14/15. */ public class NamedClusterManagerTest { private IMetaStore metaStore; private MetaStoreFactory metaStoreFactory; private NamedClusterManager namedClusterManager; private PluginInterface mockBigDataPlugin; private Path tempDirectoryName; @Before @SuppressWarnings( "unchecked" ) public void setup() throws KettleException, IOException { PluginRegistry.addPluginType( TwoWayPasswordEncoderPluginType.getInstance() ); PluginRegistry.init( false ); Encr.init( "Kettle" ); KettleLogStore.init(); metaStore = mock( IMetaStore.class ); metaStoreFactory = mock( MetaStoreFactory.class ); namedClusterManager = new NamedClusterManager(); // the protected method NamedClusterManager.getMetaStoreFactory() will always create a new Factory // by reading xml from local store. 
For these tests, create a Mockito spy that will always return the mock // MetaStore factory namedClusterManager = spy( namedClusterManager ); doReturn( metaStoreFactory ).when( namedClusterManager ).getMetaStoreFactory( metaStore ); namedClusterManager.putMetaStoreFactory( metaStore, metaStoreFactory ); mockBigDataPlugin = mock( PluginInterface.class ); when( mockBigDataPlugin.getIds() ).thenReturn( new String[] { "HadoopSpoonPlugin" } ); when( mockBigDataPlugin.matches( "HadoopSpoonPlugin" ) ).thenReturn( true ); PluginRegistry.getInstance().registerPlugin( LifecyclePluginType.class, mockBigDataPlugin ); } private boolean deleteDirectory( File directoryToBeDeleted ) { File[] allContents = directoryToBeDeleted.listFiles(); if ( allContents != null ) { for ( File file : allContents ) { deleteDirectory( file ); } } return directoryToBeDeleted.delete(); } @Test public void testGetClusterTemplate() { NamedCluster clusterTemplate = namedClusterManager.getClusterTemplate(); assertFalse( clusterTemplate == namedClusterManager.getClusterTemplate() ); assertTrue( clusterTemplate.equals( namedClusterManager.getClusterTemplate() ) ); NamedCluster template = mock( NamedCluster.class ); NamedCluster clone = mock( NamedCluster.class ); when( template.clone() ).thenReturn( clone ); namedClusterManager.setClusterTemplate( template ); assertEquals( clone, namedClusterManager.getClusterTemplate() ); } @Test public void testCreate() throws MetaStoreException { NamedClusterImpl namedCluster = new NamedClusterImpl(); String testName = "testName"; namedCluster.setName( testName ); namedClusterManager.create( namedCluster, metaStore ); verify( metaStoreFactory ).saveElement( eq( namedCluster ) ); } @Test public void testRead() throws MetaStoreException { String testName = "testName"; NamedClusterImpl namedCluster = new NamedClusterImpl(); when( metaStoreFactory.loadElement( testName ) ).thenReturn( namedCluster ); assertTrue( namedCluster == namedClusterManager.read( testName, metaStore ) ); } @Test public void testUpdate() throws MetaStoreException { NamedClusterImpl namedCluster = new NamedClusterImpl(); String testName = "testName"; namedCluster.setName( testName ); List namedClusters = new ArrayList<>( Arrays.asList( namedCluster ) ); when( metaStoreFactory.getElements( true ) ).thenReturn( namedClusters ).thenReturn( namedClusters ).thenThrow( new MetaStoreException() ); NamedClusterImpl updatedNamedCluster = new NamedClusterImpl(); updatedNamedCluster.setName( testName + "updated" ); namedClusterManager.update( updatedNamedCluster, metaStore ); } @Test public void testDeleteElement() throws MetaStoreException { String testName = "testName"; namedClusterManager.delete( testName, metaStore ); verify( metaStoreFactory ).deleteElement( testName ); } @Test public void testList() throws MetaStoreException { NamedClusterImpl namedCluster = new NamedClusterImpl(); namedCluster.setName( "testName" ); List value = new ArrayList<>( Arrays.asList( namedCluster ) ); when( metaStoreFactory.getElements( anyBoolean(), any( List.class ) ) ).thenReturn( value ); assertEquals( value, namedClusterManager.list( metaStore ) ); } @Test public void testListNames() throws MetaStoreException { List names = new ArrayList<>( Arrays.asList( "testName" ) ); when( metaStoreFactory.getElementNames( false ) ).thenReturn( names ); assertEquals( names, namedClusterManager.listNames( metaStore ) ); } @Test public void testListNames_emptymetaStoreFactory() throws MetaStoreException { IMetaStore metaStore = mock( IMetaStore.class ); List 
expectedNames = new ArrayList<>(); verify( metaStoreFactory, never() ).getElementNames(); assertEquals( expectedNames, namedClusterManager.listNames( metaStore ) ); } @Test public void testContains() throws MetaStoreException { String testName = "testName"; List names = new ArrayList<>( Arrays.asList( testName ) ); when( metaStoreFactory.getElementNames( false ) ).thenReturn( names ); assertFalse( namedClusterManager.contains( testName, null ) ); assertTrue( namedClusterManager.contains( testName, metaStore ) ); assertFalse( namedClusterManager.contains( "testName2", metaStore ) ); } @Test public void testContainsSlaveServer() throws MalformedURLException, MetaStoreException { String pluginFilePath = getClass().getResource( "/plugin.properties" ).getFile(); String resourceDir = pluginFilePath.substring( 0, pluginFilePath.lastIndexOf( "/" ) ); when( mockBigDataPlugin.getPluginDirectory() ).thenReturn( new URL( "file://" + resourceDir ) ); String testName = "testName"; assertFalse( namedClusterManager.contains( testName, null ) ); verify( namedClusterManager, times( 1 ) ).getSlaveServerMetastore(); } @Test @SuppressWarnings( "unchecked" ) public void testGetNamedClusterByName() throws MetaStoreException { String testName = "testName"; NamedCluster namedCluster = mock( NamedCluster.class ); when( namedCluster.getName() ).thenReturn( testName ); List namedClusters = new ArrayList<>( Arrays.asList( namedCluster ) ); when( metaStoreFactory.getElements( anyBoolean(), any( List.class ) ) ).thenReturn( namedClusters ) .thenReturn( namedClusters ).thenThrow( new MetaStoreException() ); assertNull( namedClusterManager.getNamedClusterByName( testName, null ) ); assertEquals( namedCluster, namedClusterManager.getNamedClusterByName( testName, metaStore ) ); assertNull( namedClusterManager.getNamedClusterByName( "fakeName", metaStore ) ); assertNull( namedClusterManager.getNamedClusterByName( testName, metaStore ) ); } @Test @SuppressWarnings( "unchecked" ) public void testGetNamedClusterByHost() throws MetaStoreException { String testName = "testName"; String testHostName = "testHostName"; NamedCluster namedCluster = mock( NamedCluster.class ); when( namedCluster.getName() ).thenReturn( testName ); when( namedCluster.getHdfsHost() ).thenReturn( testHostName ); List namedClusters = new ArrayList<>( Arrays.asList( namedCluster ) ); when( metaStoreFactory.getElements( anyBoolean(), any( List.class ) ) ).thenReturn( namedClusters ) .thenReturn( namedClusters ).thenThrow( new MetaStoreException() ); assertNull( namedClusterManager.getNamedClusterByHost( testHostName, null ) ); assertEquals( namedCluster, namedClusterManager.getNamedClusterByHost( testHostName, metaStore ) ); assertNull( namedClusterManager.getNamedClusterByHost( "fakeName", metaStore ) ); assertNull( namedClusterManager.getNamedClusterByHost( testHostName, metaStore ) ); } @Test public void testGetMetaStoreFactoryEmbeddedMetaStoreSuccess() throws MetaStoreException { NamedClusterManager namedClusterManager = new NamedClusterManager(); MetaStoreFactory metaStoreFactoryFirst = null; MetaStoreFactory metaStoreFactorySecond = null; EmbeddedMetaStore embeddedMetaStore = mock( EmbeddedMetaStore.class ); // get the metastore factory - the first time called, it should create a new one and cache it metaStoreFactoryFirst = namedClusterManager.getMetaStoreFactory( embeddedMetaStore ); // get the metastore factory again - this time it should return the same instance as the first (the cached instance) metaStoreFactorySecond = 
namedClusterManager.getMetaStoreFactory( embeddedMetaStore ); assertNotNull( "metaStoreFactoryFirst is expected to NOT be null", metaStoreFactoryFirst ); assertNotNull( "metaStoreFactorySecond is expected to NOT be null", metaStoreFactorySecond ); assertEquals( "Called NamedClusterManager.getMetaStoreFactory twice, passing in the same EmbeddedMetaStore. " + "Both calls should return the same instance of MetaStoreFactory", metaStoreFactoryFirst, metaStoreFactorySecond ); } @Test public void testGetMetaStoreFactoryNonEmbeddedMetaStore() throws MetaStoreException { NamedClusterManager namedClusterManager = new NamedClusterManager(); MetaStoreFactory metaStoreFactoryFirst = null; MetaStoreFactory metaStoreFactorySecond = null; DelegatingMetaStore nonEmbeddedMetaStore = mock( DelegatingMetaStore.class ); // get the metastore factory - the first time called, it should create a new one and cache it metaStoreFactoryFirst = namedClusterManager.getMetaStoreFactory( nonEmbeddedMetaStore ); // get the metastore factory again - for a non-embedded metastore this should return a different (non-cached) instance metaStoreFactorySecond = namedClusterManager.getMetaStoreFactory( nonEmbeddedMetaStore ); assertNotNull( "metaStoreFactoryFirst is expected to NOT be null", metaStoreFactoryFirst ); assertNotNull( "metaStoreFactorySecond is expected to NOT be null", metaStoreFactorySecond ); assertNotEquals( "Called NamedClusterManager.getMetaStoreFactory twice, passing in the same non EmbeddedMetaStore. " + "Both calls should return different instances of MetaStoreFactory", metaStoreFactoryFirst, metaStoreFactorySecond ); } @Test public void testUpdateNamedClusterTemplate() { namedClusterManager.getClusterTemplate(); namedClusterManager.updateNamedClusterTemplate( "testHostName", 9999, true ); assertEquals( "testHostName", namedClusterManager.getClusterTemplate().getHdfsHost() ); assertEquals( "9999", namedClusterManager.getClusterTemplate().getHdfsPort() ); assertTrue( namedClusterManager.getClusterTemplate().isMapr() ); } } ================================================ FILE: impl/cluster/src/test/java/org/pentaho/big/data/impl/cluster/NamedClusterMetastoreIT.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file.
* * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.impl.cluster; import org.apache.commons.io.FileUtils; import org.apache.commons.vfs2.impl.StandardFileSystemManager; import org.junit.Before; import org.junit.Test; import org.pentaho.di.core.encryption.Encr; import org.pentaho.di.core.encryption.TwoWayPasswordEncoderPluginType; import org.pentaho.di.core.logging.KettleLogStore; import org.pentaho.di.core.logging.KettleLoggingEvent; import org.pentaho.di.core.logging.KettleLoggingEventListener; import org.pentaho.di.core.osgi.api.NamedClusterSiteFile; import org.pentaho.di.core.plugins.PluginRegistry; import org.pentaho.di.core.variables.VariableSpace; import org.pentaho.di.core.variables.Variables; import org.pentaho.hadoop.shim.api.cluster.NamedCluster; import org.pentaho.hadoop.shim.api.cluster.NamedClusterService; import org.pentaho.metastore.api.IMetaStore; import org.pentaho.metastore.api.exceptions.MetaStoreException; import org.pentaho.metastore.stores.xml.XmlMetaStore; import java.io.File; import java.io.IOException; import java.util.ArrayList; import java.util.LinkedList; import java.util.List; import java.util.UUID; import static org.junit.Assert.*; public class NamedClusterMetastoreIT { private static final String HDFS_PREFIX = "hdfs"; private VariableSpace variableSpace; private NamedClusterImpl namedCluster; private String namedClusterName; private String namedClusterHdfsHost; private String namedClusterHdfsPort; private String namedClusterHdfsUsername; private String namedClusterHdfsPassword; private String namedClusterJobTrackerPort; private String namedClusterJobTrackerHost; private String namedClusterZookeeperHost; private String namedClusterZookeeperPort; private String namedClusterOozieUrl; private String namedClusterStorageScheme; private String namedClusterKafkaBootstrapServers; private boolean isMapr; private IMetaStore metaStore; private StandardFileSystemManager fsm; private NamedClusterService namedClusterService; private String fileContents1; private String metastoreRootFolder; private KettleLoggingEventListener kettleLoggingEventListener; private LinkedList loggingEventList; @Before public void setup() throws Exception { PluginRegistry.addPluginType( TwoWayPasswordEncoderPluginType.getInstance() ); PluginRegistry.init( false ); loggingEventList = new LinkedList<>(); kettleLoggingEventListener = new KettleLoggingEventListener() { @Override public void eventAdded( KettleLoggingEvent event ) { loggingEventList.add( event ); } }; KettleLogStore.init(); KettleLogStore.getAppender().addLoggingEventListener( kettleLoggingEventListener ); Encr.init( "Kettle" ); fileContents1 = FileUtils.readFileToString( new File( getClass().getResource( "/core-site.xml" ).getFile() ), "UTF-8" ); metastoreRootFolder = System.getProperty( "java.io.tmpdir" ) + File.separator + UUID.randomUUID(); metaStore = new XmlMetaStore( metastoreRootFolder ); variableSpace = new Variables(); namedCluster = new NamedClusterImpl(); namedCluster.shareVariablesWith( variableSpace ); namedClusterName = "namedClusterName"; namedClusterHdfsHost = "namedClusterHdfsHost"; namedClusterHdfsPort = "12345"; namedClusterHdfsUsername = "namedClusterHdfsUsername"; namedClusterHdfsPassword = "namedClusterHdfsPassword"; namedClusterJobTrackerHost = "namedClusterJobTrackerHost"; namedClusterJobTrackerPort = "namedClusterJobTrackerPort"; namedClusterZookeeperHost = "namedClusterZookeeperHost"; namedClusterZookeeperPort = 
"namedClusterZookeeperPort"; namedClusterOozieUrl = "namedClusterOozieUrl"; namedClusterStorageScheme = "hdfs"; namedClusterKafkaBootstrapServers = "kafkaBootstrapServers"; isMapr = true; namedCluster.setName( namedClusterName ); namedCluster.setHdfsHost( namedClusterHdfsHost ); namedCluster.setHdfsPort( namedClusterHdfsPort ); namedCluster.setHdfsUsername( namedClusterHdfsUsername ); namedCluster.setHdfsPassword( namedCluster.encodePassword( namedClusterHdfsPassword ) ); namedCluster.setJobTrackerHost( namedClusterJobTrackerHost ); namedCluster.setJobTrackerPort( namedClusterJobTrackerPort ); namedCluster.setZooKeeperHost( namedClusterZookeeperHost ); namedCluster.setZooKeeperPort( namedClusterZookeeperPort ); namedCluster.setOozieUrl( namedClusterOozieUrl ); namedCluster.setMapr( isMapr ); namedCluster.setStorageScheme( namedClusterStorageScheme ); namedCluster.setKafkaBootstrapServers( namedClusterKafkaBootstrapServers ); namedCluster.addSiteFile( "core-site.xml", fileContents1 ); namedCluster.addSiteFile( "fileName2", "fileContents2" ); namedClusterService = new NamedClusterManager( ); } @Test public void testWriteAndRead() throws Exception { namedClusterService.create( namedCluster, metaStore ); NamedCluster nc = namedClusterService.getNamedClusterByName( namedClusterName, metaStore ); assertEquals( namedClusterName, nc.getName() ); assertEquals( fileContents1, getSiteFileContents( nc, "core-site.xml" ) ); } @Test public void testAutoEmbedSiteFiles() throws Exception { commonAutoEmbedSetupLogic(); namedClusterService.create( namedCluster, metaStore ); NamedCluster nc = namedClusterService.getNamedClusterByName( namedClusterName, metaStore ); assertEquals( namedClusterName, nc.getName() ); assertEquals( fileContents1, getSiteFileContents( nc, "core-site.xml" ) ); } @Test public void testAutoEmbedWhenUpdateMetastoreAndRecoveryFails() throws Exception { commonAutoEmbedSetupLogic(); NamedClusterService disabledNamedClusterService = new NamedClusterManager() { @Override public void update( NamedCluster namedCluster, IMetaStore metastore ) throws MetaStoreException { throw new MetaStoreException( "Something bad happened" ); } }; namedClusterService = disabledNamedClusterService; namedClusterService.create( namedCluster, metaStore ); NamedCluster nc = namedClusterService.getNamedClusterByName( namedClusterName, metaStore ); assertEquals( 4, loggingEventList.size() ); } @Test public void testAutoEmbedWhenUpdateMetastoreFails() throws Exception { commonAutoEmbedSetupLogic(); NamedClusterService disabledNamedClusterService = new NamedClusterManager() { private int counter; @Override public void update( NamedCluster namedCluster, IMetaStore metastore ) throws MetaStoreException { counter++; if ( counter == 1 ) { //Force the first update (when we try to add the site files) to fail throw new MetaStoreException( "Something bad happened" ); } else { //Thereafter the recovery update works super.update( namedCluster, metastore ); } } }; namedClusterService = disabledNamedClusterService; namedClusterService .create( namedCluster, metaStore ); //Create the namedCluster (without site files) in the metastore NamedCluster nc = namedClusterService.getNamedClusterByName( namedClusterName, metaStore ); assertEquals( 2, loggingEventList.size() ); assertEquals( namedClusterName, nc.getName() ); assert ( nc.getSiteFiles().isEmpty() ); } private void commonAutoEmbedSetupLogic() throws IOException { namedCluster.setSiteFiles( new ArrayList() ); //No site files in named cluster File destFile = new File( 
metastoreRootFolder + File.separator + "metastore" + File.separator + "pentaho" + File.separator + "NamedCluster" + File.separator + "Configs" + File.separator + "namedClusterName" + File.separator + "core-site.xml" ); destFile.getParentFile().mkdirs(); //Put a site file out on the metastore FileUtils.copyFile( new File( getClass().getResource( "/core-site.xml" ).getFile() ), destFile ); } private String getSiteFileContents( NamedCluster nc, String siteFileName ) { NamedClusterSiteFile n = nc.getSiteFiles().stream().filter( sf -> sf.getSiteFileName().equals( siteFileName ) ) .findFirst().orElse( null ); return n == null ? null : n.getSiteFileContents(); } @Test public void testCorruptedFileWithList() throws Exception { NamedClusterImpl corruptedNamedCluster = new NamedClusterImpl( namedCluster ); final String corruptedName = "corruptedNamedCluster"; corruptedNamedCluster.setName( corruptedName ); corruptedNamedCluster.addSiteFile( "core-site.xml", Character.toString( (char) 5 ) ); //Make the site file corrupt namedClusterService.create( namedCluster, metaStore ); //Write the good one ... namedClusterService.create( corruptedNamedCluster, metaStore ); //... and the bad one //We should not get an error when we try to get the cluster by name because it uses a tolerant list //The list must be tolerant or good clusters will never be returned. assertNotNull( namedClusterService.getNamedClusterByName( namedClusterName, metaStore ) ); assertNull( namedClusterService.getNamedClusterByName( corruptedName, metaStore ) ); List exceptionList = new ArrayList(); //Getting the list with a non-null exceptionList is tolerant of corrupt entries. List namedClusterList = namedClusterService.list( metaStore, exceptionList ); //The list contains the good cluster only assertEquals( 1, namedClusterList.size() ); assertEquals( namedCluster, namedClusterList.get( 0 ) ); assertEquals( 1, exceptionList.size() ); assert ( exceptionList.get( 0 ).getMessage().contains( "Could not load metaStore element '" + corruptedName + "'" ) ); //Even if we didn't ask for the exception list, NamedClusters should still be tolerant, even if the metastore would // not be.
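// Descriptive note (editor-added comment): the overload below, which takes no exception list, should likewise skip the corrupted entry instead of throwing; the test only verifies that this call completes and makes no assertion on the returned list.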
namedClusterService.list( metaStore ); } } ================================================ FILE: impl/cluster/src/test/resources/core-site.xml ================================================ fs.defaultFS hdfs://CDH61Secure fs.trash.interval 1 io.compression.codecs org.apache.hadoop.io.compress.DefaultCodec,org.apache.hadoop.io.compress.GzipCodec,org.apache.hadoop.io.compress.BZip2Codec,org.apache.hadoop.io.compress.DeflateCodec,org.apache.hadoop.io.compress.SnappyCodec,org.apache.hadoop.io.compress.Lz4Codec hadoop.security.authentication kerberos hadoop.security.authorization true hadoop.rpc.protection privacy hadoop.security.auth_to_local DEFAULT hadoop.proxyuser.oozie.hosts * hadoop.proxyuser.oozie.groups * hadoop.proxyuser.flume.hosts * hadoop.proxyuser.flume.groups * hadoop.proxyuser.HTTP.hosts * hadoop.proxyuser.HTTP.groups * hadoop.proxyuser.hive.hosts * hadoop.proxyuser.hive.groups * hadoop.proxyuser.hue.hosts * hadoop.proxyuser.hue.groups * hadoop.proxyuser.httpfs.hosts * hadoop.proxyuser.httpfs.groups * hadoop.proxyuser.hdfs.groups * hadoop.proxyuser.hdfs.hosts * hadoop.proxyuser.yarn.hosts * hadoop.proxyuser.yarn.groups * hadoop.security.group.mapping org.apache.hadoop.security.ShellBasedUnixGroupsMapping hadoop.security.instrumentation.requires.admin false net.topology.script.file.name /etc/hadoop/conf.cloudera.yarn/topology.py io.file.buffer.size 65536 hadoop.ssl.enabled true hadoop.ssl.require.client.cert false true hadoop.ssl.keystores.factory.class org.apache.hadoop.security.ssl.FileBasedKeyStoresFactory true hadoop.ssl.server.conf ssl-server.xml true hadoop.ssl.client.conf ssl-client.xml true hadoop.security.key.provider.path kms://https@svqxobcdh61secn1.pentaho.net:16000/kms ================================================ FILE: impl/cluster/src/test/resources/plugin.properties ================================================ big.data.slave.metastore.dir= ================================================ FILE: impl/clusterTests/pom.xml ================================================ 4.0.0 pentaho pentaho-big-data-impl 11.1.0.0-SNAPSHOT pentaho-big-data-impl-clusterTests 11.1.0.0-SNAPSHOT jar Pentaho Community Edition Project: ${project.artifactId} a Pentaho open source project http://www.pentaho.com site org.pentaho shim-api ${pentaho-hadoop-shims.version} pentaho pentaho-big-data-api-runtimeTest ${project.version} org.apache.kafka kafka-clients pentaho metastore ${metastore.version} provided pentaho-kettle kettle-core ${pdi.version} provided junit junit ${dependency.junit.revision} test org.mockito mockito-all ${dependency.mockito.revision} test com.google.code.bean-matchers bean-matchers ${dependency.bean-matchers.revision} test pentaho pentaho-big-data-api-runtimeTest ${project.version} tests test ================================================ FILE: impl/clusterTests/src/main/java/org/pentaho/big/data/impl/cluster/tests/ClusterRuntimeTestEntry.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. 
* * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.impl.cluster.tests; import org.pentaho.runtime.test.action.RuntimeTestAction; import org.pentaho.runtime.test.action.impl.HelpUrlPayload; import org.pentaho.runtime.test.action.impl.RuntimeTestActionImpl; import org.pentaho.runtime.test.i18n.MessageGetter; import org.pentaho.runtime.test.i18n.MessageGetterFactory; import org.pentaho.runtime.test.result.RuntimeTestEntrySeverity; import org.pentaho.runtime.test.result.RuntimeTestResultEntry; import org.pentaho.runtime.test.test.impl.RuntimeTestResultEntryImpl; import org.pentaho.di.core.Const; /** * This is a convenience class that will add a shim troubleshooting guide action if none is specified and the severity * is >= WARNING */ public class ClusterRuntimeTestEntry extends RuntimeTestResultEntryImpl { public static final String RUNTIME_TEST_RESULT_ENTRY_WITH_DEFAULT_SHIM_HELP_TROUBLESHOOTING_GUIDE = "RuntimeTestResultEntryWithDefaultShimHelp.TroubleshootingGuide"; public static final String RUNTIME_TEST_RESULT_ENTRY_WITH_DEFAULT_SHIM_HELP_SHELL_DOC = "RuntimeTestResultEntryWithDefaultShimHelp.Shell.Doc"; public static final String RUNTIME_TEST_RESULT_ENTRY_WITH_DEFAULT_SHIM_HELP_SHELL_DOC_TITLE = "RuntimeTestResultEntryWithDefaultShimHelp.Shell.Doc.Title"; public static final String RUNTIME_TEST_RESULT_ENTRY_WITH_DEFAULT_SHIM_HELP_SHELL_DOC_HEADER = "RuntimeTestResultEntryWithDefaultShimHelp.Shell.Doc.Header"; private static final Class PKG = ClusterRuntimeTestEntry.class; public ClusterRuntimeTestEntry( MessageGetterFactory messageGetterFactory, RuntimeTestEntrySeverity severity, String description, String message, DocAnchor docAnchor ) { this( messageGetterFactory, severity, description, message, null, docAnchor ); } public ClusterRuntimeTestEntry( RuntimeTestEntrySeverity severity, String description, String message, RuntimeTestAction runtimeTestAction ) { this( severity, description, message, null, runtimeTestAction ); } public ClusterRuntimeTestEntry( MessageGetterFactory messageGetterFactory, RuntimeTestResultEntry runtimeTestResultEntry, DocAnchor docAnchor ) { this( runtimeTestResultEntry.getSeverity(), runtimeTestResultEntry.getDescription(), runtimeTestResultEntry.getMessage(), runtimeTestResultEntry.getException(), getDefaultAction( messageGetterFactory, runtimeTestResultEntry, docAnchor ) ); } public ClusterRuntimeTestEntry( MessageGetterFactory messageGetterFactory, RuntimeTestEntrySeverity severity, String description, String message, Throwable exception, DocAnchor docAnchor ) { this( severity, description, message, exception, createDefaultAction( messageGetterFactory, severity, docAnchor ) ); } public ClusterRuntimeTestEntry( RuntimeTestEntrySeverity severity, String description, String message, Throwable exception, RuntimeTestAction runtimeTestAction ) { super( severity, description, message, exception, runtimeTestAction ); } private static RuntimeTestAction getDefaultAction( MessageGetterFactory messageGetterFactory, RuntimeTestResultEntry runtimeTestResultEntry, DocAnchor docAnchor ) { RuntimeTestAction action = runtimeTestResultEntry.getAction(); if ( action != null ) { return action; } return createDefaultAction( messageGetterFactory, runtimeTestResultEntry.getSeverity(), docAnchor ); } private static RuntimeTestAction createDefaultAction( MessageGetterFactory messageGetterFactory, RuntimeTestEntrySeverity severity, DocAnchor docAnchor ) { if ( severity == null || severity.ordinal() 
>= RuntimeTestEntrySeverity.WARNING.ordinal() ) { MessageGetter messageGetter = messageGetterFactory.create( PKG ); String docUrl = Const.getDocUrl( messageGetter.getMessage( RUNTIME_TEST_RESULT_ENTRY_WITH_DEFAULT_SHIM_HELP_SHELL_DOC ) ); if ( docAnchor != null ) { docUrl += messageGetter.getMessage( docAnchor.getAnchorTextKey() ); } return new RuntimeTestActionImpl( messageGetter.getMessage( RUNTIME_TEST_RESULT_ENTRY_WITH_DEFAULT_SHIM_HELP_TROUBLESHOOTING_GUIDE ), docUrl, severity, new HelpUrlPayload( messageGetterFactory, messageGetter.getMessage( RUNTIME_TEST_RESULT_ENTRY_WITH_DEFAULT_SHIM_HELP_SHELL_DOC_TITLE ), messageGetter.getMessage( RUNTIME_TEST_RESULT_ENTRY_WITH_DEFAULT_SHIM_HELP_SHELL_DOC_HEADER ), docUrl ) ); } return null; } public enum DocAnchor { GENERAL( "RuntimeTestResultEntryWithDefaultShimHelp.Shell.Doc.Anchor.General" ), SHIM_LOAD( "RuntimeTestResultEntryWithDefaultShimHelp.Shell.Doc.Anchor.ShimLoad" ), CLUSTER_CONNECT( "RuntimeTestResultEntryWithDefaultShimHelp.Shell.Doc.Anchor.ClusterConnect" ), CLUSTER_CONNECT_GATEWAY( "RuntimeTestResultEntryWithDefaultShimHelp.Shell.Doc.Anchor.ClusterConnectGateway" ), ACCESS_DIRECTORY( "RuntimeTestResultEntryWithDefaultShimHelp.Shell.Doc.Anchor.AccessDirectory" ), OOZIE( "RuntimeTestResultEntryWithDefaultShimHelp.Shell.Doc.Anchor.Oozie" ), ZOOKEEPER( "RuntimeTestResultEntryWithDefaultShimHelp.Shell.Doc.Anchor.Zookeeper" ), KAFKA( "RuntimeTestResultEntryWithDefaultShimHelp.Shell.Doc.Anchor.Kafka" ); private final String anchorTextKey; DocAnchor( String anchorTextKey ) { this.anchorTextKey = anchorTextKey; } public String getAnchorTextKey() { return anchorTextKey; } } } ================================================ FILE: impl/clusterTests/src/main/java/org/pentaho/big/data/impl/cluster/tests/Constants.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.impl.cluster.tests; /** * Created by bryan on 8/14/15. */ public class Constants { public static final String HADOOP_FILE_SYSTEM = "Hadoop File System"; public static final String MAP_REDUCE = "Map Reduce"; public static final String OOZIE = "Oozie"; public static final String ZOOKEEPER = "Zookeeper"; public static final String KAFKA = "Kafka"; } ================================================ FILE: impl/clusterTests/src/main/java/org/pentaho/big/data/impl/cluster/tests/hdfs/GatewayListHomeDirectoryTest.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. 
* * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.impl.cluster.tests.hdfs; import org.pentaho.big.data.impl.cluster.tests.ClusterRuntimeTestEntry; import org.pentaho.di.core.variables.Variables; import org.pentaho.hadoop.shim.api.cluster.NamedCluster; import org.pentaho.hadoop.shim.api.hdfs.HadoopFileSystemLocator; import org.pentaho.runtime.test.i18n.MessageGetterFactory; import org.pentaho.runtime.test.network.ConnectivityTestFactory; import org.pentaho.runtime.test.result.RuntimeTestResultSummary; import org.pentaho.runtime.test.result.org.pentaho.runtime.test.result.impl.RuntimeTestResultSummaryImpl; /** * Created by vamshidhar on 02/02/23. */ public class GatewayListHomeDirectoryTest extends ListHomeDirectoryTest { public static final String TEST_PATH = "/webhdfs/v1/~?op=LISTSTATUS"; private final ConnectivityTestFactory connectivityTestFactory; public GatewayListHomeDirectoryTest( MessageGetterFactory messageGetterFactory, ConnectivityTestFactory connectivityTestFactory, HadoopFileSystemLocator fileSystemLocator ) { super( messageGetterFactory, fileSystemLocator ); this.connectivityTestFactory = connectivityTestFactory; } @Override public RuntimeTestResultSummary runTest( Object objectUnderTest ) { // Safe to cast as our accepts method will only return true for named clusters NamedCluster namedCluster = (NamedCluster) objectUnderTest; // The connection information might be parameterized. Since we aren't tied to a transformation or job, in order to // use a parameter, the value would have to be set as a system property or in kettle.properties, etc. // Here we try to resolve the parameters if we can: Variables variables = new Variables(); variables.initializeVariablesFrom( null ); if ( !namedCluster.isUseGateway() ) { return super.runTest( objectUnderTest ); } else { return new RuntimeTestResultSummaryImpl( new ClusterRuntimeTestEntry( messageGetterFactory, connectivityTestFactory.create( messageGetterFactory, variables.environmentSubstitute( namedCluster.decodePassword( namedCluster.getGatewayUrl() ) ), TEST_PATH, variables.environmentSubstitute( namedCluster.getGatewayUsername() ), variables.environmentSubstitute( namedCluster.decodePassword( namedCluster.getGatewayPassword() ) ) ) .runTest(), ClusterRuntimeTestEntry.DocAnchor.CLUSTER_CONNECT_GATEWAY ) ); } } } ================================================ FILE: impl/clusterTests/src/main/java/org/pentaho/big/data/impl/cluster/tests/hdfs/GatewayListRootDirectoryTest.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. 
* * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.impl.cluster.tests.hdfs; import org.pentaho.big.data.impl.cluster.tests.ClusterRuntimeTestEntry; import org.pentaho.di.core.variables.Variables; import org.pentaho.hadoop.shim.api.cluster.NamedCluster; import org.pentaho.hadoop.shim.api.hdfs.HadoopFileSystemLocator; import org.pentaho.runtime.test.i18n.MessageGetterFactory; import org.pentaho.runtime.test.network.ConnectivityTestFactory; import org.pentaho.runtime.test.result.RuntimeTestResultSummary; import org.pentaho.runtime.test.result.org.pentaho.runtime.test.result.impl.RuntimeTestResultSummaryImpl; /** * Created by vamshidhar on 02/02/23. */ public class GatewayListRootDirectoryTest extends ListRootDirectoryTest { public static final String TEST_PATH = "/webhdfs/v1/?op=LISTSTATUS"; private final ConnectivityTestFactory connectivityTestFactory; public GatewayListRootDirectoryTest( MessageGetterFactory messageGetterFactory, ConnectivityTestFactory connectivityTestFactory, HadoopFileSystemLocator hadoopFileSystemLocator ) { super( messageGetterFactory, hadoopFileSystemLocator); this.connectivityTestFactory = connectivityTestFactory; } @Override public RuntimeTestResultSummary runTest( Object objectUnderTest ) { // Safe to cast as our accepts method will only return true for named clusters NamedCluster namedCluster = (NamedCluster) objectUnderTest; Variables variables = new Variables(); variables.initializeVariablesFrom( null ); if ( !namedCluster.isUseGateway() ) { return super.runTest( objectUnderTest ); } else { return new RuntimeTestResultSummaryImpl( new ClusterRuntimeTestEntry( messageGetterFactory, connectivityTestFactory.create( messageGetterFactory, variables.environmentSubstitute( namedCluster.decodePassword( namedCluster.getGatewayUrl() ) ), TEST_PATH, variables.environmentSubstitute( namedCluster.getGatewayUsername() ), variables.environmentSubstitute( namedCluster.decodePassword( namedCluster.getGatewayPassword() ) ) ) .runTest(), ClusterRuntimeTestEntry.DocAnchor.CLUSTER_CONNECT_GATEWAY ) ); } } } ================================================ FILE: impl/clusterTests/src/main/java/org/pentaho/big/data/impl/cluster/tests/hdfs/GatewayPingFileSystemEntryPoint.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.impl.cluster.tests.hdfs; import org.pentaho.hadoop.shim.api.cluster.NamedCluster; import org.pentaho.big.data.impl.cluster.tests.ClusterRuntimeTestEntry; import org.pentaho.di.core.variables.Variables; import org.pentaho.runtime.test.i18n.MessageGetterFactory; import org.pentaho.runtime.test.network.ConnectivityTestFactory; import org.pentaho.runtime.test.result.RuntimeTestResultSummary; import org.pentaho.runtime.test.result.org.pentaho.runtime.test.result.impl.RuntimeTestResultSummaryImpl; /** * Created by dstepanov on 26/04/17. 
*/ public class GatewayPingFileSystemEntryPoint extends PingFileSystemEntryPointTest { public static final String TEST_PATH = "/webhdfs/v1/?op=LISTSTATUS"; public GatewayPingFileSystemEntryPoint( MessageGetterFactory messageGetterFactory, ConnectivityTestFactory connectivityTestFactory ) { super( messageGetterFactory, connectivityTestFactory ); } @Override public RuntimeTestResultSummary runTest( Object objectUnderTest ) { // Safe to cast as our accepts method will only return true for named clusters NamedCluster namedCluster = (NamedCluster) objectUnderTest; // The connection information might be parameterized. Since we aren't tied to a transformation or job, in order to // use a parameter, the value would have to be set as a system property or in kettle.properties, etc. // Here we try to resolve the parameters if we can: Variables variables = new Variables(); variables.initializeVariablesFrom( null ); if ( !namedCluster.isUseGateway() ) { return super.runTest( objectUnderTest ); } else { return new RuntimeTestResultSummaryImpl( new ClusterRuntimeTestEntry( messageGetterFactory, connectivityTestFactory.create( messageGetterFactory, variables.environmentSubstitute( namedCluster.decodePassword( namedCluster.getGatewayUrl() ) ), TEST_PATH, variables.environmentSubstitute( namedCluster.getGatewayUsername() ), variables.environmentSubstitute( namedCluster.decodePassword( namedCluster.getGatewayPassword() ) ) ) .runTest(), ClusterRuntimeTestEntry.DocAnchor.CLUSTER_CONNECT_GATEWAY ) ); } } } ================================================ FILE: impl/clusterTests/src/main/java/org/pentaho/big/data/impl/cluster/tests/hdfs/GatewayWriteToAndDeleteFromUsersHomeFolderTest.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. 
* * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.impl.cluster.tests.hdfs; import org.apache.commons.lang.StringUtils; import org.apache.http.Header; import org.apache.http.HttpResponse; import org.apache.http.client.methods.HttpDelete; import org.apache.http.client.methods.HttpGet; import org.apache.http.client.methods.HttpPut; import org.apache.http.client.methods.HttpUriRequest; import org.apache.http.client.protocol.HttpClientContext; import org.apache.http.entity.StringEntity; import org.apache.http.impl.client.CloseableHttpClient; import org.pentaho.big.data.impl.cluster.tests.ClusterRuntimeTestEntry; import org.pentaho.di.core.util.HttpClientManager; import org.pentaho.di.core.util.HttpClientUtil; import org.pentaho.di.core.variables.Variables; import org.pentaho.hadoop.shim.api.cluster.NamedCluster; import org.pentaho.hadoop.shim.api.hdfs.HadoopFileSystemLocator; import org.pentaho.runtime.test.i18n.MessageGetterFactory; import org.pentaho.runtime.test.result.RuntimeTestEntrySeverity; import org.pentaho.runtime.test.result.RuntimeTestResultEntry; import org.pentaho.runtime.test.result.RuntimeTestResultSummary; import org.pentaho.runtime.test.result.org.pentaho.runtime.test.result.impl.RuntimeTestResultSummaryImpl; import org.pentaho.runtime.test.test.impl.RuntimeTestResultEntryImpl; import javax.net.ssl.KeyManager; import javax.net.ssl.SSLContext; import javax.net.ssl.TrustManager; import javax.net.ssl.X509TrustManager; import java.io.IOException; import java.io.UnsupportedEncodingException; import java.net.URI; import java.security.KeyManagementException; import java.security.NoSuchAlgorithmException; import java.security.SecureRandom; import java.security.cert.X509Certificate; import java.util.Arrays; /** * Created by vamshidhar on 8/14/15. */ public class GatewayWriteToAndDeleteFromUsersHomeFolderTest extends WriteToAndDeleteFromUsersHomeFolderTest { public static final String CONNECT_TEST_HOST_BLANK_DESC = "WriteToAndDeleteFromUsersHomeFolderTest.HostBlank.Desc"; public static final String CONNECT_TEST_HOST_BLANK_MESSAGE = "WriteToAndDeleteFromUsersHomeFolderTest.HostBlank.Message"; public static final String CONNECT_FILE_SYSTEM_TEST_PATH = "/webhdfs/v1/?op=LISTSTATUS"; public static final String PENTAHO_SHIM_TEST_FILE_PATH = "/webhdfs/v1/~/pentaho-shim-test-file.test?op=LISTSTATUS"; public static final String PENTAHO_SHIM_TEST_FILE_PATH_DELETE = "/webhdfs/v1/~/pentaho-shim-test-file.test?op=DELETE"; public static final String PENTAHO_SHIM_TEST_FILE_PATH_CREATE = "/webhdfs/v1/~/pentaho-shim-test-file.test?op=CREATE"; private final HttpClientManager httpClientManager = HttpClientManager.getInstance(); public GatewayWriteToAndDeleteFromUsersHomeFolderTest( MessageGetterFactory messageGetterFactory, HadoopFileSystemLocator hadoopFileSystemLocator ) { super( messageGetterFactory, hadoopFileSystemLocator ); } @Override public RuntimeTestResultSummary runTest( Object objectUnderTest ) { // Safe to cast as our accepts method will only return true for named clusters NamedCluster namedCluster = (NamedCluster) objectUnderTest; // The connection information might be parameterized. Since we aren't tied to a transformation or job, in order to // use a parameter, the value would have to be set as a system property or in kettle.properties, etc. 
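    // WebHDFS call sequence used by the gateway path of this test (see the helper methods below):
    //   1. GET    <gatewayUrl>/webhdfs/v1/~/pentaho-shim-test-file.test?op=LISTSTATUS -> 200 means the file already exists
    //   2. PUT    <gatewayUrl>/webhdfs/v1/~/pentaho-shim-test-file.test?op=CREATE     -> 307 with a Location header for the follow-up write
    //   3. PUT    <redirect location> with the "Hello, Cluster" payload               -> 201 means the write succeeded
    //   4. DELETE <gatewayUrl>/webhdfs/v1/~/pentaho-shim-test-file.test?op=DELETE     -> 200 means the cleanup succeeded
    // Any failure is reported as a WARNING/FATAL ClusterRuntimeTestEntry rather than thrown, and certificate
    // validation for these calls can be switched off by setting KETTLE_KNOX_IGNORE_SSL=true.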
// Here we try to resolve the parameters if we can: Variables variables = new Variables(); variables.initializeVariablesFrom( null ); if ( !namedCluster.isUseGateway() ) { return super.runTest( objectUnderTest ); } else { String url = namedCluster.decodePassword( namedCluster.getGatewayUrl() ); String password = namedCluster.decodePassword( variables.environmentSubstitute( namedCluster.getGatewayPassword() ) ); String username = variables.environmentSubstitute( namedCluster.getGatewayUsername() ); URI uri = URI.create( url ); String hostname = uri.getHost(); int port = uri.getPort(); boolean ignoreSSL = variables.getBooleanValueOfVariable( "${KETTLE_KNOX_IGNORE_SSL}", false ); if ( StringUtils.isBlank( hostname ) ) { return new RuntimeTestResultSummaryImpl( new RuntimeTestResultEntryImpl( RuntimeTestEntrySeverity.WARNING, messageGetter.getMessage( CONNECT_TEST_HOST_BLANK_DESC ), messageGetter.getMessage( CONNECT_TEST_HOST_BLANK_MESSAGE ) ) ); } boolean exists; try { exists = doesFileExists( url, username, password, port, ignoreSSL ); } catch ( IOException | NoSuchAlgorithmException | KeyManagementException e ) { return new RuntimeTestResultSummaryImpl( new ClusterRuntimeTestEntry( messageGetterFactory, RuntimeTestEntrySeverity.FATAL, messageGetter .getMessage( WRITE_TO_AND_DELETE_FROM_USERS_HOME_FOLDER_TEST_ERROR_CHECKING_IF_FILE_EXISTS_DESC ), messageGetter .getMessage( WRITE_TO_AND_DELETE_FROM_USERS_HOME_FOLDER_TEST_ERROR_CHECKING_IF_FILE_EXISTS_MESSAGE, CONNECT_FILE_SYSTEM_TEST_PATH, CONNECT_FILE_SYSTEM_TEST_PATH ), e, ClusterRuntimeTestEntry.DocAnchor.ACCESS_DIRECTORY ) ); } if ( exists ) { return new RuntimeTestResultSummaryImpl( new ClusterRuntimeTestEntry( messageGetterFactory, RuntimeTestEntrySeverity.WARNING, messageGetter.getMessage( WRITE_TO_AND_DELETE_FROM_USERS_HOME_FOLDER_TEST_FILE_EXISTS_DESC ), messageGetter.getMessage( WRITE_TO_AND_DELETE_FROM_USERS_HOME_FOLDER_TEST_FILE_EXISTS_MESSAGE, CONNECT_FILE_SYSTEM_TEST_PATH, CONNECT_FILE_SYSTEM_TEST_PATH ), ClusterRuntimeTestEntry.DocAnchor.ACCESS_DIRECTORY ) ); } else { String fileLocationUrl; try { fileLocationUrl = createFile( url, username, password, port, ignoreSSL ); } catch ( IOException | NoSuchAlgorithmException | KeyManagementException e ) { return new RuntimeTestResultSummaryImpl( new ClusterRuntimeTestEntry( messageGetterFactory, RuntimeTestEntrySeverity.WARNING, messageGetter.getMessage( WRITE_TO_AND_DELETE_FROM_USERS_HOME_FOLDER_TEST_ERROR_CREATING_FILE_DESC ), messageGetter.getMessage( WRITE_TO_AND_DELETE_FROM_USERS_HOME_FOLDER_TEST_ERROR_CREATING_FILE_MESSAGE, CONNECT_FILE_SYSTEM_TEST_PATH, CONNECT_FILE_SYSTEM_TEST_PATH ), e, ClusterRuntimeTestEntry.DocAnchor.ACCESS_DIRECTORY ) ); } RuntimeTestResultEntry writeExceptionEntry = null; try { if ( !appendContentToFile( fileLocationUrl, username, password, port, ignoreSSL ) ) { writeExceptionEntry = new ClusterRuntimeTestEntry( messageGetterFactory, RuntimeTestEntrySeverity.WARNING, messageGetter .getMessage( WRITE_TO_AND_DELETE_FROM_USERS_HOME_FOLDER_TEST_ERROR_WRITING_TO_FILE_DESC ), messageGetter.getMessage( WRITE_TO_AND_DELETE_FROM_USERS_HOME_FOLDER_TEST_ERROR_WRITING_TO_FILE_MESSAGE, CONNECT_FILE_SYSTEM_TEST_PATH, CONNECT_FILE_SYSTEM_TEST_PATH ), ClusterRuntimeTestEntry.DocAnchor.ACCESS_DIRECTORY ); } } catch ( IOException | NoSuchAlgorithmException | KeyManagementException e ) { writeExceptionEntry = new ClusterRuntimeTestEntry( messageGetterFactory, RuntimeTestEntrySeverity.WARNING, messageGetter .getMessage( 
WRITE_TO_AND_DELETE_FROM_USERS_HOME_FOLDER_TEST_ERROR_WRITING_TO_FILE_DESC ), messageGetter.getMessage( WRITE_TO_AND_DELETE_FROM_USERS_HOME_FOLDER_TEST_ERROR_WRITING_TO_FILE_MESSAGE, CONNECT_FILE_SYSTEM_TEST_PATH, CONNECT_FILE_SYSTEM_TEST_PATH ), e, ClusterRuntimeTestEntry.DocAnchor.ACCESS_DIRECTORY ); } try { if ( deleteFile( url, username, password, port, ignoreSSL ) ) { if ( writeExceptionEntry == null ) { return new RuntimeTestResultSummaryImpl( new ClusterRuntimeTestEntry( messageGetterFactory, RuntimeTestEntrySeverity.INFO, messageGetter.getMessage( WRITE_TO_AND_DELETE_FROM_USERS_HOME_FOLDER_TEST_SUCCESS_DESC ), messageGetter.getMessage( WRITE_TO_AND_DELETE_FROM_USERS_HOME_FOLDER_TEST_SUCCESS_MESSAGE, PENTAHO_SHIM_TEST_FILE_PATH ), ClusterRuntimeTestEntry.DocAnchor.ACCESS_DIRECTORY ) ); } else { return new RuntimeTestResultSummaryImpl( writeExceptionEntry ); } } else { if ( writeExceptionEntry == null ) { return new RuntimeTestResultSummaryImpl( new ClusterRuntimeTestEntry( messageGetterFactory, RuntimeTestEntrySeverity.WARNING, messageGetter.getMessage( WRITE_TO_AND_DELETE_FROM_USERS_HOME_FOLDER_TEST_UNABLE_TO_DELETE_DESC ), messageGetter.getMessage( WRITE_TO_AND_DELETE_FROM_USERS_HOME_FOLDER_TEST_UNABLE_TO_DELETE_MESSAGE, PENTAHO_SHIM_TEST_FILE_PATH, PENTAHO_SHIM_TEST_FILE_PATH ), ClusterRuntimeTestEntry.DocAnchor.ACCESS_DIRECTORY ) ); } else { return new RuntimeTestResultSummaryImpl( writeExceptionEntry ); } } } catch ( IOException | NoSuchAlgorithmException | KeyManagementException e ) { RuntimeTestResultEntryImpl deleteExceptionEntry = new ClusterRuntimeTestEntry( messageGetterFactory, RuntimeTestEntrySeverity.WARNING, messageGetter .getMessage( WRITE_TO_AND_DELETE_FROM_USERS_HOME_FOLDER_TEST_ERROR_DELETING_FILE_DESC ), messageGetter.getMessage( WRITE_TO_AND_DELETE_FROM_USERS_HOME_FOLDER_TEST_ERROR_DELETING_FILE_MESSAGE, PENTAHO_SHIM_TEST_FILE_PATH, PENTAHO_SHIM_TEST_FILE_PATH ), e, ClusterRuntimeTestEntry.DocAnchor.ACCESS_DIRECTORY ); if ( writeExceptionEntry == null ) { return new RuntimeTestResultSummaryImpl( deleteExceptionEntry ); } else { return new RuntimeTestResultSummaryImpl( new ClusterRuntimeTestEntry( messageGetterFactory, RuntimeTestEntrySeverity.WARNING, messageGetter.getMessage( WRITE_TO_AND_DELETE_FROM_USERS_HOME_FOLDER_TEST_ERROR_WRITING_DELETING_FILE_DESC ), messageGetter .getMessage( WRITE_TO_AND_DELETE_FROM_USERS_HOME_FOLDER_TEST_ERROR_WRITING_DELETING_FILE_MESSAGE ), ClusterRuntimeTestEntry.DocAnchor.ACCESS_DIRECTORY ), Arrays.asList( writeExceptionEntry, deleteExceptionEntry ) ); } } } } } private boolean doesFileExists( String url, String user, String password, int port, boolean ignoreSSL ) throws NoSuchAlgorithmException, IOException, KeyManagementException { ServerResponse response = runServiceTest( RequestType.GET, URI.create( url + PENTAHO_SHIM_TEST_FILE_PATH ), user, password, port, ignoreSSL ); return response.getStatusCode() == 200; } private String createFile( String url, String user, String password, int port, boolean ignoreSSL ) throws NoSuchAlgorithmException, IOException, KeyManagementException { ServerResponse responseCreate = runServiceTest( RequestType.CREATE, URI.create( url + PENTAHO_SHIM_TEST_FILE_PATH_CREATE ), user, password, port, ignoreSSL ); if ( responseCreate.getStatusCode() == 307 ) { return responseCreate.getLocationHeader(); } else { return null; } } private boolean appendContentToFile( String location, String user, String password, int port, boolean ignoreSSL ) throws NoSuchAlgorithmException, IOException, 
KeyManagementException { ServerResponse responseWrite = runServiceTest( RequestType.APPEND, URI.create( location ), user, password, port, ignoreSSL ); return responseWrite.getStatusCode() == 201; } private boolean deleteFile( String url, String user, String password, int port, boolean ignoreSSL ) throws NoSuchAlgorithmException, IOException, KeyManagementException { ServerResponse response = runServiceTest( RequestType.DELETE, URI.create( url + PENTAHO_SHIM_TEST_FILE_PATH_DELETE ), user, password, port, ignoreSSL ); return response.getStatusCode() == 200; } private ServerResponse runServiceTest( RequestType requestType, URI uri, String user, String password, int port, boolean ignoreSSL ) throws NoSuchAlgorithmException, KeyManagementException, IOException { // Ignore ssl certificate issues if KETTLE_KNOX_IGNORE_SSL = true if ( ignoreSSL ) { SSLContext ctx = getTlsContext(); initContextWithTrustAll( ctx ); SSLContext.setDefault( ctx ); } HttpClientContext context = null; HttpUriRequest method = getHttpRequestMethod( requestType, uri ); CloseableHttpClient httpClient = null; try { if ( StringUtils.isNotBlank( user ) ) { httpClient = getHttpClient( user, password ); context = HttpClientUtil.createPreemptiveBasicAuthentication( uri.getHost(), port, user, password ); } else { httpClient = httpClientManager.createDefaultClient(); } HttpResponse httpResponse = context != null ? httpClient.execute( method, context ) : httpClient.execute( method ); Header locationHeader = httpResponse.getFirstHeader( "Location" ); return new ServerResponse( locationHeader != null ? locationHeader.getValue() : null, httpResponse.getStatusLine().getStatusCode() ); } finally { if ( httpClient != null ) { httpClient.close(); } } } private HttpUriRequest getHttpRequestMethod( RequestType requestType, URI uri ) throws UnsupportedEncodingException { if ( requestType == RequestType.GET ) { return new HttpGet( uri.toString() ); } else if ( requestType == RequestType.APPEND ) { HttpPut putMethod = new HttpPut( uri ); putMethod.setEntity( new StringEntity( HELLO_CLUSTER ) ); return putMethod; } else if ( requestType == RequestType.CREATE ) { return new HttpPut( uri ); } else if ( requestType == RequestType.DELETE ) { return new HttpDelete( uri ); } return null; } void initContextWithTrustAll( SSLContext ctx ) throws KeyManagementException { ctx.init( new KeyManager[ 0 ], new TrustManager[] { new X509TrustManager() { @Override public void checkClientTrusted( X509Certificate[] x509Certificates, String s ) { // Nothing to do } @Override public void checkServerTrusted( X509Certificate[] x509Certificates, String s ) { // Nothing to do } @Override public X509Certificate[] getAcceptedIssuers() { return new X509Certificate[ 0 ]; } } }, new SecureRandom() ); } SSLContext getTlsContext() throws NoSuchAlgorithmException { return SSLContext.getInstance( "TLS" ); } CloseableHttpClient getHttpClient( String user, String password ) { HttpClientManager.HttpClientBuilderFacade clientBuilder = httpClientManager.createBuilder(); clientBuilder.setCredentials( user, password ); return clientBuilder.build(); } enum RequestType { GET, APPEND, CREATE, DELETE } static class ServerResponse { private final String locationHeader; private final int statusCode; ServerResponse( String locationHeader, int statusCode ) { this.locationHeader = locationHeader; this.statusCode = statusCode; } public String getLocationHeader() { return locationHeader; } public int getStatusCode() { return statusCode; } } } ================================================ FILE: 
impl/clusterTests/src/main/java/org/pentaho/big/data/impl/cluster/tests/hdfs/ListDirectoryTest.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.impl.cluster.tests.hdfs; import org.pentaho.hadoop.shim.api.cluster.NamedCluster; import org.pentaho.hadoop.shim.api.cluster.ClusterInitializationException; import org.pentaho.big.data.impl.cluster.tests.ClusterRuntimeTestEntry; import org.pentaho.big.data.impl.cluster.tests.Constants; import org.pentaho.hadoop.shim.api.hdfs.HadoopFileSystemLocator; import org.pentaho.hadoop.shim.api.hdfs.exceptions.AccessControlException; import org.pentaho.di.core.Const; import org.pentaho.hadoop.shim.api.hdfs.HadoopFileStatus; import org.pentaho.hadoop.shim.api.hdfs.HadoopFileSystem; import org.pentaho.hadoop.shim.api.hdfs.HadoopFileSystemPath; import org.pentaho.runtime.test.i18n.MessageGetter; import org.pentaho.runtime.test.i18n.MessageGetterFactory; import org.pentaho.runtime.test.result.RuntimeTestEntrySeverity; import org.pentaho.runtime.test.result.RuntimeTestResultSummary; import org.pentaho.runtime.test.result.org.pentaho.runtime.test.result.impl.RuntimeTestResultSummaryImpl; import org.pentaho.runtime.test.test.impl.BaseRuntimeTest; import java.io.IOException; import java.util.Arrays; import java.util.HashSet; /** * Created by bryan on 8/14/15. */ public class ListDirectoryTest extends BaseRuntimeTest { public static final String LIST_DIRECTORY_TEST_COULDNT_GET_FILE_SYSTEM_DESC = "ListDirectoryTest.CouldntGetFileSystem.Desc"; public static final String LIST_DIRECTORY_TEST_COULDNT_GET_FILE_SYSTEM_MESSAGE = "ListDirectoryTest.CouldntGetFileSystem.Message"; public static final String LIST_DIRECTORY_TEST_SUCCESS_DESC = "ListDirectoryTest.Success.Desc"; public static final String LIST_DIRECTORY_TEST_SUCCESS_MESSAGE = "ListDirectoryTest.Success.Message"; public static final String LIST_DIRECTORY_TEST_ACCESS_CONTROL_EXCEPTION_DESC = "ListDirectoryTest.AccessControlException.Desc"; public static final String LIST_DIRECTORY_TEST_ACCESS_CONTROL_EXCEPTION_MESSAGE = "ListDirectoryTest.AccessControlException.Message"; public static final String LIST_DIRECTORY_TEST_ERROR_LISTING_DIRECTORY_DESC = "ListDirectoryTest.ErrorListingDirectory.Desc"; public static final String LIST_DIRECTORY_TEST_ERROR_LISTING_DIRECTORY_MESSAGE = "ListDirectoryTest.ErrorListingDirectory.Message"; public static final String LIST_DIRECTORY_TEST_ERROR_INITIALIZING_CLUSTER_DESC = "ListDirectoryTest.ErrorInitializingCluster.Desc"; public static final String LIST_DIRECTORY_TEST_ERROR_INITIALIZING_CLUSTER_MESSAGE = "ListDirectoryTest.ErrorInitializingCluster.Message"; private static final Class PKG = ListDirectoryTest.class; private final HadoopFileSystemLocator hadoopFileSystemLocator; private final String directory; protected final MessageGetterFactory messageGetterFactory; private final MessageGetter messageGetter; public ListDirectoryTest( MessageGetterFactory messageGetterFactory, HadoopFileSystemLocator hadoopFileSystemLocator, String directory, String id, String name ) { super( NamedCluster.class, Constants.HADOOP_FILE_SYSTEM, id, name, new HashSet<>( Arrays.asList( 
PingFileSystemEntryPointTest.HADOOP_FILE_SYSTEM_PING_FILE_SYSTEM_ENTRY_POINT_TEST ) ) ); this.hadoopFileSystemLocator = hadoopFileSystemLocator; this.directory = directory; this.messageGetterFactory = messageGetterFactory; this.messageGetter = messageGetterFactory.create( PKG ); } @Override public RuntimeTestResultSummary runTest( Object objectUnderTest ) { // Safe to cast as our accepts method will only return true for named clusters NamedCluster namedCluster = (NamedCluster) objectUnderTest; try { HadoopFileSystem hadoopFilesystem = hadoopFileSystemLocator.getHadoopFilesystem( namedCluster ); if ( hadoopFilesystem == null ) { return new RuntimeTestResultSummaryImpl( new ClusterRuntimeTestEntry( messageGetterFactory, RuntimeTestEntrySeverity.FATAL, messageGetter.getMessage( LIST_DIRECTORY_TEST_COULDNT_GET_FILE_SYSTEM_DESC ), messageGetter.getMessage( LIST_DIRECTORY_TEST_COULDNT_GET_FILE_SYSTEM_MESSAGE, directory ), ClusterRuntimeTestEntry.DocAnchor.ACCESS_DIRECTORY ) ); } else { HadoopFileSystemPath hadoopFilesystemPath; if ( Const.isEmpty( directory ) ) { hadoopFilesystemPath = hadoopFilesystem.getHomeDirectory(); } else { hadoopFilesystemPath = hadoopFilesystem.getPath( directory ); } try { HadoopFileStatus[] hadoopFileStatuses = hadoopFilesystem.listStatus( hadoopFilesystemPath ); StringBuilder paths = new StringBuilder(); for ( HadoopFileStatus hadoopFileStatus : hadoopFileStatuses ) { paths.append( hadoopFileStatus.getPath() ); paths.append( ", " ); } if ( paths.length() > 0 ) { paths.setLength( paths.length() - 2 ); } return new RuntimeTestResultSummaryImpl( new ClusterRuntimeTestEntry( messageGetterFactory, RuntimeTestEntrySeverity.INFO, messageGetter.getMessage( LIST_DIRECTORY_TEST_SUCCESS_DESC ), messageGetter.getMessage( LIST_DIRECTORY_TEST_SUCCESS_MESSAGE, paths.toString() ), ClusterRuntimeTestEntry.DocAnchor.ACCESS_DIRECTORY ) ); } catch ( AccessControlException e ) { return new RuntimeTestResultSummaryImpl( new ClusterRuntimeTestEntry( messageGetterFactory, RuntimeTestEntrySeverity.WARNING, messageGetter.getMessage( LIST_DIRECTORY_TEST_ACCESS_CONTROL_EXCEPTION_DESC ), messageGetter .getMessage( LIST_DIRECTORY_TEST_ACCESS_CONTROL_EXCEPTION_MESSAGE, hadoopFilesystemPath.toString() ), e, ClusterRuntimeTestEntry.DocAnchor.ACCESS_DIRECTORY ) ); } catch ( IOException e ) { return new RuntimeTestResultSummaryImpl( new ClusterRuntimeTestEntry( messageGetterFactory, RuntimeTestEntrySeverity.FATAL, messageGetter.getMessage( LIST_DIRECTORY_TEST_ERROR_LISTING_DIRECTORY_DESC ), messageGetter.getMessage( LIST_DIRECTORY_TEST_ERROR_LISTING_DIRECTORY_MESSAGE, hadoopFilesystemPath.toString() ), e, ClusterRuntimeTestEntry.DocAnchor.ACCESS_DIRECTORY ) ); } } } catch ( ClusterInitializationException e ) { return new RuntimeTestResultSummaryImpl( new ClusterRuntimeTestEntry( messageGetterFactory, RuntimeTestEntrySeverity.FATAL, messageGetter.getMessage( LIST_DIRECTORY_TEST_ERROR_INITIALIZING_CLUSTER_DESC ), messageGetter.getMessage( LIST_DIRECTORY_TEST_ERROR_INITIALIZING_CLUSTER_MESSAGE, namedCluster.getName() ), e, ClusterRuntimeTestEntry.DocAnchor.ACCESS_DIRECTORY ) ); } } } ================================================ FILE: impl/clusterTests/src/main/java/org/pentaho/big/data/impl/cluster/tests/hdfs/ListHomeDirectoryTest.java ================================================ /*! 
****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.impl.cluster.tests.hdfs; import org.pentaho.hadoop.shim.api.hdfs.HadoopFileSystemLocator; import org.pentaho.runtime.test.i18n.MessageGetterFactory; /** * Created by bryan on 8/14/15. */ public class ListHomeDirectoryTest extends ListDirectoryTest { public static final String HADOOP_FILE_SYSTEM_LIST_HOME_DIRECTORY_TEST = "hadoopFileSystemListHomeDirectoryTest"; public static final String LIST_HOME_DIRECTORY_TEST_NAME = "ListHomeDirectoryTest.Name"; private static final Class PKG = ListHomeDirectoryTest.class; public ListHomeDirectoryTest( MessageGetterFactory messageGetterFactory, HadoopFileSystemLocator hadoopFileSystemLocator ) { super( messageGetterFactory, hadoopFileSystemLocator, "", HADOOP_FILE_SYSTEM_LIST_HOME_DIRECTORY_TEST, messageGetterFactory.create( PKG ).getMessage( LIST_HOME_DIRECTORY_TEST_NAME ) ); } } ================================================ FILE: impl/clusterTests/src/main/java/org/pentaho/big/data/impl/cluster/tests/hdfs/ListRootDirectoryTest.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.impl.cluster.tests.hdfs; import org.pentaho.hadoop.shim.api.hdfs.HadoopFileSystemLocator; import org.pentaho.runtime.test.i18n.MessageGetterFactory; /** * Created by bryan on 8/14/15. */ public class ListRootDirectoryTest extends ListDirectoryTest { public static final String HADOOP_FILE_SYSTEM_LIST_ROOT_DIRECTORY_TEST = "hadoopFileSystemListRootDirectoryTest"; public static final String LIST_ROOT_DIRECTORY_TEST_NAME = "ListRootDirectoryTest.Name"; private static final Class PKG = ListRootDirectoryTest.class; public ListRootDirectoryTest( MessageGetterFactory messageGetterFactory, HadoopFileSystemLocator hadoopFileSystemLocator ) { super( messageGetterFactory, hadoopFileSystemLocator, "/", HADOOP_FILE_SYSTEM_LIST_ROOT_DIRECTORY_TEST, messageGetterFactory.create( PKG ).getMessage( LIST_ROOT_DIRECTORY_TEST_NAME ) ); } } ================================================ FILE: impl/clusterTests/src/main/java/org/pentaho/big/data/impl/cluster/tests/hdfs/PingFileSystemEntryPointTest.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. 
* * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.impl.cluster.tests.hdfs; import org.pentaho.hadoop.shim.api.cluster.NamedCluster; import org.pentaho.big.data.impl.cluster.tests.ClusterRuntimeTestEntry; import org.pentaho.big.data.impl.cluster.tests.Constants; import org.pentaho.di.core.variables.Variables; import org.pentaho.runtime.test.i18n.MessageGetter; import org.pentaho.runtime.test.i18n.MessageGetterFactory; import org.pentaho.runtime.test.network.ConnectivityTestFactory; import org.pentaho.runtime.test.result.RuntimeTestEntrySeverity; import org.pentaho.runtime.test.result.RuntimeTestResultSummary; import org.pentaho.runtime.test.result.org.pentaho.runtime.test.result.impl.RuntimeTestResultSummaryImpl; import org.pentaho.runtime.test.test.impl.BaseRuntimeTest; import java.util.HashSet; /** * Created by bryan on 8/14/15. */ public class PingFileSystemEntryPointTest extends BaseRuntimeTest { public static final String HADOOP_FILE_SYSTEM_PING_FILE_SYSTEM_ENTRY_POINT_TEST = "_hadoopFileSystemPingFileSystemEntryPointTest"; public static final String PING_FILE_SYSTEM_ENTRY_POINT_TEST_NAME = "PingFileSystemEntryPointTest.Name"; private static final Class PKG = PingFileSystemEntryPointTest.class; public static final String PING_FILE_SYSTEM_ENTRY_POINT_TEST_IS_MAPR_DESC = "PingFileSystemEntryPointTest.isMapr.Desc"; public static final String PING_FILE_SYSTEM_ENTRY_POINT_TEST_IS_MAPR_MESSAGE = "PingFileSystemEntryPointTest.isMapr.Message"; protected final MessageGetterFactory messageGetterFactory; private final MessageGetter messageGetter; protected final ConnectivityTestFactory connectivityTestFactory; public PingFileSystemEntryPointTest( MessageGetterFactory messageGetterFactory, ConnectivityTestFactory connectivityTestFactory ) { super( NamedCluster.class, Constants.HADOOP_FILE_SYSTEM, HADOOP_FILE_SYSTEM_PING_FILE_SYSTEM_ENTRY_POINT_TEST, messageGetterFactory.create( PKG ).getMessage( PING_FILE_SYSTEM_ENTRY_POINT_TEST_NAME ), new HashSet() ); this.messageGetterFactory = messageGetterFactory; this.messageGetter = messageGetterFactory.create( PKG ); this.connectivityTestFactory = connectivityTestFactory; } @Override public RuntimeTestResultSummary runTest( Object objectUnderTest ) { // Safe to cast as our accepts method will only return true for named clusters NamedCluster namedCluster = (NamedCluster) objectUnderTest; // The connection information might be parameterized. Since we aren't tied to a transformation or job, in order to // use a parameter, the value would have to be set as a system property or in kettle.properties, etc. 
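    // For example (hypothetical value), an HDFS host entered as "${HDFS_HOSTNAME}" is resolved here against system
    // properties / kettle.properties; if the variable is undefined, the literal text is passed unchanged to the
    // connectivity test below.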
// Here we try to resolve the parameters if we can: Variables variables = new Variables(); variables.initializeVariablesFrom( null ); // The connectivity test (ping the name node) is not applicable for MapR clusters due to their native client, so // just pass this test and move on if ( namedCluster.isMapr() ) { return new RuntimeTestResultSummaryImpl( new ClusterRuntimeTestEntry( RuntimeTestEntrySeverity.INFO, messageGetter.getMessage( PING_FILE_SYSTEM_ENTRY_POINT_TEST_IS_MAPR_DESC ), messageGetter.getMessage( PING_FILE_SYSTEM_ENTRY_POINT_TEST_IS_MAPR_MESSAGE ), null ) ); } else { return new RuntimeTestResultSummaryImpl( new ClusterRuntimeTestEntry( messageGetterFactory, connectivityTestFactory.create( messageGetterFactory, variables.environmentSubstitute( namedCluster.getHdfsHost() ), variables.environmentSubstitute( namedCluster.getHdfsPort() ), true ).runTest(), ClusterRuntimeTestEntry.DocAnchor.CLUSTER_CONNECT ) ); } } } ================================================ FILE: impl/clusterTests/src/main/java/org/pentaho/big/data/impl/cluster/tests/hdfs/WriteToAndDeleteFromUsersHomeFolderTest.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.impl.cluster.tests.hdfs; import org.pentaho.hadoop.shim.api.cluster.NamedCluster; import org.pentaho.hadoop.shim.api.cluster.ClusterInitializationException; import org.pentaho.big.data.impl.cluster.tests.ClusterRuntimeTestEntry; import org.pentaho.big.data.impl.cluster.tests.Constants; import org.pentaho.hadoop.shim.api.hdfs.HadoopFileSystemLocator; import org.pentaho.hadoop.shim.api.hdfs.HadoopFileSystem; import org.pentaho.hadoop.shim.api.hdfs.HadoopFileSystemPath; import org.pentaho.runtime.test.i18n.MessageGetter; import org.pentaho.runtime.test.i18n.MessageGetterFactory; import org.pentaho.runtime.test.result.RuntimeTestEntrySeverity; import org.pentaho.runtime.test.result.RuntimeTestResultEntry; import org.pentaho.runtime.test.result.RuntimeTestResultSummary; import org.pentaho.runtime.test.result.org.pentaho.runtime.test.result.impl.RuntimeTestResultSummaryImpl; import org.pentaho.runtime.test.test.impl.BaseRuntimeTest; import org.pentaho.runtime.test.test.impl.RuntimeTestResultEntryImpl; import java.io.IOException; import java.io.OutputStream; import java.nio.charset.Charset; import java.util.Arrays; import java.util.HashSet; /** * Created by bryan on 8/14/15. 
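 * <p>
 * Verifies write access to the user's HDFS home folder by creating {@code pentaho-shim-test-file.test}, writing
 * {@code "Hello, Cluster"} to it and deleting it again; each failure is reported as a WARNING or FATAL result
 * entry instead of being thrown. {@link ListHomeDirectoryTest} is declared as a dependency of this test.
 * <p>
 * Illustrative use (a sketch only; the factory and locator arguments are assumed to come from the plugin wiring):
 * <pre>{@code
 * RuntimeTestResultSummary summary =
 *     new WriteToAndDeleteFromUsersHomeFolderTest( messageGetterFactory, hadoopFileSystemLocator )
 *         .runTest( namedCluster );
 * }</pre>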
*/ public class WriteToAndDeleteFromUsersHomeFolderTest extends BaseRuntimeTest { public static final String HADOOP_FILE_SYSTEM_WRITE_TO_AND_DELETE_FROM_USERS_HOME_FOLDER_TEST = "hadoopFileSystemWriteToAndDeleteFromUsersHomeFolderTest"; public static final String WRITE_TO_AND_DELETE_FROM_USERS_HOME_FOLDER_TEST_NAME = "WriteToAndDeleteFromUsersHomeFolderTest.Name"; public static final String WRITE_TO_AND_DELETE_FROM_USERS_HOME_FOLDER_TEST_COULDNT_GET_FILE_SYSTEM_DESC = "WriteToAndDeleteFromUsersHomeFolderTest.CouldntGetFileSystem.Desc"; public static final String WRITE_TO_AND_DELETE_FROM_USERS_HOME_FOLDER_TEST_COULDNT_GET_FILE_SYSTEM_MESSAGE = "WriteToAndDeleteFromUsersHomeFolderTest.CouldntGetFileSystem.Message"; public static final String WRITE_TO_AND_DELETE_FROM_USERS_HOME_FOLDER_TEST_FILE_EXISTS_DESC = "WriteToAndDeleteFromUsersHomeFolderTest.FileExists.Desc"; public static final String PENTAHO_SHIM_TEST_FILE_TEST = "pentaho-shim-test-file.test"; public static final String WRITE_TO_AND_DELETE_FROM_USERS_HOME_FOLDER_TEST_FILE_EXISTS_MESSAGE = "WriteToAndDeleteFromUsersHomeFolderTest.FileExists.Message"; public static final String HELLO_CLUSTER = "Hello, Cluster"; public static final Charset UTF8 = Charset.forName( "UTF-8" ); public static final String WRITE_TO_AND_DELETE_FROM_USERS_HOME_FOLDER_TEST_SUCCESS_DESC = "WriteToAndDeleteFromUsersHomeFolderTest.Success.Desc"; public static final String WRITE_TO_AND_DELETE_FROM_USERS_HOME_FOLDER_TEST_SUCCESS_MESSAGE = "WriteToAndDeleteFromUsersHomeFolderTest.Success.Message"; public static final String WRITE_TO_AND_DELETE_FROM_USERS_HOME_FOLDER_TEST_UNABLE_TO_DELETE_DESC = "WriteToAndDeleteFromUsersHomeFolderTest.UnableToDelete.Desc"; public static final String WRITE_TO_AND_DELETE_FROM_USERS_HOME_FOLDER_TEST_UNABLE_TO_DELETE_MESSAGE = "WriteToAndDeleteFromUsersHomeFolderTest.UnableToDelete.Message"; public static final String WRITE_TO_AND_DELETE_FROM_USERS_HOME_FOLDER_TEST_ERROR_INITIALIZING_CLUSTER_DESC = "WriteToAndDeleteFromUsersHomeFolderTest.ErrorInitializingCluster.Desc"; public static final String WRITE_TO_AND_DELETE_FROM_USERS_HOME_FOLDER_TEST_ERROR_INITIALIZING_CLUSTER_MESSAGE = "WriteToAndDeleteFromUsersHomeFolderTest.ErrorInitializingCluster.Message"; public static final String WRITE_TO_AND_DELETE_FROM_USERS_HOME_FOLDER_TEST_ERROR_CHECKING_IF_FILE_EXISTS_DESC = "WriteToAndDeleteFromUsersHomeFolderTest.ErrorCheckingIfFileExists.Desc"; public static final String WRITE_TO_AND_DELETE_FROM_USERS_HOME_FOLDER_TEST_ERROR_CHECKING_IF_FILE_EXISTS_MESSAGE = "WriteToAndDeleteFromUsersHomeFolderTest.ErrorCheckingIfFileExists.Message"; public static final String WRITE_TO_AND_DELETE_FROM_USERS_HOME_FOLDER_TEST_ERROR_CREATING_FILE_DESC = "WriteToAndDeleteFromUsersHomeFolderTest.ErrorCreatingFile.Desc"; public static final String WRITE_TO_AND_DELETE_FROM_USERS_HOME_FOLDER_TEST_ERROR_CREATING_FILE_MESSAGE = "WriteToAndDeleteFromUsersHomeFolderTest.ErrorCreatingFile.Message"; public static final String WRITE_TO_AND_DELETE_FROM_USERS_HOME_FOLDER_TEST_ERROR_WRITING_TO_FILE_DESC = "WriteToAndDeleteFromUsersHomeFolderTest.ErrorWritingToFile.Desc"; public static final String WRITE_TO_AND_DELETE_FROM_USERS_HOME_FOLDER_TEST_ERROR_WRITING_TO_FILE_MESSAGE = "WriteToAndDeleteFromUsersHomeFolderTest.ErrorWritingToFile.Message"; public static final String WRITE_TO_AND_DELETE_FROM_USERS_HOME_FOLDER_TEST_ERROR_DELETING_FILE_DESC = "WriteToAndDeleteFromUsersHomeFolderTest.ErrorDeletingFile.Desc"; public static final String 
WRITE_TO_AND_DELETE_FROM_USERS_HOME_FOLDER_TEST_ERROR_DELETING_FILE_MESSAGE = "WriteToAndDeleteFromUsersHomeFolderTest.ErrorDeletingFile.Message"; public static final String WRITE_TO_AND_DELETE_FROM_USERS_HOME_FOLDER_TEST_ERROR_WRITING_DELETING_FILE_DESC = "WriteToAndDeleteFromUsersHomeFolderTest.ErrorWritingDeletingFile.Desc"; public static final String WRITE_TO_AND_DELETE_FROM_USERS_HOME_FOLDER_TEST_ERROR_WRITING_DELETING_FILE_MESSAGE = "WriteToAndDeleteFromUsersHomeFolderTest.ErrorWritingDeletingFile.Message"; private static final Class PKG = WriteToAndDeleteFromUsersHomeFolderTest.class; private final HadoopFileSystemLocator hadoopFileSystemLocator; protected final MessageGetterFactory messageGetterFactory; protected final MessageGetter messageGetter; public WriteToAndDeleteFromUsersHomeFolderTest( MessageGetterFactory messageGetterFactory, HadoopFileSystemLocator hadoopFileSystemLocator ) { super( NamedCluster.class, Constants.HADOOP_FILE_SYSTEM, HADOOP_FILE_SYSTEM_WRITE_TO_AND_DELETE_FROM_USERS_HOME_FOLDER_TEST, messageGetterFactory.create( PKG ).getMessage( WRITE_TO_AND_DELETE_FROM_USERS_HOME_FOLDER_TEST_NAME ), new HashSet<>( Arrays.asList( ListHomeDirectoryTest.HADOOP_FILE_SYSTEM_LIST_HOME_DIRECTORY_TEST ) ) ); this.hadoopFileSystemLocator = hadoopFileSystemLocator; this.messageGetterFactory = messageGetterFactory; this.messageGetter = messageGetterFactory.create( PKG ); } @Override public RuntimeTestResultSummary runTest( Object objectUnderTest ) { // Safe to cast as our accepts method will only return true for named clusters NamedCluster namedCluster = (NamedCluster) objectUnderTest; try { HadoopFileSystem hadoopFilesystem = hadoopFileSystemLocator.getHadoopFilesystem( namedCluster ); if ( hadoopFilesystem == null ) { return new RuntimeTestResultSummaryImpl( new ClusterRuntimeTestEntry( messageGetterFactory, RuntimeTestEntrySeverity.FATAL, messageGetter.getMessage( WRITE_TO_AND_DELETE_FROM_USERS_HOME_FOLDER_TEST_COULDNT_GET_FILE_SYSTEM_DESC ), messageGetter.getMessage( WRITE_TO_AND_DELETE_FROM_USERS_HOME_FOLDER_TEST_COULDNT_GET_FILE_SYSTEM_MESSAGE, namedCluster.getName() ), ClusterRuntimeTestEntry.DocAnchor.ACCESS_DIRECTORY ) ); } else { HadoopFileSystemPath path = hadoopFilesystem.getPath( PENTAHO_SHIM_TEST_FILE_TEST ); HadoopFileSystemPath qualifiedPath = hadoopFilesystem.makeQualified( path ); Boolean exists; try { exists = hadoopFilesystem.exists( path ); } catch ( IOException e ) { return new RuntimeTestResultSummaryImpl( new ClusterRuntimeTestEntry( messageGetterFactory, RuntimeTestEntrySeverity.FATAL, messageGetter .getMessage( WRITE_TO_AND_DELETE_FROM_USERS_HOME_FOLDER_TEST_ERROR_CHECKING_IF_FILE_EXISTS_DESC ), messageGetter .getMessage( WRITE_TO_AND_DELETE_FROM_USERS_HOME_FOLDER_TEST_ERROR_CHECKING_IF_FILE_EXISTS_MESSAGE, qualifiedPath.getName(), qualifiedPath.getPath() ), e, ClusterRuntimeTestEntry.DocAnchor.ACCESS_DIRECTORY ) ); } if ( exists ) { return new RuntimeTestResultSummaryImpl( new ClusterRuntimeTestEntry( messageGetterFactory, RuntimeTestEntrySeverity.WARNING, messageGetter.getMessage( WRITE_TO_AND_DELETE_FROM_USERS_HOME_FOLDER_TEST_FILE_EXISTS_DESC ), messageGetter.getMessage( WRITE_TO_AND_DELETE_FROM_USERS_HOME_FOLDER_TEST_FILE_EXISTS_MESSAGE, qualifiedPath.getName(), qualifiedPath.getPath() ), ClusterRuntimeTestEntry.DocAnchor.ACCESS_DIRECTORY ) ); } else { OutputStream outputStream; try { outputStream = hadoopFilesystem.create( path ); } catch ( IOException e ) { return new RuntimeTestResultSummaryImpl( new ClusterRuntimeTestEntry( messageGetterFactory, 
RuntimeTestEntrySeverity.WARNING, messageGetter.getMessage( WRITE_TO_AND_DELETE_FROM_USERS_HOME_FOLDER_TEST_ERROR_CREATING_FILE_DESC ), messageGetter.getMessage( WRITE_TO_AND_DELETE_FROM_USERS_HOME_FOLDER_TEST_ERROR_CREATING_FILE_MESSAGE, qualifiedPath.getName(), qualifiedPath.getPath() ), e, ClusterRuntimeTestEntry.DocAnchor.ACCESS_DIRECTORY ) ); } RuntimeTestResultEntry writeExceptionEntry = null; try { outputStream.write( HELLO_CLUSTER.getBytes( UTF8 ) ); } catch ( IOException e ) { writeExceptionEntry = new ClusterRuntimeTestEntry( messageGetterFactory, RuntimeTestEntrySeverity.WARNING, messageGetter .getMessage( WRITE_TO_AND_DELETE_FROM_USERS_HOME_FOLDER_TEST_ERROR_WRITING_TO_FILE_DESC ), messageGetter.getMessage( WRITE_TO_AND_DELETE_FROM_USERS_HOME_FOLDER_TEST_ERROR_WRITING_TO_FILE_MESSAGE, qualifiedPath.getName(), qualifiedPath.getPath() ), e, ClusterRuntimeTestEntry.DocAnchor.ACCESS_DIRECTORY ); } finally { try { outputStream.close(); } catch ( IOException e ) { //Ignore } } try { if ( hadoopFilesystem.delete( path, false ) ) { if ( writeExceptionEntry == null ) { return new RuntimeTestResultSummaryImpl( new ClusterRuntimeTestEntry( messageGetterFactory, RuntimeTestEntrySeverity.INFO, messageGetter.getMessage( WRITE_TO_AND_DELETE_FROM_USERS_HOME_FOLDER_TEST_SUCCESS_DESC ), messageGetter.getMessage( WRITE_TO_AND_DELETE_FROM_USERS_HOME_FOLDER_TEST_SUCCESS_MESSAGE, qualifiedPath.toString() ), ClusterRuntimeTestEntry.DocAnchor.ACCESS_DIRECTORY ) ); } else { return new RuntimeTestResultSummaryImpl( writeExceptionEntry ); } } else { if ( writeExceptionEntry == null ) { return new RuntimeTestResultSummaryImpl( new ClusterRuntimeTestEntry( messageGetterFactory, RuntimeTestEntrySeverity.WARNING, messageGetter.getMessage( WRITE_TO_AND_DELETE_FROM_USERS_HOME_FOLDER_TEST_UNABLE_TO_DELETE_DESC ), messageGetter.getMessage( WRITE_TO_AND_DELETE_FROM_USERS_HOME_FOLDER_TEST_UNABLE_TO_DELETE_MESSAGE, qualifiedPath.getName(), qualifiedPath.getPath() ), ClusterRuntimeTestEntry.DocAnchor.ACCESS_DIRECTORY ) ); } else { return new RuntimeTestResultSummaryImpl( writeExceptionEntry ); } } } catch ( IOException e ) { RuntimeTestResultEntryImpl deleteExceptionEntry = new ClusterRuntimeTestEntry( messageGetterFactory, RuntimeTestEntrySeverity.WARNING, messageGetter .getMessage( WRITE_TO_AND_DELETE_FROM_USERS_HOME_FOLDER_TEST_ERROR_DELETING_FILE_DESC ), messageGetter.getMessage( WRITE_TO_AND_DELETE_FROM_USERS_HOME_FOLDER_TEST_ERROR_DELETING_FILE_MESSAGE, qualifiedPath.getName(), qualifiedPath.getPath() ), e, ClusterRuntimeTestEntry.DocAnchor.ACCESS_DIRECTORY ); if ( writeExceptionEntry == null ) { return new RuntimeTestResultSummaryImpl( deleteExceptionEntry ); } else { return new RuntimeTestResultSummaryImpl( new ClusterRuntimeTestEntry( messageGetterFactory, RuntimeTestEntrySeverity.WARNING, messageGetter.getMessage( WRITE_TO_AND_DELETE_FROM_USERS_HOME_FOLDER_TEST_ERROR_WRITING_DELETING_FILE_DESC ), messageGetter .getMessage( WRITE_TO_AND_DELETE_FROM_USERS_HOME_FOLDER_TEST_ERROR_WRITING_DELETING_FILE_MESSAGE ), ClusterRuntimeTestEntry.DocAnchor.ACCESS_DIRECTORY ), Arrays.asList( writeExceptionEntry, deleteExceptionEntry ) ); } } } } } catch ( ClusterInitializationException e ) { return new RuntimeTestResultSummaryImpl( new ClusterRuntimeTestEntry( messageGetterFactory, RuntimeTestEntrySeverity.FATAL, messageGetter.getMessage( WRITE_TO_AND_DELETE_FROM_USERS_HOME_FOLDER_TEST_ERROR_INITIALIZING_CLUSTER_DESC ), messageGetter.getMessage( 
WRITE_TO_AND_DELETE_FROM_USERS_HOME_FOLDER_TEST_ERROR_INITIALIZING_CLUSTER_MESSAGE, namedCluster.getName() ), e, ClusterRuntimeTestEntry.DocAnchor.ACCESS_DIRECTORY ) ); } } } ================================================ FILE: impl/clusterTests/src/main/java/org/pentaho/big/data/impl/cluster/tests/kafka/KafkaConnectTest.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.impl.cluster.tests.kafka; import org.apache.commons.lang.StringUtils; import org.apache.kafka.clients.CommonClientConfigs; import org.apache.kafka.clients.consumer.Consumer; import org.apache.kafka.clients.consumer.ConsumerConfig; import org.apache.kafka.clients.consumer.KafkaConsumer; import org.apache.kafka.common.config.SaslConfigs; import org.apache.kafka.common.serialization.StringDeserializer; import org.pentaho.hadoop.shim.api.cluster.NamedCluster; import org.pentaho.hadoop.shim.api.cluster.NamedClusterServiceLocator; import org.pentaho.hadoop.shim.api.cluster.ClusterInitializationException; import org.pentaho.big.data.impl.cluster.tests.ClusterRuntimeTestEntry; import org.pentaho.big.data.impl.cluster.tests.Constants; import org.pentaho.hadoop.shim.api.jaas.JaasConfigService; import org.pentaho.di.core.variables.Variables; import org.pentaho.runtime.test.i18n.MessageGetter; import org.pentaho.runtime.test.i18n.MessageGetterFactory; import org.pentaho.runtime.test.result.RuntimeTestEntrySeverity; import org.pentaho.runtime.test.result.RuntimeTestResultSummary; import org.pentaho.runtime.test.result.org.pentaho.runtime.test.result.impl.RuntimeTestResultSummaryImpl; import org.pentaho.runtime.test.test.impl.BaseRuntimeTest; import org.pentaho.runtime.test.test.impl.RuntimeTestResultEntryImpl; import java.util.Collections; import java.util.HashMap; import java.util.Map; import java.util.function.Function; public class KafkaConnectTest extends BaseRuntimeTest { public static final String KAFKA_CONNECT_TEST = "KafkaConnectTest"; public static final String KAFKA_CONNECT_TEST_NAME = "KafkaConnectTest.Name"; public static final String KAFKA_CONNECT_TEST_MALFORMED_URL_DESC = "KafkaConnectTest.MalformedUrl.Desc"; public static final String KAFKA_CONNECT_TEST_MALFORMED_URL_MESSAGE = "KafkaConnectTest.MalformedUrl.Message"; public static final String KAFKA_CONNECT_TEST_SUCCESS_DESC = "KafkaConnectTest.Success.Desc"; public static final String KAFKA_CONNECT_TEST_SUCCESS_MESSAGE = "KafkaConnectTest.Success.Message"; public static final String KAFKA_CONNECT_TEST_EMPTY_DESC = "KafkaConnectTest.Empty.Desc"; public static final String KAFKA_CONNECT_TEST_EMPTY_MESSAGE = "KafkaConnectTest.Empty.Message"; private final MessageGetter messageGetter; Function<Map<String, Object>, Consumer> consumerFunction; static final Class PKG = KafkaConnectTest.class; protected final MessageGetterFactory messageGetterFactory; private NamedClusterServiceLocator namedClusterServiceLocator; public KafkaConnectTest( MessageGetterFactory messageGetterFactory, NamedClusterServiceLocator namedClusterServiceLocator ) { this( messageGetterFactory, KafkaConsumer::new, namedClusterServiceLocator ); } KafkaConnectTest( MessageGetterFactory messageGetterFactory, Function<Map<String, Object>,
Consumer> consumerFunction, final NamedClusterServiceLocator namedClusterServiceLocator ) { super( NamedCluster.class, Constants.KAFKA, KAFKA_CONNECT_TEST, messageGetterFactory.create( PKG ).getMessage( KAFKA_CONNECT_TEST_NAME ), Collections.emptySet() ); this.messageGetterFactory = messageGetterFactory; this.namedClusterServiceLocator = namedClusterServiceLocator; messageGetter = messageGetterFactory.create( PKG ); this.consumerFunction = consumerFunction; } @Override public RuntimeTestResultSummary runTest( final Object objectUnderTest ) { NamedCluster namedCluster = (NamedCluster) objectUnderTest; // The connection information might be parameterized. Since we aren't tied to a transformation or job, in order to // use a parameter, the value would have to be set as a system property or in kettle.properties, etc. // Here we try to resolve the parameters if we can: Variables variables = new Variables(); variables.initializeVariablesFrom( null ); String bootstrapServers = variables.environmentSubstitute( namedCluster.getKafkaBootstrapServers() ); if ( StringUtils.isBlank( bootstrapServers ) ) { return new RuntimeTestResultSummaryImpl( new ClusterRuntimeTestEntry( messageGetterFactory, new RuntimeTestResultEntryImpl( RuntimeTestEntrySeverity.SKIPPED, messageGetter.getMessage( KAFKA_CONNECT_TEST_EMPTY_DESC ), messageGetter.getMessage( KAFKA_CONNECT_TEST_EMPTY_MESSAGE ) ), ClusterRuntimeTestEntry.DocAnchor.KAFKA ) ); } HashMap<String, Object> configs = new HashMap<>(); configs.put( ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers ); configs.put( ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class ); configs.put( ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class ); configs.put( ConsumerConfig.REQUEST_TIMEOUT_MS_CONFIG, 10000 ); configs.put( ConsumerConfig.SESSION_TIMEOUT_MS_CONFIG, 9000 ); try { JaasConfigService jaasConfigService = namedClusterServiceLocator.getService( namedCluster, JaasConfigService.class ); if ( jaasConfigService != null ) { if ( jaasConfigService.isKerberos() ) { configs.put( SaslConfigs.SASL_JAAS_CONFIG, jaasConfigService.getJaasConfig() ); configs.put( CommonClientConfigs.SECURITY_PROTOCOL_CONFIG, "SASL_PLAINTEXT" ); } } } catch ( ClusterInitializationException e ) { //ok, try and connect anyway. If kafka requires kerberos we'll still get an error } try ( Consumer consumer = consumerFunction.apply( configs ) ) { consumer.listTopics(); return new RuntimeTestResultSummaryImpl( new ClusterRuntimeTestEntry( messageGetterFactory, new RuntimeTestResultEntryImpl( RuntimeTestEntrySeverity.INFO, messageGetter.getMessage( KAFKA_CONNECT_TEST_SUCCESS_DESC ), messageGetter.getMessage( KAFKA_CONNECT_TEST_SUCCESS_MESSAGE ) ), ClusterRuntimeTestEntry.DocAnchor.KAFKA ) ); } catch ( Exception e ) { return new RuntimeTestResultSummaryImpl( new ClusterRuntimeTestEntry( messageGetterFactory, new RuntimeTestResultEntryImpl( RuntimeTestEntrySeverity.ERROR, messageGetter.getMessage( KAFKA_CONNECT_TEST_MALFORMED_URL_DESC ), messageGetter.getMessage( KAFKA_CONNECT_TEST_MALFORMED_URL_MESSAGE, bootstrapServers ) ), ClusterRuntimeTestEntry.DocAnchor.KAFKA ) ); } } } ================================================ FILE: impl/clusterTests/src/main/java/org/pentaho/big/data/impl/cluster/tests/mr/GatewayPingJobTrackerTest.java ================================================ /*!
****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.impl.cluster.tests.mr; import org.pentaho.hadoop.shim.api.cluster.NamedCluster; import org.pentaho.big.data.impl.cluster.tests.ClusterRuntimeTestEntry; import org.pentaho.di.core.variables.Variables; import org.pentaho.runtime.test.i18n.MessageGetterFactory; import org.pentaho.runtime.test.network.ConnectivityTestFactory; import org.pentaho.runtime.test.result.RuntimeTestResultSummary; import org.pentaho.runtime.test.result.org.pentaho.runtime.test.result.impl.RuntimeTestResultSummaryImpl; /** * Created by dstepanov on 27/04/17. */ public class GatewayPingJobTrackerTest extends PingJobTrackerTest { private static final String TEST_PATH = "/resourcemanager/v1/cluster/info"; public GatewayPingJobTrackerTest( MessageGetterFactory messageGetterFactory, ConnectivityTestFactory connectivityTestFactory ) { super( messageGetterFactory, connectivityTestFactory ); } @Override public RuntimeTestResultSummary runTest( Object objectUnderTest ) { // Safe to cast as our accepts method will only return true for named clusters NamedCluster namedCluster = (NamedCluster) objectUnderTest; // The connection information might be parameterized. Since we aren't tied to a transformation or job, in order to // use a parameter, the value would have to be set as a system property or in kettle.properties, etc. // Here we try to resolve the parameters if we can: Variables variables = new Variables(); variables.initializeVariablesFrom( null ); if ( !namedCluster.isUseGateway() ) { return super.runTest( objectUnderTest ); } else { return new RuntimeTestResultSummaryImpl( new ClusterRuntimeTestEntry( messageGetterFactory, connectivityTestFactory.create( messageGetterFactory, variables.environmentSubstitute( namedCluster.decodePassword( namedCluster.getGatewayUrl() ) ), TEST_PATH, variables.environmentSubstitute( namedCluster.getGatewayUsername() ), variables.environmentSubstitute( namedCluster.decodePassword( namedCluster.getGatewayPassword() ) ) ) .runTest(), ClusterRuntimeTestEntry.DocAnchor.CLUSTER_CONNECT_GATEWAY ) ); } } } ================================================ FILE: impl/clusterTests/src/main/java/org/pentaho/big/data/impl/cluster/tests/mr/PingJobTrackerTest.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. 
* * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.impl.cluster.tests.mr; import org.pentaho.hadoop.shim.api.cluster.NamedCluster; import org.pentaho.big.data.impl.cluster.tests.ClusterRuntimeTestEntry; import org.pentaho.big.data.impl.cluster.tests.Constants; import org.pentaho.di.core.variables.Variables; import org.pentaho.runtime.test.i18n.MessageGetter; import org.pentaho.runtime.test.i18n.MessageGetterFactory; import org.pentaho.runtime.test.network.ConnectivityTestFactory; import org.pentaho.runtime.test.result.RuntimeTestEntrySeverity; import org.pentaho.runtime.test.result.RuntimeTestResultSummary; import org.pentaho.runtime.test.result.org.pentaho.runtime.test.result.impl.RuntimeTestResultSummaryImpl; import org.pentaho.runtime.test.test.impl.BaseRuntimeTest; import java.util.HashSet; /** * Created by bryan on 8/14/15. */ public class PingJobTrackerTest extends BaseRuntimeTest { public static final String JOB_TRACKER_PING_JOB_TRACKER_TEST = "jobTrackerPingJobTrackerTest"; public static final String PING_JOB_TRACKER_TEST_NAME = "PingJobTrackerTest.Name"; private static final Class PKG = PingJobTrackerTest.class; protected final MessageGetterFactory messageGetterFactory; private final MessageGetter messageGetter; protected final ConnectivityTestFactory connectivityTestFactory; public PingJobTrackerTest( MessageGetterFactory messageGetterFactory, ConnectivityTestFactory connectivityTestFactory ) { super( NamedCluster.class, Constants.MAP_REDUCE, JOB_TRACKER_PING_JOB_TRACKER_TEST, messageGetterFactory.create( PKG ).getMessage( PING_JOB_TRACKER_TEST_NAME ), new HashSet() ); this.messageGetterFactory = messageGetterFactory; this.messageGetter = messageGetterFactory.create( PKG ); this.connectivityTestFactory = connectivityTestFactory; } @Override public RuntimeTestResultSummary runTest( Object objectUnderTest ) { // Safe to cast as our accepts method will only return true for named clusters NamedCluster namedCluster = (NamedCluster) objectUnderTest; // The connection information might be parameterized. Since we aren't tied to a transformation or job, in order to // use a parameter, the value would have to be set as a system property or in kettle.properties, etc. // Here we try to resolve the parameters if we can: Variables variables = new Variables(); variables.initializeVariablesFrom( null ); // The connectivity test (ping the name node) is not applicable for MapR clusters due to their native client, so // just pass this test and move on if ( namedCluster.isMapr() ) { return new RuntimeTestResultSummaryImpl( new ClusterRuntimeTestEntry( RuntimeTestEntrySeverity.INFO, messageGetter.getMessage( "PingJobTrackerTest.isMapr.Desc" ), messageGetter.getMessage( "PingJobTrackerTest.isMapr.Message" ), null ) ); } else { return new RuntimeTestResultSummaryImpl( new ClusterRuntimeTestEntry( messageGetterFactory, connectivityTestFactory .create( messageGetterFactory, variables.environmentSubstitute( namedCluster.getJobTrackerHost() ), variables.environmentSubstitute( namedCluster.getJobTrackerPort() ), true ) .runTest(), ClusterRuntimeTestEntry.DocAnchor.CLUSTER_CONNECT ) ); } } } ================================================ FILE: impl/clusterTests/src/main/java/org/pentaho/big/data/impl/cluster/tests/oozie/GatewayPingOozieHostTest.java ================================================ /*! 
****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.impl.cluster.tests.oozie; import org.pentaho.hadoop.shim.api.cluster.NamedCluster; import org.pentaho.big.data.impl.cluster.tests.ClusterRuntimeTestEntry; import org.pentaho.di.core.variables.Variables; import org.pentaho.runtime.test.i18n.MessageGetterFactory; import org.pentaho.runtime.test.network.ConnectivityTestFactory; import org.pentaho.runtime.test.result.RuntimeTestResultSummary; import org.pentaho.runtime.test.result.org.pentaho.runtime.test.result.impl.RuntimeTestResultSummaryImpl; /** * Created by dstepanov on 27/04/17. */ public class GatewayPingOozieHostTest extends PingOozieHostTest { private static final String TEST_PATH = "/oozie/v1/admin/status"; public GatewayPingOozieHostTest( MessageGetterFactory messageGetterFactory, ConnectivityTestFactory connectivityTestFactory ) { super( messageGetterFactory, connectivityTestFactory ); } @Override public RuntimeTestResultSummary runTest( Object objectUnderTest ) { // Safe to cast as our accepts method will only return true for named clusters NamedCluster namedCluster = (NamedCluster) objectUnderTest; // The connection information might be parameterized. Since we aren't tied to a transformation or job, in order to // use a parameter, the value would have to be set as a system property or in kettle.properties, etc. // Here we try to resolve the parameters if we can: Variables variables = new Variables(); variables.initializeVariablesFrom( null ); if ( !namedCluster.isUseGateway() ) { return super.runTest( objectUnderTest ); } else { return new RuntimeTestResultSummaryImpl( new ClusterRuntimeTestEntry( messageGetterFactory, connectivityTestFactory.create( messageGetterFactory, variables.environmentSubstitute( namedCluster.decodePassword( namedCluster.getGatewayUrl() ) ), TEST_PATH, variables.environmentSubstitute( namedCluster.getGatewayUsername() ), variables.environmentSubstitute( namedCluster.decodePassword( namedCluster.getGatewayPassword() ) ) ) .runTest(), ClusterRuntimeTestEntry.DocAnchor.OOZIE ) ); } } } ================================================ FILE: impl/clusterTests/src/main/java/org/pentaho/big/data/impl/cluster/tests/oozie/PingOozieHostTest.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. 
* * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.impl.cluster.tests.oozie; import org.pentaho.big.data.impl.cluster.tests.ClusterRuntimeTestEntry; import org.pentaho.big.data.impl.cluster.tests.Constants; import org.pentaho.di.core.variables.Variables; import org.pentaho.hadoop.shim.api.cluster.NamedCluster; import org.pentaho.runtime.test.i18n.MessageGetter; import org.pentaho.runtime.test.i18n.MessageGetterFactory; import org.pentaho.runtime.test.network.ConnectivityTestFactory; import org.pentaho.runtime.test.result.RuntimeTestEntrySeverity; import org.pentaho.runtime.test.result.RuntimeTestResultSummary; import org.pentaho.runtime.test.result.org.pentaho.runtime.test.result.impl.RuntimeTestResultSummaryImpl; import org.pentaho.runtime.test.test.impl.BaseRuntimeTest; import java.net.MalformedURLException; import java.net.URL; import java.util.HashSet; /** * Created by bryan on 8/14/15. */ public class PingOozieHostTest extends BaseRuntimeTest { public static final String OOZIE_PING_OOZIE_HOST_TEST = "ooziePingOozieHostTest"; public static final String PING_OOZIE_HOST_TEST_NAME = "PingOozieHostTest.Name"; public static final String PING_OOZIE_HOST_TEST_MALFORMED_URL_DESC = "PingOozieHostTest.MalformedUrl.Desc"; public static final String PING_OOZIE_HOST_TEST_MALFORMED_URL_MESSAGE = "PingOozieHostTest.MalformedUrl.Message"; private static final Class PKG = PingOozieHostTest.class; protected final MessageGetterFactory messageGetterFactory; protected final ConnectivityTestFactory connectivityTestFactory; private final MessageGetter messageGetter; public PingOozieHostTest( MessageGetterFactory messageGetterFactory, ConnectivityTestFactory connectivityTestFactory ) { super( NamedCluster.class, Constants.OOZIE, OOZIE_PING_OOZIE_HOST_TEST, messageGetterFactory.create( PKG ).getMessage( PING_OOZIE_HOST_TEST_NAME ), new HashSet() ); this.messageGetterFactory = messageGetterFactory; this.messageGetter = messageGetterFactory.create( PKG ); this.connectivityTestFactory = connectivityTestFactory; } @Override public RuntimeTestResultSummary runTest( Object objectUnderTest ) { // Safe to cast as our accepts method will only return true for named clusters NamedCluster namedCluster = (NamedCluster) objectUnderTest; // The connection information might be parameterized. Since we aren't tied to a transformation or job, in order to // use a parameter, the value would have to be set as a system property or in kettle.properties, etc. 
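    // The resolved Oozie URL is parsed with java.net.URL so its host and port can be handed to the connectivity test;
    // for example (hypothetical value), "http://oozie.example.com:11000/oozie" yields host "oozie.example.com" and
    // port 11000, while an unparseable value produces a FATAL "MalformedUrl" result entry.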
// Here we try to resolve the parameters if we can: Variables variables = new Variables(); variables.initializeVariablesFrom( null ); String oozieUrl = variables.environmentSubstitute( namedCluster.getOozieUrl() ); try { URL url = new URL( oozieUrl ); return new RuntimeTestResultSummaryImpl( new ClusterRuntimeTestEntry( messageGetterFactory, connectivityTestFactory.create( messageGetterFactory, url.getHost(), String.valueOf( url.getPort() ), false ).runTest(), ClusterRuntimeTestEntry.DocAnchor.OOZIE ) ); } catch ( MalformedURLException e ) { return new RuntimeTestResultSummaryImpl( new ClusterRuntimeTestEntry( messageGetterFactory, RuntimeTestEntrySeverity.FATAL, messageGetter.getMessage( PING_OOZIE_HOST_TEST_MALFORMED_URL_DESC ), messageGetter.getMessage( PING_OOZIE_HOST_TEST_MALFORMED_URL_MESSAGE, oozieUrl ), e, ClusterRuntimeTestEntry.DocAnchor.OOZIE ) ); } } } ================================================ FILE: impl/clusterTests/src/main/java/org/pentaho/big/data/impl/cluster/tests/zookeeper/GatewayPingZookeeperEnsembleTest.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.impl.cluster.tests.zookeeper; import org.pentaho.hadoop.shim.api.cluster.NamedCluster; import org.pentaho.big.data.impl.cluster.tests.ClusterRuntimeTestEntry; import org.pentaho.di.core.variables.Variables; import org.pentaho.runtime.test.i18n.MessageGetterFactory; import org.pentaho.runtime.test.network.ConnectivityTestFactory; import org.pentaho.runtime.test.result.RuntimeTestEntrySeverity; import org.pentaho.runtime.test.result.RuntimeTestResultSummary; import org.pentaho.runtime.test.result.org.pentaho.runtime.test.result.impl.RuntimeTestResultSummaryImpl; /** * Created by dstepanov on 27/04/17. */ public class GatewayPingZookeeperEnsembleTest extends PingZookeeperEnsembleTest { public static final String GATEWAY_PING_ZOOKEEPER_NOT_SUPPORT_DESC = "GatewayPingZookeeperEnsembleTest.ZookeeperNotSupport.Desc"; public static final String GATEWAY_PING_ZOOKEEPER_NOT_SUPPORT_MESSAGE = "GatewayPingZookeeperEnsembleTest.ZookeeperNotSupport.Message"; public GatewayPingZookeeperEnsembleTest( MessageGetterFactory messageGetterFactory, ConnectivityTestFactory connectivityTestFactory ) { super( messageGetterFactory, connectivityTestFactory ); } @Override public RuntimeTestResultSummary runTest( Object objectUnderTest ) { // Safe to cast as our accepts method will only return true for named clusters NamedCluster namedCluster = (NamedCluster) objectUnderTest; // The connection information might be parameterized. Since we aren't tied to a transformation or job, in order to // use a parameter, the value would have to be set as a system property or in kettle.properties, etc. 
// Here we try to resolve the parameters if we can: Variables variables = new Variables(); variables.initializeVariablesFrom( null ); if ( !namedCluster.isUseGateway() ) { return super.runTest( objectUnderTest ); } else { return new RuntimeTestResultSummaryImpl( new ClusterRuntimeTestEntry( RuntimeTestEntrySeverity.SKIPPED, messageGetter.getMessage( GATEWAY_PING_ZOOKEEPER_NOT_SUPPORT_DESC ), messageGetter.getMessage( GATEWAY_PING_ZOOKEEPER_NOT_SUPPORT_MESSAGE ), null ) ); } } } ================================================ FILE: impl/clusterTests/src/main/java/org/pentaho/big/data/impl/cluster/tests/zookeeper/PingZookeeperEnsembleTest.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.impl.cluster.tests.zookeeper; import org.pentaho.hadoop.shim.api.cluster.NamedCluster; import org.pentaho.big.data.impl.cluster.tests.ClusterRuntimeTestEntry; import org.pentaho.big.data.impl.cluster.tests.Constants; import org.pentaho.di.core.Const; import org.pentaho.di.core.variables.Variables; import org.pentaho.runtime.test.i18n.MessageGetter; import org.pentaho.runtime.test.i18n.MessageGetterFactory; import org.pentaho.runtime.test.network.ConnectivityTestFactory; import org.pentaho.runtime.test.result.RuntimeTestEntrySeverity; import org.pentaho.runtime.test.result.RuntimeTestResultEntry; import org.pentaho.runtime.test.result.RuntimeTestResultSummary; import org.pentaho.runtime.test.result.org.pentaho.runtime.test.result.impl.RuntimeTestResultSummaryImpl; import org.pentaho.runtime.test.test.impl.BaseRuntimeTest; import org.pentaho.runtime.test.test.impl.RuntimeTestResultEntryImpl; import java.util.ArrayList; import java.util.HashSet; import java.util.List; /** * Created by bryan on 8/14/15. 
*/ public class PingZookeeperEnsembleTest extends BaseRuntimeTest { public static final String HADOOP_FILE_SYSTEM_PING_FILE_SYSTEM_ENTRY_POINT_TEST = "zookeeperPingZookeeperEnsembleTest"; public static final String PING_ZOOKEEPER_ENSEMBLE_TEST_NAME = "PingZookeeperEnsembleTest.Name"; public static final String PING_ZOOKEEPER_ENSEMBLE_TEST_BLANK_HOST_DESC = "PingZookeeperEnsembleTest.BlankHost.Desc"; public static final String PING_ZOOKEEPER_ENSEMBLE_TEST_BLANK_HOST_MESSAGE = "PingZookeeperEnsembleTest.BlankHost.Message"; public static final String PING_ZOOKEEPER_ENSEMBLE_TEST_BLANK_PORT_DESC = "PingZookeeperEnsembleTest.BlankPort.Desc"; public static final String PING_ZOOKEEPER_ENSEMBLE_TEST_BLANK_PORT_MESSAGE = "PingZookeeperEnsembleTest.BlankPort.Message"; public static final String PING_ZOOKEEPER_ENSEMBLE_TEST_NO_NODES_SUCCEEDED_DESC = "PingZookeeperEnsembleTest.NoNodesSucceeded.Desc"; public static final String PING_ZOOKEEPER_ENSEMBLE_TEST_NO_NODES_SUCCEEDED_MESSAGE = "PingZookeeperEnsembleTest.NoNodesSucceeded.Message"; public static final String PING_ZOOKEEPER_ENSEMBLE_TEST_SOME_NODES_FAILED_DESC = "PingZookeeperEnsembleTest.SomeNodesFailed.Desc"; public static final String PING_ZOOKEEPER_ENSEMBLE_TEST_SOME_NODES_FAILED_MESSAGE = "PingZookeeperEnsembleTest.SomeNodesFailed.Message"; public static final String PING_ZOOKEEPER_ENSEMBLE_TEST_ALL_NODES_SUCCEEDED_DESC = "PingZookeeperEnsembleTest.AllNodesSucceeded.Desc"; public static final String PING_ZOOKEEPER_ENSEMBLE_TEST_ALL_NODES_SUCCEEDED_MESSAGE = "PingZookeeperEnsembleTest.AllNodesSucceeded.Message"; private static final Class PKG = PingZookeeperEnsembleTest.class; private final MessageGetterFactory messageGetterFactory; protected final MessageGetter messageGetter; private final ConnectivityTestFactory connectivityTestFactory; public PingZookeeperEnsembleTest( MessageGetterFactory messageGetterFactory, ConnectivityTestFactory connectivityTestFactory ) { super( NamedCluster.class, Constants.ZOOKEEPER, HADOOP_FILE_SYSTEM_PING_FILE_SYSTEM_ENTRY_POINT_TEST, messageGetterFactory.create( PKG ).getMessage( PING_ZOOKEEPER_ENSEMBLE_TEST_NAME ), new HashSet() ); this.messageGetterFactory = messageGetterFactory; this.connectivityTestFactory = connectivityTestFactory; messageGetter = messageGetterFactory.create( PKG ); } @Override public RuntimeTestResultSummary runTest( Object objectUnderTest ) { // Safe to cast as our accepts method will only return true for named clusters NamedCluster namedCluster = (NamedCluster) objectUnderTest; // The connection information might be parameterized. Since we aren't tied to a transformation or job, in order to // use a parameter, the value would have to be set as a system property or in kettle.properties, etc. 
// Here we try to resolve the parameters if we can: Variables variables = new Variables(); variables.initializeVariablesFrom( null ); String zooKeeperHost = variables.environmentSubstitute( namedCluster.getZooKeeperHost() ); String zooKeeperPort = variables.environmentSubstitute( namedCluster.getZooKeeperPort() ); if ( Const.isEmpty( zooKeeperHost ) ) { return new RuntimeTestResultSummaryImpl( new ClusterRuntimeTestEntry( messageGetterFactory, RuntimeTestEntrySeverity.FATAL, messageGetter.getMessage( PING_ZOOKEEPER_ENSEMBLE_TEST_BLANK_HOST_DESC ), messageGetter.getMessage( PING_ZOOKEEPER_ENSEMBLE_TEST_BLANK_HOST_MESSAGE ), ClusterRuntimeTestEntry.DocAnchor.ZOOKEEPER ) ); } else if ( Const.isEmpty( zooKeeperPort ) ) { return new RuntimeTestResultSummaryImpl( new ClusterRuntimeTestEntry( messageGetterFactory, RuntimeTestEntrySeverity.FATAL, messageGetter.getMessage( PING_ZOOKEEPER_ENSEMBLE_TEST_BLANK_PORT_DESC ), messageGetter.getMessage( PING_ZOOKEEPER_ENSEMBLE_TEST_BLANK_PORT_MESSAGE ), ClusterRuntimeTestEntry.DocAnchor.ZOOKEEPER ) ); } else { String[] quorum = zooKeeperHost.split( "," ); List clusterTestResultEntries = new ArrayList<>(); int failedNodes = 0; StringBuilder failedNodeString = new StringBuilder(); for ( String node : quorum ) { RuntimeTestResultEntry nodeResults = new ClusterRuntimeTestEntry( messageGetterFactory, connectivityTestFactory .create( messageGetterFactory, node, zooKeeperPort, false, RuntimeTestEntrySeverity.WARNING ).runTest(), ClusterRuntimeTestEntry.DocAnchor.ZOOKEEPER ); if ( nodeResults.getSeverity() == RuntimeTestEntrySeverity.WARNING ) { failedNodeString.append( node ).append( ", " ); failedNodes++; } clusterTestResultEntries.add( nodeResults ); } if ( failedNodes > 0 ) { failedNodeString.setLength( failedNodeString.length() - 2 ); } RuntimeTestResultEntryImpl overallResult; if ( failedNodes == quorum.length ) { overallResult = new ClusterRuntimeTestEntry( messageGetterFactory, RuntimeTestEntrySeverity.FATAL, messageGetter.getMessage( PING_ZOOKEEPER_ENSEMBLE_TEST_NO_NODES_SUCCEEDED_DESC ), messageGetter .getMessage( PING_ZOOKEEPER_ENSEMBLE_TEST_NO_NODES_SUCCEEDED_MESSAGE, failedNodeString.toString() ), ClusterRuntimeTestEntry.DocAnchor.ZOOKEEPER ); } else if ( failedNodes > 0 ) { overallResult = new ClusterRuntimeTestEntry( messageGetterFactory, RuntimeTestEntrySeverity.WARNING, messageGetter.getMessage( PING_ZOOKEEPER_ENSEMBLE_TEST_SOME_NODES_FAILED_DESC ), messageGetter .getMessage( PING_ZOOKEEPER_ENSEMBLE_TEST_SOME_NODES_FAILED_MESSAGE, failedNodeString.toString() ), ClusterRuntimeTestEntry.DocAnchor.ZOOKEEPER ); } else { overallResult = new ClusterRuntimeTestEntry( messageGetterFactory, RuntimeTestEntrySeverity.INFO, messageGetter.getMessage( PING_ZOOKEEPER_ENSEMBLE_TEST_ALL_NODES_SUCCEEDED_DESC ), messageGetter.getMessage( PING_ZOOKEEPER_ENSEMBLE_TEST_ALL_NODES_SUCCEEDED_MESSAGE ), ClusterRuntimeTestEntry.DocAnchor.ZOOKEEPER ); } return new RuntimeTestResultSummaryImpl( overallResult, clusterTestResultEntries ); } } } ================================================ FILE: impl/clusterTests/src/main/resources/OSGI-INF/blueprint/blueprint.xml ================================================ ================================================ FILE: impl/clusterTests/src/main/resources/org/pentaho/big/data/impl/cluster/tests/hdfs/messages/messages_en_US.properties ================================================ ListDirectoryTest.CouldntGetFileSystem.Desc=Unable to access the directory. 
ListDirectoryTest.CouldntGetFileSystem.Message=Could not access this directory: {0}. ListDirectoryTest.Success.Desc=Successfully read directory contents. ListDirectoryTest.Success.Message=Successfully read the contents of {0} directory. ListDirectoryTest.AccessControlException.Desc=User does not have permission to read directory contents. ListDirectoryTest.AccessControlException.Message=Could not read the contents of {0} directory. User does not have read permission for this directory. ListDirectoryTest.ErrorListingDirectory.Desc=Could not read directory contents. ListDirectoryTest.ErrorListingDirectory.Message=Could not read the contents of {0} directory. ListDirectoryTest.ErrorInitializingCluster.Desc=Could not initialize the cluster. ListDirectoryTest.ErrorInitializingCluster.Message=Unable to initialize this cluster: {0}. Verify that the shim is configured properly. PingFileSystemEntryPointTest.Name=Hadoop File System Connection PingFileSystemEntryPointTest.isMapr.Desc=Test not applicable for MapR clusters PingFileSystemEntryPointTest.isMapr.Message=The Namenode connectivity test is not applicable to MapR clusters as they use a native client to connect. ListRootDirectoryTest.Name=Root Directory Access ListHomeDirectoryTest.Name=User Home Directory Access WriteToAndDeleteFromUsersHomeFolderTest.Name=Verify User Home Permissions WriteToAndDeleteFromUsersHomeFolderTest.CouldntGetFileSystem.Desc=File system not found. WriteToAndDeleteFromUsersHomeFolderTest.CouldntGetFileSystem.Message=We cannot access the file system on this cluster: {0}. Verify the path to the user home directory on the cluster. WriteToAndDeleteFromUsersHomeFolderTest.FileExists.Desc=Test file already exists in the user's home directory. WriteToAndDeleteFromUsersHomeFolderTest.FileExists.Message={0} already exists in the {1} directory. WriteToAndDeleteFromUsersHomeFolderTest.Success.Desc=User has write and delete permissions for their home directory. WriteToAndDeleteFromUsersHomeFolderTest.Success.Message=User has write and delete permissions for {0} directory. WriteToAndDeleteFromUsersHomeFolderTest.UnableToDelete.Desc=Unable to delete the test file from the user's home directory. WriteToAndDeleteFromUsersHomeFolderTest.UnableToDelete.Message=Unable to delete {0} from {1} directory. WriteToAndDeleteFromUsersHomeFolderTest.ErrorCheckingIfFileExists.Desc=Unable to find the test file in the user's home directory. WriteToAndDeleteFromUsersHomeFolderTest.ErrorCheckingIfFileExists.Message=Unable to find {0} in the {1} directory. WriteToAndDeleteFromUsersHomeFolderTest.ErrorInitializingCluster.Desc=Unable to initialize the cluster. WriteToAndDeleteFromUsersHomeFolderTest.ErrorInitializingCluster.Message=Unable to initialize the cluster {0}. Verify the shim was configured correctly. WriteToAndDeleteFromUsersHomeFolderTest.ErrorCreatingFile.Desc=Unable to create the test file in the user's home directory. WriteToAndDeleteFromUsersHomeFolderTest.ErrorCreatingFile.Message=Could not create {0} in the {1} directory. Verify that you have permission to create a file in the directory. WriteToAndDeleteFromUsersHomeFolderTest.ErrorWritingToFile.Desc=Could not write to the test file in the user's home directory. WriteToAndDeleteFromUsersHomeFolderTest.ErrorWritingToFile.Message=Could not write to {0} in the {1} directory. Verify that you have permission to write to a file in the directory. WriteToAndDeleteFromUsersHomeFolderTest.ErrorDeletingFile.Desc=Could not delete test file.
WriteToAndDeleteFromUsersHomeFolderTest.ErrorDeletingFile.Message=Could not delete {0} from the {1} directory. Verify that you have permission to delete a file from the directory. ================================================ FILE: impl/clusterTests/src/main/resources/org/pentaho/big/data/impl/cluster/tests/kafka/messages/messages_en_US.properties ================================================ KafkaConnectTest.Name=Kafka Connection KafkaConnectTest.MalformedUrl.Desc=Unable to connect to Kafka. KafkaConnectTest.MalformedUrl.Message=We are unable to connect to Kafka at {0}. Please verify the Kafka Bootstrap URL and network access. KafkaConnectTest.Success.Desc=Successfully connected to Kafka. KafkaConnectTest.Success.Message=Successfully connected to Kafka. KafkaConnectTest.Empty.Desc=This test was skipped because the Bootstrap server field is empty. KafkaConnectTest.Empty.Message=This test was skipped because the Bootstrap server field is empty. ================================================ FILE: impl/clusterTests/src/main/resources/org/pentaho/big/data/impl/cluster/tests/messages/messages_en_US.properties ================================================ RuntimeTestResultEntryWithDefaultShimHelp.TroubleshootingGuide=Learn more RuntimeTestResultEntryWithDefaultShimHelp.Shell.Doc=Setup/Administration/Troubleshooting/Big_Data_Issues RuntimeTestResultEntryWithDefaultShimHelp.Shell.Doc.Anchor.General= RuntimeTestResultEntryWithDefaultShimHelp.Shell.Doc.Anchor.ShimLoad=#Shim_and_Configuration_Issues RuntimeTestResultEntryWithDefaultShimHelp.Shell.Doc.Anchor.ClusterConnect=#Connection_Problems RuntimeTestResultEntryWithDefaultShimHelp.Shell.Doc.Anchor.ClusterConnectGateway=#Connection_Problems RuntimeTestResultEntryWithDefaultShimHelp.Shell.Doc.Anchor.AccessDirectory=#Directory_Access_or_Permissions_Issues RuntimeTestResultEntryWithDefaultShimHelp.Shell.Doc.Anchor.Oozie=#Oozie_Issues RuntimeTestResultEntryWithDefaultShimHelp.Shell.Doc.Anchor.Zookeeper=#Zookeeper_Problems RuntimeTestResultEntryWithDefaultShimHelp.Shell.Doc.Anchor.Kafka=#Kafka_Problems RuntimeTestResultEntryWithDefaultShimHelp.Shell.Doc.Title=Hadoop Cluster Test RuntimeTestResultEntryWithDefaultShimHelp.Shell.Doc.Header=Hadoop Cluster Test details ================================================ FILE: impl/clusterTests/src/main/resources/org/pentaho/big/data/impl/cluster/tests/mr/messages/messages_en_US.properties ================================================ PingJobTrackerTest.Name=Ping Job Tracker / Resource Manager PingJobTrackerTest.isMapr.Desc=Test not applicable for MapR clusters PingJobTrackerTest.isMapr.Message=The JobTracker / ResourceManager connectivity test is not applicable to MapR clusters as they use a native client to connect. ================================================ FILE: impl/clusterTests/src/main/resources/org/pentaho/big/data/impl/cluster/tests/oozie/messages/messages_en_US.properties ================================================ PingOozieHostTest.Name=Oozie Host Connection PingOozieHostTest.MalformedUrl.Desc=Unable to connect to Oozie. PingOozieHostTest.MalformedUrl.Message=We are unable to connect to Oozie at {0}. Please verify the Oozie URL and network access.
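The message bundles above and below use positional placeholders ({0}, {1}) that the tests fill in through messageGetter.getMessage( KEY, args ) — for example, PingOozieHostTest passes the unresolvable Oozie URL into PingOozieHostTest.MalformedUrl.Message. The snippet below is only an illustrative, self-contained sketch of that placeholder convention using java.text.MessageFormat; the actual lookup goes through BaseMessagesMessageGetterImpl and Kettle's BaseMessages, and the inline Properties bundle and the example oozieUrl value here are invented for the demonstration.

import java.text.MessageFormat;
import java.util.Properties;

/**
 * Illustrative sketch only: shows how {0}-style placeholders in the
 * messages_en_US.properties bundles are substituted. The real MessageGetter
 * resolves keys through Kettle's BaseMessages rather than a Properties object.
 */
public class MessagePlaceholderSketch {
  public static void main( String[] args ) {
    // A tiny stand-in for the oozie messages bundle shown above.
    Properties bundle = new Properties();
    bundle.setProperty( "PingOozieHostTest.MalformedUrl.Message",
      "We are unable to connect to Oozie at {0}. Please verify the Oozie URL and network access." );

    // Hypothetical URL, analogous to the value PingOozieHostTest.runTest() passes along.
    String oozieUrl = "http://oozie-host:11000/oozie";

    // MessageFormat fills the positional placeholder, mirroring getMessage( KEY, oozieUrl ).
    String message = MessageFormat.format(
      bundle.getProperty( "PingOozieHostTest.MalformedUrl.Message" ), oozieUrl );
    System.out.println( message );
  }
}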
================================================ FILE: impl/clusterTests/src/main/resources/org/pentaho/big/data/impl/cluster/tests/zookeeper/messages/messages_en_US.properties ================================================ PingZookeeperEnsembleTest.Name=Zookeeper Ensemble Connection PingZookeeperEnsembleTest.BlankHost.Desc=One or more Zookeeper hostnames were not set. PingZookeeperEnsembleTest.BlankHost.Message=Please specify at least one Zookeeper hostname. Separate multiple hostnames with a comma. PingZookeeperEnsembleTest.BlankPort.Desc=The Zookeeper port was not set. PingZookeeperEnsembleTest.BlankPort.Message=Please specify the Zookeeper port. PingZookeeperEnsembleTest.NoNodesSucceeded.Desc=Unable to connect to the Zookeeper Ensemble. PingZookeeperEnsembleTest.NoNodesSucceeded.Message=Unable to connect to any Zookeeper host. We tried to connect to these hosts: {0}. Please verify Zookeeper settings. PingZookeeperEnsembleTest.AllNodesSucceeded.Desc=Connected to all Zookeeper nodes. PingZookeeperEnsembleTest.AllNodesSucceeded.Message=Successfully connected to all Zookeeper nodes for all ports. PingZookeeperEnsembleTest.SomeNodesFailed.Desc=Unable to connect to all Zookeeper nodes. PingZookeeperEnsembleTest.SomeNodesFailed.Message=Unable to connect to the following Zookeeper nodes: {0}. Please verify Zookeeper settings. GatewayPingZookeeperEnsembleTest.ZookeeperNotSupport.Desc=Access to Zookeeper services is not supported with Knox. GatewayPingZookeeperEnsembleTest.ZookeeperNotSupport.Message=Access to Zookeeper services is not supported with Knox. ================================================ FILE: impl/clusterTests/src/test/java/org/pentaho/big/data/impl/cluster/tests/hdfs/ListDirectoryTestTest.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.impl.cluster.tests.hdfs; import org.junit.Before; import org.junit.Test; import org.pentaho.hadoop.shim.api.cluster.ClusterInitializationException; import org.pentaho.hadoop.shim.api.hdfs.HadoopFileSystemLocator; import org.pentaho.hadoop.shim.api.hdfs.exceptions.AccessControlException; import org.pentaho.hadoop.shim.api.cluster.NamedCluster; import org.pentaho.hadoop.shim.api.hdfs.HadoopFileStatus; import org.pentaho.hadoop.shim.api.hdfs.HadoopFileSystem; import org.pentaho.hadoop.shim.api.hdfs.HadoopFileSystemPath; import org.pentaho.runtime.test.TestMessageGetterFactory; import org.pentaho.runtime.test.i18n.MessageGetter; import org.pentaho.runtime.test.result.RuntimeTestEntrySeverity; import org.pentaho.runtime.test.result.RuntimeTestResultSummary; import java.io.IOException; import static org.junit.Assert.assertEquals; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; import static org.pentaho.runtime.test.RuntimeTestEntryUtil.verifyRuntimeTestResultEntry; /** * Created by bryan on 8/21/15.
*/ public class ListDirectoryTestTest { private TestMessageGetterFactory testMessageGetterFactory; private MessageGetter messageGetter; private HadoopFileSystemLocator hadoopFileSystemLocator; private String directory; private String id; private String name; private ListDirectoryTest listDirectoryTest; private NamedCluster namedCluster; private HadoopFileSystem hadoopFileSystem; private String namedClusterName; private HadoopFileSystemPath directoryPath; private HadoopFileSystemPath homeDirectoryPath; @Before public void setup() throws ClusterInitializationException { testMessageGetterFactory = new TestMessageGetterFactory(); messageGetter = testMessageGetterFactory.create( ListDirectoryTest.class ); hadoopFileSystemLocator = mock( HadoopFileSystemLocator.class ); directory = "directory"; id = "id"; name = "name"; namedCluster = mock( NamedCluster.class ); namedClusterName = "namedCluster"; when( namedCluster.getName() ).thenReturn( namedClusterName ); hadoopFileSystem = mock( HadoopFileSystem.class ); directoryPath = mock( HadoopFileSystemPath.class ); when( hadoopFileSystem.getPath( directory ) ).thenReturn( directoryPath ); homeDirectoryPath = mock( HadoopFileSystemPath.class ); when( hadoopFileSystem.getHomeDirectory() ).thenReturn( homeDirectoryPath ); when( hadoopFileSystemLocator.getHadoopFilesystem( namedCluster ) ).thenReturn( hadoopFileSystem ); init(); } private void init() { listDirectoryTest = new ListDirectoryTest( testMessageGetterFactory, hadoopFileSystemLocator, directory, id, name ); } @Test public void testNullHadoopFileSystem() { hadoopFileSystemLocator = mock( HadoopFileSystemLocator.class ); init(); RuntimeTestResultSummary runtimeTestResultSummary = listDirectoryTest.runTest( namedCluster ); verifyRuntimeTestResultEntry( runtimeTestResultSummary.getOverallStatusEntry(), RuntimeTestEntrySeverity.FATAL, messageGetter.getMessage( ListDirectoryTest.LIST_DIRECTORY_TEST_COULDNT_GET_FILE_SYSTEM_DESC ), messageGetter .getMessage( ListDirectoryTest.LIST_DIRECTORY_TEST_COULDNT_GET_FILE_SYSTEM_MESSAGE, directory ) ); assertEquals( 0, runtimeTestResultSummary.getRuntimeTestResultEntries().size() ); } @Test public void testClusterInitializationException() throws ClusterInitializationException { hadoopFileSystemLocator = mock( HadoopFileSystemLocator.class ); when( hadoopFileSystemLocator.getHadoopFilesystem( namedCluster ) ) .thenThrow( new ClusterInitializationException( null ) ); init(); RuntimeTestResultSummary runtimeTestResultSummary = listDirectoryTest.runTest( namedCluster ); verifyRuntimeTestResultEntry( runtimeTestResultSummary.getOverallStatusEntry(), RuntimeTestEntrySeverity.FATAL, messageGetter.getMessage( ListDirectoryTest.LIST_DIRECTORY_TEST_ERROR_INITIALIZING_CLUSTER_DESC ), messageGetter .getMessage( ListDirectoryTest.LIST_DIRECTORY_TEST_ERROR_INITIALIZING_CLUSTER_MESSAGE, namedClusterName ), ClusterInitializationException.class ); assertEquals( 0, runtimeTestResultSummary.getRuntimeTestResultEntries().size() ); } @Test public void testAccessControlException() throws IOException { when( hadoopFileSystem.listStatus( directoryPath ) ).thenThrow( new AccessControlException( null, null ) ); RuntimeTestResultSummary runtimeTestResultSummary = listDirectoryTest.runTest( namedCluster ); verifyRuntimeTestResultEntry( runtimeTestResultSummary.getOverallStatusEntry(), RuntimeTestEntrySeverity.WARNING, messageGetter.getMessage( ListDirectoryTest.LIST_DIRECTORY_TEST_ACCESS_CONTROL_EXCEPTION_DESC ), messageGetter .getMessage( 
ListDirectoryTest.LIST_DIRECTORY_TEST_ACCESS_CONTROL_EXCEPTION_MESSAGE, directoryPath.toString() ), AccessControlException.class ); assertEquals( 0, runtimeTestResultSummary.getRuntimeTestResultEntries().size() ); } @Test public void testIOException() throws IOException { when( hadoopFileSystem.listStatus( directoryPath ) ).thenThrow( new IOException() ); RuntimeTestResultSummary runtimeTestResultSummary = listDirectoryTest.runTest( namedCluster ); verifyRuntimeTestResultEntry( runtimeTestResultSummary.getOverallStatusEntry(), RuntimeTestEntrySeverity.FATAL, messageGetter.getMessage( ListDirectoryTest.LIST_DIRECTORY_TEST_ERROR_LISTING_DIRECTORY_DESC ), messageGetter .getMessage( ListDirectoryTest.LIST_DIRECTORY_TEST_ERROR_LISTING_DIRECTORY_MESSAGE, directoryPath.toString() ), IOException.class ); assertEquals( 0, runtimeTestResultSummary.getRuntimeTestResultEntries().size() ); } @Test public void testHomeDirectorySuccess() throws IOException { directory = ""; HadoopFileStatus hadoopFileStatus1 = mock( HadoopFileStatus.class ); HadoopFileStatus hadoopFileStatus2 = mock( HadoopFileStatus.class ); HadoopFileSystemPath hadoopFileSystemPath1 = mock( HadoopFileSystemPath.class ); HadoopFileSystemPath hadoopFileSystemPath2 = mock( HadoopFileSystemPath.class ); when( hadoopFileStatus1.getPath() ).thenReturn( hadoopFileSystemPath1 ); when( hadoopFileStatus2.getPath() ).thenReturn( hadoopFileSystemPath2 ); HadoopFileStatus[] hadoopFileStatuses = { hadoopFileStatus1, hadoopFileStatus2 }; when( hadoopFileSystem.listStatus( homeDirectoryPath ) ).thenReturn( hadoopFileStatuses ); init(); RuntimeTestResultSummary runtimeTestResultSummary = listDirectoryTest.runTest( namedCluster ); verifyRuntimeTestResultEntry( runtimeTestResultSummary.getOverallStatusEntry(), RuntimeTestEntrySeverity.INFO, messageGetter.getMessage( ListDirectoryTest.LIST_DIRECTORY_TEST_SUCCESS_DESC ), messageGetter .getMessage( ListDirectoryTest.LIST_DIRECTORY_TEST_SUCCESS_MESSAGE, hadoopFileSystemPath1.toString() + ", " + hadoopFileSystemPath2.toString() ) ); assertEquals( 0, runtimeTestResultSummary.getRuntimeTestResultEntries().size() ); } } ================================================ FILE: impl/clusterTests/src/test/java/org/pentaho/big/data/impl/cluster/tests/hdfs/ListHomeDirectoryTestTest.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.impl.cluster.tests.hdfs; import org.junit.Before; import org.junit.Test; import org.pentaho.hadoop.shim.api.hdfs.HadoopFileSystemLocator; import org.pentaho.runtime.test.TestMessageGetterFactory; import org.pentaho.runtime.test.i18n.MessageGetter; import org.pentaho.runtime.test.i18n.MessageGetterFactory; import static org.junit.Assert.assertEquals; import static org.mockito.Mockito.mock; /** * Created by bryan on 8/24/15. 
*/ public class ListHomeDirectoryTestTest { private MessageGetterFactory messageGetterFactory; private HadoopFileSystemLocator hadoopFileSystemLocator; private ListHomeDirectoryTest listHomeDirectoryTest; private MessageGetter messageGetter; @Before public void setup() { messageGetterFactory = new TestMessageGetterFactory(); messageGetter = messageGetterFactory.create( ListHomeDirectoryTest.class ); hadoopFileSystemLocator = mock( HadoopFileSystemLocator.class ); listHomeDirectoryTest = new ListHomeDirectoryTest( messageGetterFactory, hadoopFileSystemLocator ); } @Test public void testGetName() { assertEquals( messageGetter.getMessage( ListHomeDirectoryTest.LIST_HOME_DIRECTORY_TEST_NAME ), listHomeDirectoryTest.getName() ); } } ================================================ FILE: impl/clusterTests/src/test/java/org/pentaho/big/data/impl/cluster/tests/hdfs/ListRootDirectoryTestTest.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.impl.cluster.tests.hdfs; import org.junit.Before; import org.junit.Test; import org.pentaho.hadoop.shim.api.hdfs.HadoopFileSystemLocator; import org.pentaho.runtime.test.TestMessageGetterFactory; import org.pentaho.runtime.test.i18n.MessageGetter; import org.pentaho.runtime.test.i18n.MessageGetterFactory; import static org.junit.Assert.assertEquals; import static org.mockito.Mockito.mock; /** * Created by bryan on 8/24/15. */ public class ListRootDirectoryTestTest { private MessageGetterFactory messageGetterFactory; private HadoopFileSystemLocator hadoopFileSystemLocator; private ListRootDirectoryTest listRootDirectoryTest; private MessageGetter messageGetter; @Before public void setup() { messageGetterFactory = new TestMessageGetterFactory(); messageGetter = messageGetterFactory.create( ListRootDirectoryTest.class ); hadoopFileSystemLocator = mock( HadoopFileSystemLocator.class ); listRootDirectoryTest = new ListRootDirectoryTest( messageGetterFactory, hadoopFileSystemLocator ); } @Test public void testGetName() { assertEquals( messageGetter.getMessage( ListRootDirectoryTest.LIST_ROOT_DIRECTORY_TEST_NAME ), listRootDirectoryTest.getName() ); } } ================================================ FILE: impl/clusterTests/src/test/java/org/pentaho/big/data/impl/cluster/tests/hdfs/PingFileSystemEntryPointTestTest.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. 
* * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.impl.cluster.tests.hdfs; import org.junit.Before; import org.junit.Test; import org.pentaho.hadoop.shim.api.cluster.NamedCluster; import org.pentaho.runtime.test.TestMessageGetterFactory; import org.pentaho.runtime.test.i18n.MessageGetter; import org.pentaho.runtime.test.i18n.MessageGetterFactory; import org.pentaho.runtime.test.network.ConnectivityTest; import org.pentaho.runtime.test.network.ConnectivityTestFactory; import org.pentaho.runtime.test.result.RuntimeTestEntrySeverity; import org.pentaho.runtime.test.result.RuntimeTestResultEntry; import org.pentaho.runtime.test.result.RuntimeTestResultSummary; import static org.junit.Assert.assertEquals; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; /** * Created by bryan on 8/24/15. */ public class PingFileSystemEntryPointTestTest { private MessageGetterFactory messageGetterFactory; private ConnectivityTestFactory connectivityTestFactory; private PingFileSystemEntryPointTest fileSystemEntryPointTest; private NamedCluster namedCluster; private MessageGetter messageGetter; private String hdfsHost; private String hdfsPort; @Before public void setup() { messageGetterFactory = new TestMessageGetterFactory(); messageGetter = messageGetterFactory.create( PingFileSystemEntryPointTest.class ); connectivityTestFactory = mock( ConnectivityTestFactory.class ); fileSystemEntryPointTest = new PingFileSystemEntryPointTest( messageGetterFactory, connectivityTestFactory ); hdfsHost = "hdfsHost"; hdfsPort = "8025"; namedCluster = mock( NamedCluster.class ); when( namedCluster.getHdfsHost() ).thenReturn( hdfsHost ); when( namedCluster.getHdfsPort() ).thenReturn( hdfsPort ); when( namedCluster.isMapr() ).thenReturn( false ); } @Test public void testGetName() { assertEquals( messageGetter.getMessage( PingFileSystemEntryPointTest.PING_FILE_SYSTEM_ENTRY_POINT_TEST_NAME ), fileSystemEntryPointTest.getName() ); } @Test public void testSuccess() { RuntimeTestResultEntry results = mock( RuntimeTestResultEntry.class ); String testDescription = "test-description"; when( results.getDescription() ).thenReturn( testDescription ); ConnectivityTest connectivityTest = mock( ConnectivityTest.class ); when( connectivityTestFactory.create( messageGetterFactory, hdfsHost, hdfsPort, true ) ) .thenReturn( connectivityTest ); when( connectivityTest.runTest() ).thenReturn( results ); RuntimeTestResultSummary runtimeTestResultSummary = fileSystemEntryPointTest.runTest( namedCluster ); assertEquals( testDescription, runtimeTestResultSummary.getOverallStatusEntry().getDescription() ); assertEquals( 0, runtimeTestResultSummary.getRuntimeTestResultEntries().size() ); } @Test public void testIsMapR() { when( namedCluster.isMapr() ).thenReturn( true ); RuntimeTestResultSummary runtimeTestResultSummary = fileSystemEntryPointTest.runTest( namedCluster ); RuntimeTestResultEntry results = runtimeTestResultSummary.getOverallStatusEntry(); assertEquals( RuntimeTestEntrySeverity.INFO, results.getSeverity() ); assertEquals( 0, runtimeTestResultSummary.getRuntimeTestResultEntries().size() ); } } ================================================ FILE: impl/clusterTests/src/test/java/org/pentaho/big/data/impl/cluster/tests/hdfs/WriteToAndDeleteFromUsersHomeFolderTestTest.java ================================================ /*! 
****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.impl.cluster.tests.hdfs; import org.junit.Before; import org.junit.Test; import org.pentaho.hadoop.shim.api.cluster.NamedCluster; import org.pentaho.hadoop.shim.api.cluster.ClusterInitializationException; import org.pentaho.hadoop.shim.api.hdfs.HadoopFileSystemLocator; import org.pentaho.hadoop.shim.api.hdfs.HadoopFileSystem; import org.pentaho.hadoop.shim.api.hdfs.HadoopFileSystemPath; import org.pentaho.runtime.test.TestMessageGetterFactory; import org.pentaho.runtime.test.i18n.MessageGetter; import org.pentaho.runtime.test.result.RuntimeTestEntrySeverity; import org.pentaho.runtime.test.result.RuntimeTestResultSummary; import java.io.ByteArrayOutputStream; import java.io.IOException; import java.io.OutputStream; import static org.junit.Assert.assertEquals; import static org.mockito.Matchers.isA; import static org.mockito.Mockito.doThrow; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; import static org.pentaho.runtime.test.RuntimeTestEntryUtil.verifyRuntimeTestResultEntry; /** * Created by bryan on 8/21/15. */ public class WriteToAndDeleteFromUsersHomeFolderTestTest { private TestMessageGetterFactory messageGetterFactory; private MessageGetter messageGetter; private HadoopFileSystemLocator hadoopFileSystemLocator; private WriteToAndDeleteFromUsersHomeFolderTest writeToAndDeleteFromUsersHomeFolderTest; private NamedCluster namedCluster; private String namedClusterName; private HadoopFileSystem hadoopFileSystem; private HadoopFileSystemPath hadoopFileSystemPath; private HadoopFileSystemPath qualifiedPath; @Before public void setup() throws ClusterInitializationException { messageGetterFactory = new TestMessageGetterFactory(); messageGetter = messageGetterFactory.create( WriteToAndDeleteFromUsersHomeFolderTest.class ); hadoopFileSystemLocator = mock( HadoopFileSystemLocator.class ); namedCluster = mock( NamedCluster.class ); namedClusterName = "namedClusterName"; when( namedCluster.getName() ).thenReturn( namedClusterName ); hadoopFileSystem = mock( HadoopFileSystem.class ); when( hadoopFileSystemLocator.getHadoopFilesystem( namedCluster ) ).thenReturn( hadoopFileSystem ); hadoopFileSystemPath = mock( HadoopFileSystemPath.class ); when( hadoopFileSystem.getPath( WriteToAndDeleteFromUsersHomeFolderTest.PENTAHO_SHIM_TEST_FILE_TEST ) ) .thenReturn( hadoopFileSystemPath ); qualifiedPath = mock( HadoopFileSystemPath.class ); when( hadoopFileSystem.makeQualified( hadoopFileSystemPath ) ).thenReturn( qualifiedPath ); when( qualifiedPath.getName() ).thenReturn( WriteToAndDeleteFromUsersHomeFolderTest.PENTAHO_SHIM_TEST_FILE_TEST ); when( qualifiedPath.getPath() ).thenReturn( "" ); init(); } private void init() { writeToAndDeleteFromUsersHomeFolderTest = new WriteToAndDeleteFromUsersHomeFolderTest( messageGetterFactory, hadoopFileSystemLocator ); } @Test public void testNullFileSystem() { hadoopFileSystemLocator = mock( HadoopFileSystemLocator.class ); init(); RuntimeTestResultSummary runtimeTestResultSummary = writeToAndDeleteFromUsersHomeFolderTest.runTest( namedCluster ); verifyRuntimeTestResultEntry( runtimeTestResultSummary.getOverallStatusEntry(), 
RuntimeTestEntrySeverity.FATAL, messageGetter.getMessage( WriteToAndDeleteFromUsersHomeFolderTest .WRITE_TO_AND_DELETE_FROM_USERS_HOME_FOLDER_TEST_COULDNT_GET_FILE_SYSTEM_DESC ), messageGetter.getMessage( WriteToAndDeleteFromUsersHomeFolderTest .WRITE_TO_AND_DELETE_FROM_USERS_HOME_FOLDER_TEST_COULDNT_GET_FILE_SYSTEM_MESSAGE, namedClusterName ) ); assertEquals( 0, runtimeTestResultSummary.getRuntimeTestResultEntries().size() ); } @Test public void testClusterInitializationException() throws ClusterInitializationException { hadoopFileSystemLocator = mock( HadoopFileSystemLocator.class ); when( hadoopFileSystemLocator.getHadoopFilesystem( namedCluster ) ) .thenThrow( new ClusterInitializationException( null ) ); init(); RuntimeTestResultSummary runtimeTestResultSummary = writeToAndDeleteFromUsersHomeFolderTest.runTest( namedCluster ); verifyRuntimeTestResultEntry( runtimeTestResultSummary.getOverallStatusEntry(), RuntimeTestEntrySeverity.FATAL, messageGetter.getMessage( WriteToAndDeleteFromUsersHomeFolderTest .WRITE_TO_AND_DELETE_FROM_USERS_HOME_FOLDER_TEST_ERROR_INITIALIZING_CLUSTER_DESC ), messageGetter.getMessage( WriteToAndDeleteFromUsersHomeFolderTest .WRITE_TO_AND_DELETE_FROM_USERS_HOME_FOLDER_TEST_ERROR_INITIALIZING_CLUSTER_MESSAGE, namedClusterName ), ClusterInitializationException.class ); assertEquals( 0, runtimeTestResultSummary.getRuntimeTestResultEntries().size() ); } @Test public void testIOExceptionExists() throws ClusterInitializationException, IOException { when( hadoopFileSystem.exists( hadoopFileSystemPath ) ).thenThrow( new IOException() ); RuntimeTestResultSummary runtimeTestResultSummary = writeToAndDeleteFromUsersHomeFolderTest.runTest( namedCluster ); verifyRuntimeTestResultEntry( runtimeTestResultSummary.getOverallStatusEntry(), RuntimeTestEntrySeverity.FATAL, messageGetter.getMessage( WriteToAndDeleteFromUsersHomeFolderTest .WRITE_TO_AND_DELETE_FROM_USERS_HOME_FOLDER_TEST_ERROR_CHECKING_IF_FILE_EXISTS_DESC ), messageGetter.getMessage( WriteToAndDeleteFromUsersHomeFolderTest .WRITE_TO_AND_DELETE_FROM_USERS_HOME_FOLDER_TEST_ERROR_CHECKING_IF_FILE_EXISTS_MESSAGE, qualifiedPath.getName(), qualifiedPath.getPath() ), IOException.class ); assertEquals( 0, runtimeTestResultSummary.getRuntimeTestResultEntries().size() ); } @Test public void testIOExceptionCreate() throws ClusterInitializationException, IOException { when( hadoopFileSystem.create( hadoopFileSystemPath ) ).thenThrow( new IOException() ); RuntimeTestResultSummary runtimeTestResultSummary = writeToAndDeleteFromUsersHomeFolderTest.runTest( namedCluster ); verifyRuntimeTestResultEntry( runtimeTestResultSummary.getOverallStatusEntry(), RuntimeTestEntrySeverity.WARNING, messageGetter.getMessage( WriteToAndDeleteFromUsersHomeFolderTest .WRITE_TO_AND_DELETE_FROM_USERS_HOME_FOLDER_TEST_ERROR_CREATING_FILE_DESC ), messageGetter.getMessage( WriteToAndDeleteFromUsersHomeFolderTest .WRITE_TO_AND_DELETE_FROM_USERS_HOME_FOLDER_TEST_ERROR_CREATING_FILE_MESSAGE, qualifiedPath.getName(), qualifiedPath.getPath() ), IOException.class ); assertEquals( 0, runtimeTestResultSummary.getRuntimeTestResultEntries().size() ); } @Test public void testIOExceptionWrite() throws ClusterInitializationException, IOException { OutputStream outputStream = mock( OutputStream.class ); when( hadoopFileSystem.create( hadoopFileSystemPath ) ).thenReturn( outputStream ); when( hadoopFileSystem.delete( hadoopFileSystemPath, false ) ).thenReturn( true ); doThrow( new IOException() ).when( outputStream ).write( isA( byte[].class ) ); 
RuntimeTestResultSummary runtimeTestResultSummary = writeToAndDeleteFromUsersHomeFolderTest.runTest( namedCluster ); verifyRuntimeTestResultEntry( runtimeTestResultSummary.getOverallStatusEntry(), RuntimeTestEntrySeverity.WARNING, messageGetter.getMessage( WriteToAndDeleteFromUsersHomeFolderTest .WRITE_TO_AND_DELETE_FROM_USERS_HOME_FOLDER_TEST_ERROR_WRITING_TO_FILE_DESC ), messageGetter.getMessage( WriteToAndDeleteFromUsersHomeFolderTest .WRITE_TO_AND_DELETE_FROM_USERS_HOME_FOLDER_TEST_ERROR_WRITING_TO_FILE_MESSAGE, qualifiedPath.getName(), qualifiedPath.getPath() ), IOException.class ); assertEquals( 0, runtimeTestResultSummary.getRuntimeTestResultEntries().size() ); } @Test public void testIOExceptionDelete() throws ClusterInitializationException, IOException { OutputStream outputStream = mock( OutputStream.class ); when( hadoopFileSystem.create( hadoopFileSystemPath ) ).thenReturn( outputStream ); when( hadoopFileSystem.delete( hadoopFileSystemPath, false ) ).thenThrow( new IOException() ); RuntimeTestResultSummary runtimeTestResultSummary = writeToAndDeleteFromUsersHomeFolderTest.runTest( namedCluster ); verifyRuntimeTestResultEntry( runtimeTestResultSummary.getOverallStatusEntry(), RuntimeTestEntrySeverity.WARNING, messageGetter.getMessage( WriteToAndDeleteFromUsersHomeFolderTest .WRITE_TO_AND_DELETE_FROM_USERS_HOME_FOLDER_TEST_ERROR_DELETING_FILE_DESC ), messageGetter.getMessage( WriteToAndDeleteFromUsersHomeFolderTest .WRITE_TO_AND_DELETE_FROM_USERS_HOME_FOLDER_TEST_ERROR_DELETING_FILE_MESSAGE, qualifiedPath.getName(), qualifiedPath.getPath() ), IOException.class ); assertEquals( 0, runtimeTestResultSummary.getRuntimeTestResultEntries().size() ); } @Test public void testPathExists() throws IOException { when( hadoopFileSystem.exists( hadoopFileSystemPath ) ).thenReturn( true ); RuntimeTestResultSummary runtimeTestResultSummary = writeToAndDeleteFromUsersHomeFolderTest.runTest( namedCluster ); verifyRuntimeTestResultEntry( runtimeTestResultSummary.getOverallStatusEntry(), RuntimeTestEntrySeverity.WARNING, messageGetter.getMessage( WriteToAndDeleteFromUsersHomeFolderTest .WRITE_TO_AND_DELETE_FROM_USERS_HOME_FOLDER_TEST_FILE_EXISTS_DESC ), messageGetter.getMessage( WriteToAndDeleteFromUsersHomeFolderTest .WRITE_TO_AND_DELETE_FROM_USERS_HOME_FOLDER_TEST_FILE_EXISTS_MESSAGE, qualifiedPath.getName(), qualifiedPath.getPath() ) ); assertEquals( 0, runtimeTestResultSummary.getRuntimeTestResultEntries().size() ); } @Test public void testUnableToDelete() throws IOException { ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream(); when( hadoopFileSystem.create( hadoopFileSystemPath ) ).thenReturn( byteArrayOutputStream ); when( hadoopFileSystem.delete( hadoopFileSystemPath, false ) ).thenReturn( false ); RuntimeTestResultSummary runtimeTestResultSummary = writeToAndDeleteFromUsersHomeFolderTest.runTest( namedCluster ); verifyRuntimeTestResultEntry( runtimeTestResultSummary.getOverallStatusEntry(), RuntimeTestEntrySeverity.WARNING, messageGetter.getMessage( WriteToAndDeleteFromUsersHomeFolderTest .WRITE_TO_AND_DELETE_FROM_USERS_HOME_FOLDER_TEST_UNABLE_TO_DELETE_DESC ), messageGetter.getMessage( WriteToAndDeleteFromUsersHomeFolderTest .WRITE_TO_AND_DELETE_FROM_USERS_HOME_FOLDER_TEST_UNABLE_TO_DELETE_MESSAGE, qualifiedPath.getName(), qualifiedPath.getPath() ) ); assertEquals( WriteToAndDeleteFromUsersHomeFolderTest.HELLO_CLUSTER, byteArrayOutputStream.toString( WriteToAndDeleteFromUsersHomeFolderTest.UTF8.name() ) ); assertEquals( 0, 
runtimeTestResultSummary.getRuntimeTestResultEntries().size() ); } @Test public void testSuccess() throws IOException { ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream(); when( hadoopFileSystem.create( hadoopFileSystemPath ) ).thenReturn( byteArrayOutputStream ); when( hadoopFileSystem.delete( hadoopFileSystemPath, false ) ).thenReturn( true ); RuntimeTestResultSummary runtimeTestResultSummary = writeToAndDeleteFromUsersHomeFolderTest.runTest( namedCluster ); verifyRuntimeTestResultEntry( runtimeTestResultSummary.getOverallStatusEntry(), RuntimeTestEntrySeverity.INFO, messageGetter.getMessage( WriteToAndDeleteFromUsersHomeFolderTest .WRITE_TO_AND_DELETE_FROM_USERS_HOME_FOLDER_TEST_SUCCESS_DESC ), messageGetter.getMessage( WriteToAndDeleteFromUsersHomeFolderTest .WRITE_TO_AND_DELETE_FROM_USERS_HOME_FOLDER_TEST_SUCCESS_MESSAGE, qualifiedPath.toString() ) ); assertEquals( WriteToAndDeleteFromUsersHomeFolderTest.HELLO_CLUSTER, byteArrayOutputStream.toString( WriteToAndDeleteFromUsersHomeFolderTest.UTF8.name() ) ); assertEquals( 0, runtimeTestResultSummary.getRuntimeTestResultEntries().size() ); } } ================================================ FILE: impl/clusterTests/src/test/java/org/pentaho/big/data/impl/cluster/tests/kafka/KafkaConnectTestTest.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.impl.cluster.tests.kafka; import org.apache.kafka.clients.consumer.Consumer; import org.apache.kafka.common.KafkaException; import org.apache.kafka.common.config.SaslConfigs; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import org.mockito.Mock; import org.mockito.runners.MockitoJUnitRunner; import org.pentaho.hadoop.shim.api.cluster.NamedCluster; import org.pentaho.hadoop.shim.api.cluster.NamedClusterServiceLocator; import org.pentaho.big.data.impl.cluster.tests.ClusterRuntimeTestEntry; import org.pentaho.hadoop.shim.api.jaas.JaasConfigService; import org.pentaho.runtime.test.i18n.MessageGetter; import org.pentaho.runtime.test.i18n.MessageGetterFactory; import org.pentaho.runtime.test.result.RuntimeTestEntrySeverity; import org.pentaho.runtime.test.result.RuntimeTestResultSummary; import java.util.Collections; import java.util.Map; import static org.junit.Assert.assertEquals; import static org.mockito.Matchers.anyString; import static org.mockito.Matchers.eq; import static org.mockito.Mockito.never; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; @RunWith( MockitoJUnitRunner.class ) public class KafkaConnectTestTest { @Mock Consumer consumer; @Mock NamedClusterServiceLocator namedClusterServiceLocator; @Mock NamedCluster namedCluster; @Mock MessageGetter messageGetter; @Mock MessageGetterFactory messageGetterFactory; @Mock JaasConfigService jaasConfigService; @Before public void setUp() throws Exception { when( messageGetterFactory.create( KafkaConnectTest.PKG ) ).thenReturn( messageGetter ); when( messageGetterFactory.create( ClusterRuntimeTestEntry.class ) ).thenReturn( messageGetter ); } @Test public void testSuccess() throws Exception { when( consumer.listTopics() ).thenReturn( 
Collections.emptyMap() ); when( namedCluster.getKafkaBootstrapServers() ).thenReturn( "kafkaHost:9092" ); when( messageGetter.getMessage( anyString() ) ).thenReturn( "success message" ); KafkaConnectTest kafkaConnectTest = new KafkaConnectTest( messageGetterFactory, (map) -> consumer, namedClusterServiceLocator ); RuntimeTestResultSummary summary = kafkaConnectTest.runTest( namedCluster ); assertEquals( RuntimeTestEntrySeverity.INFO, summary.getOverallStatusEntry().getSeverity() ); assertEquals( "success message", summary.getOverallStatusEntry().getMessage() ); } @Test public void testSuccessKerberos() throws Exception { when( consumer.listTopics() ).thenReturn( Collections.emptyMap() ); when( namedCluster.getKafkaBootstrapServers() ).thenReturn( "kafkaHost:9092" ); when( messageGetter.getMessage( anyString() ) ).thenReturn( "success message" ); when( namedClusterServiceLocator.getService( namedCluster, JaasConfigService.class ) ) .thenReturn( jaasConfigService ); when( jaasConfigService.isKerberos() ).thenReturn( true ); when( jaasConfigService.getJaasConfig() ).thenReturn( "pretend-jaas-config" ); KafkaConnectTest kafkaConnectTest = new KafkaConnectTest( messageGetterFactory, this::assertConsumer, namedClusterServiceLocator ); RuntimeTestResultSummary summary = kafkaConnectTest.runTest( namedCluster ); assertEquals( RuntimeTestEntrySeverity.INFO, summary.getOverallStatusEntry().getSeverity() ); assertEquals( "success message", summary.getOverallStatusEntry().getMessage() ); } @Test public void testError() throws Exception { when( consumer.listTopics() ).thenThrow( new KafkaException( "oops" ) ); when( namedCluster.getKafkaBootstrapServers() ).thenReturn( "kafkaHost:9092" ); when( messageGetter.getMessage( anyString(), eq( "kafkaHost:9092" ) ) ).thenReturn( "error message" ); KafkaConnectTest kafkaConnectTest = new KafkaConnectTest( messageGetterFactory, (map) -> consumer, namedClusterServiceLocator ); RuntimeTestResultSummary summary = kafkaConnectTest.runTest( namedCluster ); assertEquals( RuntimeTestEntrySeverity.ERROR, summary.getOverallStatusEntry().getSeverity() ); assertEquals( "error message", summary.getOverallStatusEntry().getMessage() ); } @Test public void testSkip() throws Exception { when( namedCluster.getKafkaBootstrapServers() ).thenReturn( " " ); when( messageGetter.getMessage( anyString() ) ).thenReturn( "skipped message" ); KafkaConnectTest kafkaConnectTest = new KafkaConnectTest( messageGetterFactory, (map) -> consumer, namedClusterServiceLocator ); RuntimeTestResultSummary summary = kafkaConnectTest.runTest( namedCluster ); assertEquals( RuntimeTestEntrySeverity.SKIPPED, summary.getOverallStatusEntry().getSeverity() ); assertEquals( "skipped message", summary.getOverallStatusEntry().getMessage() ); verify( consumer, never() ).listTopics(); } private Consumer assertConsumer( Map actualMap ) { assertEquals( "pretend-jaas-config", actualMap.get( SaslConfigs.SASL_JAAS_CONFIG ) ); return consumer; } } ================================================ FILE: impl/clusterTests/src/test/java/org/pentaho/big/data/impl/cluster/tests/mr/PingJobTrackerTestTest.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. 
* * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.impl.cluster.tests.mr; import org.junit.Before; import org.junit.Test; import org.pentaho.hadoop.shim.api.cluster.NamedCluster; import org.pentaho.runtime.test.TestMessageGetterFactory; import org.pentaho.runtime.test.i18n.MessageGetter; import org.pentaho.runtime.test.i18n.MessageGetterFactory; import org.pentaho.runtime.test.network.ConnectivityTest; import org.pentaho.runtime.test.network.ConnectivityTestFactory; import org.pentaho.runtime.test.result.RuntimeTestEntrySeverity; import org.pentaho.runtime.test.result.RuntimeTestResultEntry; import org.pentaho.runtime.test.result.RuntimeTestResultSummary; import static org.junit.Assert.assertEquals; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; /** * Created by bryan on 8/24/15. */ public class PingJobTrackerTestTest { private MessageGetterFactory messageGetterFactory; private ConnectivityTestFactory connectivityTestFactory; private PingJobTrackerTest pingJobTrackerTest; private NamedCluster namedCluster; private MessageGetter messageGetter; private String jobTrackerHost; private String jobTrackerPort; @Before public void setup() { messageGetterFactory = new TestMessageGetterFactory(); messageGetter = messageGetterFactory.create( PingJobTrackerTest.class ); connectivityTestFactory = mock( ConnectivityTestFactory.class ); pingJobTrackerTest = new PingJobTrackerTest( messageGetterFactory, connectivityTestFactory ); jobTrackerHost = "jobTrackerHost"; jobTrackerPort = "829"; namedCluster = mock( NamedCluster.class ); when( namedCluster.getJobTrackerHost() ).thenReturn( jobTrackerHost ); when( namedCluster.getJobTrackerPort() ).thenReturn( jobTrackerPort ); } @Test public void testGetName() { assertEquals( messageGetter.getMessage( PingJobTrackerTest.PING_JOB_TRACKER_TEST_NAME ), pingJobTrackerTest.getName() ); } @Test public void testSuccess() { RuntimeTestResultEntry results = mock( RuntimeTestResultEntry.class ); String testDescription = "test-description"; when( results.getDescription() ).thenReturn( testDescription ); ConnectivityTest connectivityTest = mock( ConnectivityTest.class ); when( connectivityTestFactory.create( messageGetterFactory, jobTrackerHost, jobTrackerPort, true ) ) .thenReturn( connectivityTest ); when( connectivityTest.runTest() ).thenReturn( results ); RuntimeTestResultSummary runtimeTestResultSummary = pingJobTrackerTest.runTest( namedCluster ); assertEquals( testDescription, runtimeTestResultSummary.getOverallStatusEntry().getDescription() ); assertEquals( 0, runtimeTestResultSummary.getRuntimeTestResultEntries().size() ); } @Test public void testIsMapR() { when( namedCluster.isMapr() ).thenReturn( true ); RuntimeTestResultSummary runtimeTestResultSummary = pingJobTrackerTest.runTest( namedCluster ); RuntimeTestResultEntry results = runtimeTestResultSummary.getOverallStatusEntry(); assertEquals( RuntimeTestEntrySeverity.INFO, results.getSeverity() ); assertEquals( 0, runtimeTestResultSummary.getRuntimeTestResultEntries().size() ); } } ================================================ FILE: impl/clusterTests/src/test/java/org/pentaho/big/data/impl/cluster/tests/oozie/PingOozieHostTestTest.java ================================================ /*! 
****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.impl.cluster.tests.oozie; import org.junit.Before; import org.junit.Test; import org.pentaho.hadoop.shim.api.cluster.NamedCluster; import org.pentaho.runtime.test.TestMessageGetterFactory; import org.pentaho.runtime.test.i18n.MessageGetter; import org.pentaho.runtime.test.i18n.MessageGetterFactory; import org.pentaho.runtime.test.network.ConnectivityTest; import org.pentaho.runtime.test.network.ConnectivityTestFactory; import org.pentaho.runtime.test.result.RuntimeTestEntrySeverity; import org.pentaho.runtime.test.result.RuntimeTestResultEntry; import org.pentaho.runtime.test.result.RuntimeTestResultSummary; import java.net.MalformedURLException; import static org.junit.Assert.assertEquals; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; import static org.pentaho.runtime.test.RuntimeTestEntryUtil.verifyRuntimeTestResultEntry; /** * Created by bryan on 8/24/15. */ public class PingOozieHostTestTest { private MessageGetterFactory messageGetterFactory; private ConnectivityTestFactory connectivityTestFactory; private PingOozieHostTest pingOozieHostTest; private NamedCluster namedCluster; private MessageGetter messageGetter; private String oozieUrl; private String oozieHost; private String ooziePort; @Before public void setup() { messageGetterFactory = new TestMessageGetterFactory(); messageGetter = messageGetterFactory.create( PingOozieHostTest.class ); connectivityTestFactory = mock( ConnectivityTestFactory.class ); pingOozieHostTest = new PingOozieHostTest( messageGetterFactory, connectivityTestFactory ); oozieHost = "oozieHost"; ooziePort = "8080"; oozieUrl = "http://" + oozieHost + ":" + ooziePort + "/oozie"; namedCluster = mock( NamedCluster.class ); when( namedCluster.getOozieUrl() ).thenReturn( oozieUrl ); } @Test public void testGetName() { assertEquals( messageGetter.getMessage( PingOozieHostTest.PING_OOZIE_HOST_TEST_NAME ), pingOozieHostTest.getName() ); } @Test public void testMalformedURLException() { oozieUrl = "one-malformed-url"; namedCluster = mock( NamedCluster.class ); when( namedCluster.getOozieUrl() ).thenReturn( oozieUrl ); RuntimeTestResultSummary runtimeTestResultSummary = pingOozieHostTest.runTest( namedCluster ); verifyRuntimeTestResultEntry( runtimeTestResultSummary.getOverallStatusEntry(), RuntimeTestEntrySeverity.FATAL, messageGetter.getMessage( PingOozieHostTest.PING_OOZIE_HOST_TEST_MALFORMED_URL_DESC ), messageGetter.getMessage( PingOozieHostTest.PING_OOZIE_HOST_TEST_MALFORMED_URL_MESSAGE, oozieUrl ), MalformedURLException.class ); assertEquals( 0, runtimeTestResultSummary.getRuntimeTestResultEntries().size() ); } @Test public void testSuccess() { RuntimeTestResultEntry results = mock( RuntimeTestResultEntry.class ); String testDescription = "test-description"; when( results.getDescription() ).thenReturn( testDescription ); ConnectivityTest connectivityTest = mock( ConnectivityTest.class ); when( connectivityTestFactory.create( messageGetterFactory, oozieHost, ooziePort, false ) ) .thenReturn( connectivityTest ); when( connectivityTest.runTest() ).thenReturn( results ); RuntimeTestResultSummary runtimeTestResultSummary = 
pingOozieHostTest.runTest( namedCluster ); assertEquals( testDescription, runtimeTestResultSummary.getOverallStatusEntry().getDescription() ); assertEquals( 0, runtimeTestResultSummary.getRuntimeTestResultEntries().size() ); } } ================================================ FILE: impl/clusterTests/src/test/java/org/pentaho/big/data/impl/cluster/tests/zookeeper/PingZookeeperEnsembleTestTest.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.impl.cluster.tests.zookeeper; import org.junit.Before; import org.junit.Test; import org.pentaho.hadoop.shim.api.cluster.NamedCluster; import org.pentaho.runtime.test.TestMessageGetterFactory; import org.pentaho.runtime.test.i18n.MessageGetter; import org.pentaho.runtime.test.i18n.MessageGetterFactory; import org.pentaho.runtime.test.network.ConnectivityTest; import org.pentaho.runtime.test.network.ConnectivityTestFactory; import org.pentaho.runtime.test.result.RuntimeTestEntrySeverity; import org.pentaho.runtime.test.result.RuntimeTestResultEntry; import org.pentaho.runtime.test.result.RuntimeTestResultSummary; import java.util.List; import static org.junit.Assert.assertEquals; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; import static org.pentaho.runtime.test.RuntimeTestEntryUtil.verifyRuntimeTestResultEntry; /** * Created by bryan on 8/24/15. */ public class PingZookeeperEnsembleTestTest { private MessageGetterFactory messageGetterFactory; private ConnectivityTestFactory connectivityTestFactory; private PingZookeeperEnsembleTest pingZookeeperEnsembleTest; private NamedCluster namedCluster; private String zookeeperHosts; private String zookeeperPort; private MessageGetter messageGetter; private String host1; private String host2; @Before public void setup() { messageGetterFactory = new TestMessageGetterFactory(); messageGetter = messageGetterFactory.create( PingZookeeperEnsembleTest.class ); connectivityTestFactory = mock( ConnectivityTestFactory.class ); pingZookeeperEnsembleTest = new PingZookeeperEnsembleTest( messageGetterFactory, connectivityTestFactory ); host1 = "host1"; host2 = "host2"; zookeeperHosts = host1 + "," + host2; zookeeperPort = "2181"; namedCluster = mock( NamedCluster.class ); when( namedCluster.getZooKeeperHost() ).thenReturn( zookeeperHosts ); when( namedCluster.getZooKeeperPort() ).thenReturn( zookeeperPort ); } @Test public void testBlankHost() { namedCluster = mock( NamedCluster.class ); RuntimeTestResultSummary runtimeTestResultSummary = pingZookeeperEnsembleTest.runTest( namedCluster ); verifyRuntimeTestResultEntry( runtimeTestResultSummary.getOverallStatusEntry(), RuntimeTestEntrySeverity.FATAL, messageGetter.getMessage( PingZookeeperEnsembleTest.PING_ZOOKEEPER_ENSEMBLE_TEST_BLANK_HOST_DESC ), messageGetter.getMessage( PingZookeeperEnsembleTest.PING_ZOOKEEPER_ENSEMBLE_TEST_BLANK_HOST_MESSAGE ) ); assertEquals( 0, runtimeTestResultSummary.getRuntimeTestResultEntries().size() ); } @Test public void testBlankPort() { namedCluster = mock( NamedCluster.class ); when( namedCluster.getZooKeeperHost() ).thenReturn( zookeeperHosts ); RuntimeTestResultSummary runtimeTestResultSummary = 
pingZookeeperEnsembleTest.runTest( namedCluster ); verifyRuntimeTestResultEntry( runtimeTestResultSummary.getOverallStatusEntry(), RuntimeTestEntrySeverity.FATAL, messageGetter.getMessage( PingZookeeperEnsembleTest.PING_ZOOKEEPER_ENSEMBLE_TEST_BLANK_PORT_DESC ), messageGetter.getMessage( PingZookeeperEnsembleTest.PING_ZOOKEEPER_ENSEMBLE_TEST_BLANK_PORT_MESSAGE ) ); } @Test public void testNoFailures() { ConnectivityTest connectivityTest = mock( ConnectivityTest.class ); when( connectivityTestFactory .create( messageGetterFactory, host1, zookeeperPort, false, RuntimeTestEntrySeverity.WARNING ) ).thenReturn( connectivityTest ); when( connectivityTestFactory .create( messageGetterFactory, host2, zookeeperPort, false, RuntimeTestEntrySeverity.WARNING ) ).thenReturn( connectivityTest ); RuntimeTestResultEntry clusterTestResultEntry = mock( RuntimeTestResultEntry.class ); when( clusterTestResultEntry.getSeverity() ).thenReturn( RuntimeTestEntrySeverity.INFO ); when( connectivityTest.runTest() ).thenReturn( clusterTestResultEntry ); String testDescription = "test-description"; when( clusterTestResultEntry.getDescription() ).thenReturn( testDescription ); RuntimeTestResultSummary runtimeTestResultSummary = pingZookeeperEnsembleTest.runTest( namedCluster ); List clusterTestResultEntries = runtimeTestResultSummary .getRuntimeTestResultEntries(); assertEquals( 2, clusterTestResultEntries.size() ); assertEquals( testDescription, clusterTestResultEntries.get( 0 ).getDescription() ); assertEquals( testDescription, clusterTestResultEntries.get( 1 ).getDescription() ); } @Test public void testOneFailure() { ConnectivityTest connectivityTest = mock( ConnectivityTest.class ); ConnectivityTest connectivityTest2 = mock( ConnectivityTest.class ); when( connectivityTestFactory .create( messageGetterFactory, host1, zookeeperPort, false, RuntimeTestEntrySeverity.WARNING ) ).thenReturn( connectivityTest ); when( connectivityTestFactory .create( messageGetterFactory, host2, zookeeperPort, false, RuntimeTestEntrySeverity.WARNING ) ).thenReturn( connectivityTest2 ); RuntimeTestResultEntry clusterTestResultEntry = mock( RuntimeTestResultEntry.class ); when( clusterTestResultEntry.getSeverity() ).thenReturn( RuntimeTestEntrySeverity.INFO ); when( connectivityTest.runTest() ).thenReturn( clusterTestResultEntry ); RuntimeTestResultEntry clusterTestResultEntry2 = mock( RuntimeTestResultEntry.class ); when( clusterTestResultEntry2.getSeverity() ).thenReturn( RuntimeTestEntrySeverity.WARNING ); when( connectivityTest2.runTest() ).thenReturn( clusterTestResultEntry2 ); String testDescription = "test-description"; when( clusterTestResultEntry.getDescription() ).thenReturn( testDescription ); String testDescription2 = "test-description2"; when( clusterTestResultEntry2.getDescription() ).thenReturn( testDescription2 ); RuntimeTestResultSummary runtimeTestResultSummary = pingZookeeperEnsembleTest.runTest( namedCluster ); List clusterTestResultEntries = runtimeTestResultSummary .getRuntimeTestResultEntries(); assertEquals( 2, clusterTestResultEntries.size() ); verifyRuntimeTestResultEntry( runtimeTestResultSummary.getOverallStatusEntry(), RuntimeTestEntrySeverity.WARNING, messageGetter.getMessage( PingZookeeperEnsembleTest.PING_ZOOKEEPER_ENSEMBLE_TEST_SOME_NODES_FAILED_DESC ), messageGetter.getMessage( PingZookeeperEnsembleTest.PING_ZOOKEEPER_ENSEMBLE_TEST_SOME_NODES_FAILED_MESSAGE, host2 ) ); assertEquals( testDescription, clusterTestResultEntries.get( 0 ).getDescription() ); assertEquals( testDescription2, 
clusterTestResultEntries.get( 1 ).getDescription() ); } @Test public void testAllFailures() { ConnectivityTest connectivityTest = mock( ConnectivityTest.class ); ConnectivityTest connectivityTest2 = mock( ConnectivityTest.class ); when( connectivityTestFactory .create( messageGetterFactory, host1, zookeeperPort, false, RuntimeTestEntrySeverity.WARNING ) ).thenReturn( connectivityTest ); when( connectivityTestFactory .create( messageGetterFactory, host2, zookeeperPort, false, RuntimeTestEntrySeverity.WARNING ) ).thenReturn( connectivityTest2 ); RuntimeTestResultEntry clusterTestResultEntry = mock( RuntimeTestResultEntry.class ); when( clusterTestResultEntry.getSeverity() ).thenReturn( RuntimeTestEntrySeverity.WARNING ); when( connectivityTest.runTest() ).thenReturn( clusterTestResultEntry ); RuntimeTestResultEntry clusterTestResultEntry2 = mock( RuntimeTestResultEntry.class ); when( clusterTestResultEntry2.getSeverity() ).thenReturn( RuntimeTestEntrySeverity.WARNING ); when( connectivityTest2.runTest() ).thenReturn( clusterTestResultEntry2 ); String testDescription = "test-description"; when( clusterTestResultEntry.getDescription() ).thenReturn( testDescription ); String testDescription2 = "test-description2"; when( clusterTestResultEntry2.getDescription() ).thenReturn( testDescription2 ); RuntimeTestResultSummary runtimeTestResultSummary = pingZookeeperEnsembleTest.runTest( namedCluster ); List clusterTestResultEntries = runtimeTestResultSummary .getRuntimeTestResultEntries(); assertEquals( 2, clusterTestResultEntries.size() ); verifyRuntimeTestResultEntry( runtimeTestResultSummary.getOverallStatusEntry(), RuntimeTestEntrySeverity.FATAL, messageGetter.getMessage( PingZookeeperEnsembleTest.PING_ZOOKEEPER_ENSEMBLE_TEST_NO_NODES_SUCCEEDED_DESC ), messageGetter.getMessage( PingZookeeperEnsembleTest.PING_ZOOKEEPER_ENSEMBLE_TEST_NO_NODES_SUCCEEDED_MESSAGE, host1 + ", " + host2 ) ); assertEquals( testDescription, clusterTestResultEntries.get( 0 ).getDescription() ); assertEquals( testDescription2, clusterTestResultEntries.get( 1 ).getDescription() ); } } ================================================ FILE: impl/pom.xml ================================================ 4.0.0 pentaho pentaho-big-data-parent 11.1.0.0-SNAPSHOT pentaho-big-data-impl 11.1.0.0-SNAPSHOT pom cluster clusterTests shim vfs-hdfs ================================================ FILE: impl/shim/jaas/pom.xml ================================================ 4.0.0 pentaho pentaho-big-data-impl-shim 11.1.0.0-SNAPSHOT pentaho-big-data-impl-shim-jaas 11.1.0.0-SNAPSHOT jar Pentaho Community Edition Project: ${project.artifactId} a Pentaho open source project http://www.pentaho.com site org.pentaho shim-api ${pentaho-hadoop-shims.version} pentaho pentaho-big-data-legacy ${project.version} org.slf4j slf4j-api junit junit ${dependency.junit.revision} test org.mockito mockito-all ${dependency.mockito.revision} test ================================================ FILE: impl/shim/jaas/src/main/java/org/pentaho/big/data/impl/shim/jaas/JaasConfigServiceFactory.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. 
* * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.impl.shim.jaas; import org.pentaho.hadoop.shim.api.jaas.JaasConfigService; import org.pentaho.hadoop.shim.api.cluster.NamedCluster; import org.pentaho.hadoop.shim.api.cluster.NamedClusterServiceFactory; import java.util.Properties; public class JaasConfigServiceFactory implements NamedClusterServiceFactory { public JaasConfigServiceFactory( @SuppressWarnings( "unused" ) boolean isActiveConfiguration, Object hadoopConfiguration ) { } @Override public Class getServiceClass() { return JaasConfigService.class; } @Override public boolean canHandle( NamedCluster namedCluster ) { return true; } @Override public JaasConfigService create( NamedCluster namedCluster ) { return new JaasConfigServiceImpl( new Properties() ); } } ================================================ FILE: impl/shim/jaas/src/main/java/org/pentaho/big/data/impl/shim/jaas/JaasConfigServiceImpl.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.impl.shim.jaas; import org.pentaho.hadoop.shim.api.jaas.JaasConfigService; import java.util.Properties; public class JaasConfigServiceImpl implements JaasConfigService { public static final String KERBEROS_PRINCIPAL = "pentaho.authentication.default.kerberos.principal"; public static final String KERBEROS_KEYTAB = "pentaho.authentication.default.kerberos.keytabLocation"; private Properties configProperties; public JaasConfigServiceImpl( Properties configProperties ) { this.configProperties = configProperties; } @Override public String getJaasConfig() { return "com.sun.security.auth.module.Krb5LoginModule required\n" + "useKeyTab=true\n" + "serviceName=kafka\n" + "keyTab=\"" + configProperties.getProperty( KERBEROS_KEYTAB ) + "\"\n" + "principal=\"" + configProperties.getProperty( KERBEROS_PRINCIPAL ) + "\";"; } @Override public boolean isKerberos() { Object principal = configProperties.get( KERBEROS_PRINCIPAL ); Object keytab = configProperties.get( KERBEROS_KEYTAB ); return principal != null && keytab != null && !"".equals( principal ) && !"".equals( keytab ); } } ================================================ FILE: impl/shim/jaas/src/main/resources/OSGI-INF/blueprint/blueprint.xml ================================================ ================================================ FILE: impl/shim/jaas/src/main/resources/org/pentaho/big/data/impl/shim/jaas/messages.properties ================================================ jaas.config.service.load.error=Unable to register JaasConfigService for ? shim ================================================ FILE: impl/shim/jaas/src/test/java/org/pentaho/big/data/impl/shim/jaas/JaasConfigServiceFactoryTest.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. 
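A minimal sketch of how the JAAS entry produced by JaasConfigServiceImpl above might be consumed. Only the two property keys and the service API come from the code above; the property values and the idea of handing the string to a Kafka client via its sasl.jaas.config setting are illustrative assumptions.

import java.util.Properties;

import org.pentaho.big.data.impl.shim.jaas.JaasConfigServiceImpl;

public class JaasConfigUsageSketch {
  public static void main( String[] args ) {
    // Keys are the constants defined on JaasConfigServiceImpl; the values are hypothetical.
    Properties shimProperties = new Properties();
    shimProperties.setProperty( JaasConfigServiceImpl.KERBEROS_PRINCIPAL, "etl@EXAMPLE.COM" );
    shimProperties.setProperty( JaasConfigServiceImpl.KERBEROS_KEYTAB, "/etc/security/keytabs/etl.keytab" );

    JaasConfigServiceImpl jaasConfig = new JaasConfigServiceImpl( shimProperties );
    if ( jaasConfig.isKerberos() ) {
      // One possible consumer: pass the generated Krb5LoginModule entry to a Kafka client.
      Properties kafkaClientProperties = new Properties();
      kafkaClientProperties.setProperty( "sasl.jaas.config", jaasConfig.getJaasConfig() );
      System.out.println( kafkaClientProperties.getProperty( "sasl.jaas.config" ) );
    }
  }
}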
* * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.impl.shim.jaas; import org.junit.Test; import org.pentaho.hadoop.shim.api.cluster.NamedCluster; import java.util.Properties; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; import static org.mockito.Mockito.mock; public class JaasConfigServiceFactoryTest { @Test public void testCreatesAJaasConfig() { NamedCluster namedCluster = mock( NamedCluster.class ); JaasConfigServiceFactory factory = new JaasConfigServiceFactory( true, null ); Properties configProperties = new Properties(); configProperties.setProperty( JaasConfigServiceImpl.KERBEROS_PRINCIPAL, "three@domain.com" ); configProperties.setProperty( JaasConfigServiceImpl.KERBEROS_KEYTAB, "/user/two/file.keytab" ); assertTrue( factory.canHandle( namedCluster ) ); assertEquals( "com.sun.security.auth.module.Krb5LoginModule required\n" + "useKeyTab=true\n" + "serviceName=kafka\n" + "keyTab=\"/user/two/file.keytab\"\n" + "principal=\"three@domain.com\";", factory.create( namedCluster ).getJaasConfig() ); } } ================================================ FILE: impl/shim/jaas/src/test/java/org/pentaho/big/data/impl/shim/jaas/JaasConfigServiceImplTest.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.impl.shim.jaas; import org.junit.Test; import java.util.Properties; import static org.junit.Assert.*; public class JaasConfigServiceImplTest { @Test public void testEmptyPrincipalIsNotKerberos() throws Exception { Properties configProperties = new Properties(); configProperties.setProperty( JaasConfigServiceImpl.KERBEROS_KEYTAB, "/user/path/file.keytab" ); JaasConfigServiceImpl service = new JaasConfigServiceImpl( configProperties ); assertFalse( service.isKerberos() ); } @Test public void testEmptyKeytabIsNotKerberos() throws Exception { Properties configProperties = new Properties(); configProperties.setProperty( JaasConfigServiceImpl.KERBEROS_PRINCIPAL, "me@host.com" ); JaasConfigServiceImpl service = new JaasConfigServiceImpl( configProperties ); assertFalse( service.isKerberos() ); } @Test public void testJaasWithKerberosKeytab() throws Exception { Properties configProperties = new Properties(); configProperties.setProperty( JaasConfigServiceImpl.KERBEROS_PRINCIPAL, "user@domain.com" ); configProperties.setProperty( JaasConfigServiceImpl.KERBEROS_KEYTAB, "/user/path/file.keytab" ); JaasConfigServiceImpl service = new JaasConfigServiceImpl( configProperties ); assertTrue( service.isKerberos() ); assertEquals( "com.sun.security.auth.module.Krb5LoginModule required\n" + "useKeyTab=true\n" + "serviceName=kafka\n" + "keyTab=\"/user/path/file.keytab\"\n" + "principal=\"user@domain.com\";", service.getJaasConfig() ); } } ================================================ FILE: impl/shim/pig/pdi-testName ================================================ ================================================ FILE: impl/shim/pom.xml ================================================ 4.0.0 pentaho pentaho-big-data-impl 11.1.0.0-SNAPSHOT pentaho-big-data-impl-shim 11.1.0.0-SNAPSHOT pom 
shimTests ================================================ FILE: impl/shim/shimTests/pom.xml ================================================ 4.0.0 pentaho pentaho-big-data-impl-shim 11.1.0.0-SNAPSHOT pentaho-big-data-impl-shim-shimTests 11.1.0.0-SNAPSHOT jar Pentaho Community Edition Project: ${project.artifactId} a Pentaho open source project http://www.pentaho.com site pentaho pentaho-big-data-impl-clusterTests ${project.version} pentaho pentaho-big-data-legacy ${project.version} org.apache.hadoop hadoop-core 0.20.2 provided org.apache.aries.blueprint org.apache.aries.blueprint.core provided org.osgi osgi.core org.slf4j slf4j-api provided junit junit ${dependency.junit.revision} test org.mockito mockito-all ${dependency.mockito.revision} test pentaho pentaho-big-data-api-runtimeTest ${project.version} tests test org.pentaho shim-api ${pentaho-hadoop-shims.version} ================================================ FILE: impl/shim/shimTests/src/main/java/org/pentaho/big/data/impl/shim/tests/TestShimConfig.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.impl.shim.tests; import org.pentaho.hadoop.shim.api.cluster.NamedCluster; import org.pentaho.big.data.impl.cluster.tests.ClusterRuntimeTestEntry; import org.pentaho.hadoop.shim.api.hdfs.HadoopFileSystemLocator; import org.pentaho.di.core.Const; import org.pentaho.di.core.variables.Variables; import org.pentaho.hadoop.shim.api.hdfs.HadoopFileSystem; import org.pentaho.runtime.test.i18n.MessageGetter; import org.pentaho.runtime.test.i18n.MessageGetterFactory; import org.pentaho.runtime.test.result.RuntimeTestEntrySeverity; import org.pentaho.runtime.test.result.RuntimeTestResultSummary; import org.pentaho.runtime.test.result.org.pentaho.runtime.test.result.impl.RuntimeTestResultSummaryImpl; import org.pentaho.runtime.test.test.impl.BaseRuntimeTest; import java.util.Arrays; import java.util.HashSet; /** * Created by mburgess on 9/1/15. 
*/ public class TestShimConfig extends BaseRuntimeTest { public static final String HADOOP_CONFIGURATION_TEST_SHIM_CONFIG = "hadoopConfigurationTestShimXConfig"; public static final String TEST_SHIM_CONFIG_NAME = "TestShimConfig.Name"; public static final String TEST_SHIM_CONFIG_FS_MATCH_DESC = "TestShimConfig.FileSystemMatch.Desc"; public static final String TEST_SHIM_CONFIG_FS_MATCH_MESSAGE = "TestShimConfig.FileSystemMatch.Message"; public static final String TEST_SHIM_CONFIG_FS_NOMATCH_DESC = "TestShimConfig.FileSystemNoMatch.Desc"; public static final String TEST_SHIM_CONFIG_FS_NOMATCH_MESSAGE = "TestShimConfig.FileSystemNoMatch.Message"; private static final Class PKG = TestShimConfig.class; private final MessageGetterFactory messageGetterFactory; private final MessageGetter messageGetter; private HadoopFileSystemLocator hadoopFileSystemLocator; public TestShimConfig( HadoopFileSystemLocator hadoopFileSystemLocator, MessageGetterFactory messageGetterFactory ) { super( NamedCluster.class, TestShimLoad.HADOOP_CONFIGURATION_MODULE, HADOOP_CONFIGURATION_TEST_SHIM_CONFIG, messageGetterFactory.create( PKG ).getMessage( TEST_SHIM_CONFIG_NAME ), true, new HashSet<>( Arrays.asList( TestShimLoad.HADOOP_CONFIGURATION_TEST_SHIM_LOAD ) ) ); this.messageGetterFactory = messageGetterFactory; messageGetter = messageGetterFactory.create( PKG ); this.hadoopFileSystemLocator = hadoopFileSystemLocator; } @Override public RuntimeTestResultSummary runTest( Object objectUnderTest ) { try { // Get the active shim NamedCluster namedCluster = (NamedCluster) objectUnderTest; HadoopFileSystem hadoopFileSystem = hadoopFileSystemLocator.getHadoopFilesystem( namedCluster ); String defaultFS = hadoopFileSystem.getFsDefaultName(); // Get the named cluster // The connection information might be parameterized. Since we aren't tied to a transformation or job, in order to // use a parameter, the value would have to be set as a system property or in kettle.properties, etc. 
// Here we try to resolve the parameters if we can: Variables variables = new Variables(); variables.initializeVariablesFrom( null ); // Build up a "defaultFS" property to check against the config StringBuilder ncFS = new StringBuilder( namedCluster.getStorageScheme() + "://" ); ncFS.append( variables.environmentSubstitute( namedCluster.getHdfsHost() ) ); String port = variables.environmentSubstitute( namedCluster.getHdfsPort() ); if ( !Const.isEmpty( port ) ) { ncFS.append( ":" ); ncFS.append( port ); } if ( !ncFS.toString().equalsIgnoreCase( defaultFS ) ) { return new RuntimeTestResultSummaryImpl( new ClusterRuntimeTestEntry( messageGetterFactory, RuntimeTestEntrySeverity.WARNING, messageGetter.getMessage( TEST_SHIM_CONFIG_FS_NOMATCH_DESC ), messageGetter.getMessage( TEST_SHIM_CONFIG_FS_NOMATCH_MESSAGE, ncFS.toString() ), ClusterRuntimeTestEntry.DocAnchor.SHIM_LOAD ) ); } return new RuntimeTestResultSummaryImpl( new ClusterRuntimeTestEntry( messageGetterFactory, RuntimeTestEntrySeverity.INFO, messageGetter.getMessage( TEST_SHIM_CONFIG_FS_MATCH_DESC ), messageGetter.getMessage( TEST_SHIM_CONFIG_FS_MATCH_MESSAGE ), ClusterRuntimeTestEntry.DocAnchor.SHIM_LOAD ) ); } catch ( Exception e ) { return new RuntimeTestResultSummaryImpl( new ClusterRuntimeTestEntry( messageGetterFactory, RuntimeTestEntrySeverity.ERROR, messageGetter.getMessage( TestShimLoad.TEST_SHIM_LOAD_NO_SHIM_SPECIFIED_DESC ), e.getMessage(), e, ClusterRuntimeTestEntry.DocAnchor.SHIM_LOAD ) ); } } } ================================================ FILE: impl/shim/shimTests/src/main/java/org/pentaho/big/data/impl/shim/tests/TestShimLoad.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.impl.shim.tests; import org.pentaho.hadoop.shim.api.cluster.NamedCluster; import org.pentaho.big.data.impl.cluster.tests.ClusterRuntimeTestEntry; import org.pentaho.runtime.test.i18n.MessageGetter; import org.pentaho.runtime.test.i18n.MessageGetterFactory; import org.pentaho.runtime.test.result.RuntimeTestEntrySeverity; import org.pentaho.runtime.test.result.RuntimeTestResultSummary; import org.pentaho.runtime.test.result.org.pentaho.runtime.test.result.impl.RuntimeTestResultSummaryImpl; import org.pentaho.runtime.test.test.impl.BaseRuntimeTest; import java.util.HashSet; /** * Created by bryan on 8/14/15. 
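The core of TestShimConfig above is the comparison between a defaultFS URL rebuilt from the NamedCluster and the one reported by the shim's HadoopFileSystem. The following self-contained sketch restates just that comparison; the class and method names are hypothetical, and the Pentaho variable substitution and Const.isEmpty check are left out.

public class DefaultFsCheckSketch {
  // Rebuild scheme://host[:port] the way TestShimConfig does and compare it, ignoring case,
  // against the fs.defaultFS value obtained from the shim.
  static boolean matchesDefaultFs( String scheme, String host, String port, String defaultFs ) {
    StringBuilder ncFs = new StringBuilder( scheme ).append( "://" ).append( host );
    if ( port != null && !port.isEmpty() ) {
      ncFs.append( ":" ).append( port );
    }
    return ncFs.toString().equalsIgnoreCase( defaultFs );
  }

  public static void main( String[] args ) {
    System.out.println( matchesDefaultFs( "hdfs", "namenode.local", "8020", "hdfs://NameNode.local:8020" ) ); // true  -> INFO entry
    System.out.println( matchesDefaultFs( "hdfs", "othernode", "8020", "hdfs://namenode.local:8020" ) );      // false -> WARNING entry
  }
}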
*/ public class TestShimLoad extends BaseRuntimeTest { public static final String HADOOP_CONFIGURATION_TEST_SHIM_LOAD = "hadoopConfigurationTestShimLoad"; public static final String TEST_SHIM_LOAD_NAME = "TestShimLoad.Name"; public static final String TEST_SHIM_LOAD_SHIM_LOADED_DESC = "TestShimLoad.ShimLoaded.Desc"; public static final String TEST_SHIM_LOAD_SHIM_LOADED_MESSAGE = "TestShimLoad.ShimLoaded.Message"; public static final String TEST_SHIM_LOAD_NO_SHIM_SPECIFIED_DESC = "TestShimLoad.NoShimSpecified.Desc"; public static final String TEST_SHIM_LOAD_UNABLE_TO_LOAD_SHIM_DESC = "TestShimLoad.UnableToLoadShim.Desc"; public static final String HADOOP_CONFIGURATION_MODULE = "Hadoop Configuration"; private static final Class PKG = TestShimLoad.class; private final MessageGetterFactory messageGetterFactory; private final MessageGetter messageGetter; public TestShimLoad( MessageGetterFactory messageGetterFactory ) { super( NamedCluster.class, HADOOP_CONFIGURATION_MODULE, HADOOP_CONFIGURATION_TEST_SHIM_LOAD, messageGetterFactory.create( PKG ).getMessage( TEST_SHIM_LOAD_NAME ), true, new HashSet() ); this.messageGetterFactory = messageGetterFactory; messageGetter = messageGetterFactory.create( PKG ); } @Override public RuntimeTestResultSummary runTest( Object objectUnderTest ) { try { NamedCluster namedCluster = (NamedCluster) objectUnderTest; String shimIdentifier = namedCluster.getShimIdentifier(); return new RuntimeTestResultSummaryImpl( new ClusterRuntimeTestEntry( messageGetterFactory, RuntimeTestEntrySeverity.INFO, messageGetter.getMessage( TEST_SHIM_LOAD_SHIM_LOADED_DESC, shimIdentifier ), messageGetter.getMessage( TEST_SHIM_LOAD_SHIM_LOADED_MESSAGE, shimIdentifier ), ClusterRuntimeTestEntry.DocAnchor.SHIM_LOAD ) ); } catch ( Exception e ) { return new RuntimeTestResultSummaryImpl( new ClusterRuntimeTestEntry( messageGetterFactory, RuntimeTestEntrySeverity.ERROR, messageGetter.getMessage( TEST_SHIM_LOAD_NO_SHIM_SPECIFIED_DESC ), e.getMessage(), e, ClusterRuntimeTestEntry.DocAnchor.SHIM_LOAD ) ); } } } ================================================ FILE: impl/shim/shimTests/src/main/resources/OSGI-INF/blueprint/blueprint.xml ================================================ ================================================ FILE: impl/shim/shimTests/src/main/resources/org/pentaho/big/data/impl/shim/tests/messages/messages_en_US.properties ================================================ TestShimLoad.Name=Active Shim Load TestShimLoad.ShimLoaded.Desc=Successfully loaded the {0} shim. TestShimLoad.ShimLoaded.Message=Successfully loaded the {0} shim. TestShimLoad.NoShimSpecified.Desc=The Active Shim has not been set. TestShimLoad.UnableToLoadShim.Desc=Unable to load the {0} Shim. TestShimConfig.Name=Shim Configuration Verification TestShimConfig.FileSystemMatch.Desc=The Hadoop File System URL matches the Active shim. TestShimConfig.FileSystemMatch.Message=The Hadoop File System URL matches the URL in the shim configuration file. TestShimConfig.FileSystemNoMatch.Desc=The Hadoop File System URL does not match the URL in the shim's core-site.xml. TestShimConfig.FileSystemNoMatch.Message=The Hadoop File System URL {0} does not match the defaultFS Hadoop config property in the shim's core-site.xml. Be sure to get the site configuration files from the Hadoop cluster. ================================================ FILE: impl/shim/shimTests/src/test/java/org/pentaho/big/data/impl/shim/tests/TestShimLoadTest.java ================================================ /*! 
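The {0} placeholders in the messages_en_US.properties entries above are filled with the arguments passed to MessageGetter.getMessage, for example the shim identifier in TestShimLoad. As a rough illustration of that substitution only, not of the actual bundle lookup, a plain MessageFormat call produces the same shape of output; the shim identifier here is hypothetical.

import java.text.MessageFormat;

public class ShimMessageSketch {
  public static void main( String[] args ) {
    String pattern = "Successfully loaded the {0} shim."; // TestShimLoad.ShimLoaded.Desc
    System.out.println( MessageFormat.format( pattern, "cdh71" ) ); // "Successfully loaded the cdh71 shim."
  }
}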
****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.impl.shim.tests; import org.junit.Before; import org.junit.Test; import org.pentaho.hadoop.shim.api.ConfigurationException; import org.pentaho.hadoop.shim.api.cluster.NamedCluster; import org.pentaho.runtime.test.TestMessageGetterFactory; import org.pentaho.runtime.test.i18n.MessageGetter; import org.pentaho.runtime.test.i18n.MessageGetterFactory; import org.pentaho.runtime.test.result.RuntimeTestEntrySeverity; import org.pentaho.runtime.test.result.RuntimeTestResultSummary; import static org.junit.Assert.assertEquals; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; import static org.pentaho.runtime.test.RuntimeTestEntryUtil.verifyRuntimeTestResultEntry; /** * Created by bryan on 8/24/15. */ public class TestShimLoadTest { private MessageGetterFactory messageGetterFactory; private MessageGetter messageGetter; private TestShimLoad testShimLoad; private NamedCluster namedCluster; @Before public void setup() { messageGetterFactory = new TestMessageGetterFactory(); messageGetter = messageGetterFactory.create( TestShimLoad.class ); testShimLoad = new TestShimLoad( messageGetterFactory ); namedCluster = mock( NamedCluster.class ); } @Test public void testGetName() { assertEquals( messageGetter.getMessage( TestShimLoad.TEST_SHIM_LOAD_NAME ), testShimLoad.getName() ); } @Test public void testConfigurationException() throws ConfigurationException { String testMessage = "testMessage"; when( namedCluster.getShimIdentifier() ).thenThrow( new RuntimeException( testMessage ) ); RuntimeTestResultSummary runtimeTestResultSummary = testShimLoad.runTest( namedCluster ); verifyRuntimeTestResultEntry( runtimeTestResultSummary.getOverallStatusEntry(), RuntimeTestEntrySeverity.ERROR, messageGetter.getMessage( TestShimLoad.TEST_SHIM_LOAD_NO_SHIM_SPECIFIED_DESC ), testMessage, RuntimeException.class ); assertEquals( 0, runtimeTestResultSummary.getRuntimeTestResultEntries().size() ); } @Test public void testSuccess() throws ConfigurationException { String testShim = "testShim"; when( namedCluster.getShimIdentifier() ).thenReturn( testShim ); RuntimeTestResultSummary runtimeTestResultSummary = testShimLoad.runTest( namedCluster ); verifyRuntimeTestResultEntry( runtimeTestResultSummary.getOverallStatusEntry(), RuntimeTestEntrySeverity.INFO, messageGetter.getMessage( TestShimLoad.TEST_SHIM_LOAD_SHIM_LOADED_DESC, testShim ), messageGetter.getMessage( TestShimLoad.TEST_SHIM_LOAD_SHIM_LOADED_MESSAGE, testShim ) ); assertEquals( 0, runtimeTestResultSummary.getRuntimeTestResultEntries().size() ); } } ================================================ FILE: impl/vfs-hdfs/pom.xml ================================================ 4.0.0 pentaho pentaho-big-data-impl 11.1.0.0-SNAPSHOT pentaho-big-data-impl-vfs-hdfs-core 11.1.0.0-SNAPSHOT jar Pentaho Community Edition Project: ${project.artifactId} site 5.14.2 11.1.0.0-SNAPSHOT pentaho-kettle kettle-core ${pdi.version} provided org.pentaho.di.plugins pentaho-metastore-locator-api ${pdi.version} provided org.pentaho shim-api ${pentaho-hadoop-shims.version} pentaho metastore ${metastore.version} pentaho pentaho-big-data-impl-cluster ${project.version} 
org.apache.commons commons-vfs2 org.apache.commons commons-lang3 ${commons-lang3.version} junit junit ${dependency.junit.revision} org.mockito mockito-core ${mockito.version} test org.apache.logging.log4j log4j-1.2-api ${log4j.version} ================================================ FILE: impl/vfs-hdfs/src/main/java/org/pentaho/big/data/impl/vfs/hdfs/AzureHdInsightsFileNameParser.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.impl.vfs.hdfs; import org.apache.commons.vfs2.provider.URLFileNameParser; @SuppressWarnings( "deprecation" ) public class AzureHdInsightsFileNameParser extends URLFileNameParser { public static final String EMPTY_HOSTNAME = ""; private static final AzureHdInsightsFileNameParser INSTANCE = new AzureHdInsightsFileNameParser(); private AzureHdInsightsFileNameParser() { super( -1 ); } public static AzureHdInsightsFileNameParser getInstance() { return INSTANCE; } /** * Extracts the hostname from a URI. * * @param name string buffer with the "scheme://[userinfo@]" part has been removed already. Will be modified. * @return the host name or null. */ @Override protected String extractHostName( StringBuilder name ) { final String hostname = super.extractHostName( name ); // Trick the URLFileNameParser into thinking we have a hostname so we don't have to refactor it. return hostname == null ? EMPTY_HOSTNAME : hostname; } } ================================================ FILE: impl/vfs-hdfs/src/main/java/org/pentaho/big/data/impl/vfs/hdfs/HDFSFileNameParser.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.impl.vfs.hdfs; import org.apache.commons.vfs2.FileName; import org.apache.commons.vfs2.FileSystemException; import org.apache.commons.vfs2.provider.URLFileName; import org.apache.commons.vfs2.provider.URLFileNameParser; import org.apache.commons.vfs2.provider.UriParser; import org.apache.commons.vfs2.provider.VfsComponentContext; public class HDFSFileNameParser extends URLFileNameParser { private static final HDFSFileNameParser INSTANCE = new HDFSFileNameParser(); private HDFSFileNameParser() { super( -1 ); } public static HDFSFileNameParser getInstance() { return INSTANCE; } @Override public FileName parseUri( VfsComponentContext context, FileName base, String filename ) throws FileSystemException { URLFileName fileNameURLFileName = (URLFileName) super.parseUri( context, base, filename ); return new URLFileName( fileNameURLFileName.getScheme(), getHostNameCaseSensitive( filename ), fileNameURLFileName.getPort(), fileNameURLFileName.getDefaultPort(), fileNameURLFileName.getUserName(), fileNameURLFileName.getPassword(), fileNameURLFileName.getPath(), fileNameURLFileName.getType(), fileNameURLFileName.getQueryString() ); } /** * PDI-15565 *
* the same logic as for extracting in org.apache.commons.vfs2.provider.HostFileNameParser.extractToPath * * @param fileUri file uri for hdfs file * @return case sensitive host name * @throws FileSystemException when format of url is not correct */ private String getHostNameCaseSensitive( String fileUri ) throws FileSystemException { StringBuilder fullNameBuilder = new StringBuilder(); UriParser.extractScheme( fileUri, fullNameBuilder ); if ( fullNameBuilder.length() < 2 || fullNameBuilder.charAt( 0 ) != '/' || fullNameBuilder.charAt( 1 ) != '/' ) { throw new FileSystemException( "vfs.provider/missing-double-slashes.error", fileUri ); } fullNameBuilder.delete( 0, 2 ); extractPort( fullNameBuilder, fileUri ); extractUserInfo( fullNameBuilder ); return extractHostName( fullNameBuilder ); } } ================================================ FILE: impl/vfs-hdfs/src/main/java/org/pentaho/big/data/impl/vfs/hdfs/HDFSFileObject.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.impl.vfs.hdfs; import org.apache.commons.vfs2.FileObject; import org.apache.commons.vfs2.FileSystemException; import org.apache.commons.vfs2.FileType; import org.apache.commons.vfs2.provider.AbstractFileName; import org.apache.commons.vfs2.provider.AbstractFileObject; import org.pentaho.hadoop.shim.api.hdfs.HadoopFileStatus; import org.pentaho.hadoop.shim.api.hdfs.HadoopFileSystem; import java.io.InputStream; import java.io.OutputStream; public class HDFSFileObject extends AbstractFileObject { private HadoopFileSystem hdfs; public HDFSFileObject( final AbstractFileName name, final HDFSFileSystem fileSystem ) throws FileSystemException { super( name, fileSystem ); hdfs = fileSystem.getHDFSFileSystem(); } @Override protected long doGetContentSize() throws Exception { return hdfs.getFileStatus( hdfs.getPath( getName().getPath() ) ).getLen(); } @Override protected OutputStream doGetOutputStream( boolean append ) throws Exception { OutputStream out; if ( append ) { out = hdfs.append( hdfs.getPath( getName().getPath() ) ); } else { out = hdfs.create( hdfs.getPath( getName().getPath() ) ); } return out; } @Override protected InputStream doGetInputStream() throws Exception { return hdfs.open( hdfs.getPath( getName().getPath() ) ); } @Override protected InputStream doGetInputStream( final int bufferSize ) throws Exception { return this.doGetInputStream(); } @Override protected FileType doGetType() throws Exception { HadoopFileStatus status = null; if ( null == hdfs ) { throw new IllegalStateException( "No HDFS file system present" ); } try { status = hdfs.getFileStatus( hdfs.getPath( getName().getPath() ) ); } catch ( Exception ex ) { // Ignore } if ( status == null ) { return FileType.IMAGINARY; } else if ( status.isDir() ) { return FileType.FOLDER; } else { return FileType.FILE; } } @Override public void doCreateFolder() throws Exception { hdfs.mkdirs( hdfs.getPath( getName().getPath() ) ); } @Override public void doDelete() throws Exception { hdfs.delete( hdfs.getPath( getName().getPath() ), true ); } @Override protected void doRename( FileObject newfile ) throws Exception { hdfs.rename( hdfs.getPath( getName().getPath() ), 
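HDFSFileNameParser above exists to keep the hostname's original case, which matters because the host portion of the URL can be a named cluster name that is matched case-sensitively (see the PDI-15565 reference in the comment). A small sketch of invoking it directly, assuming a null VfsComponentContext and base FileName are acceptable for an absolute URI:

import org.apache.commons.vfs2.FileName;

import org.pentaho.big.data.impl.vfs.hdfs.HDFSFileNameParser;

public class HostCaseSketch {
  public static void main( String[] args ) throws Exception {
    // "MyCluster" should survive with its original casing instead of being lowercased.
    FileName name = HDFSFileNameParser.getInstance().parseUri( null, null, "hdfs://MyCluster:8020/tmp/out.csv" );
    System.out.println( name.getURI() );
  }
}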
hdfs.getPath( newfile.getName().getPath() ) ); } @Override protected long doGetLastModifiedTime() throws Exception { return hdfs.getFileStatus( hdfs.getPath( getName().getPath() ) ).getModificationTime(); } @Override protected boolean doSetLastModifiedTime( long modtime ) throws Exception { hdfs.setTimes( hdfs.getPath( getName().getPath() ), modtime, System.currentTimeMillis() ); return true; } @Override protected String[] doListChildren() throws Exception { HadoopFileStatus[] statusList = hdfs.listStatus( hdfs.getPath( getName().getPath() ) ); String[] children = new String[ statusList.length ]; for ( int i = 0; i < statusList.length; i++ ) { children[ i ] = statusList[ i ].getPath().getName(); } return children; } } ================================================ FILE: impl/vfs-hdfs/src/main/java/org/pentaho/big/data/impl/vfs/hdfs/HDFSFileProvider.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.impl.vfs.hdfs; import org.apache.commons.vfs2.Capability; import org.apache.commons.vfs2.FileName; import org.apache.commons.vfs2.FileSystem; import org.apache.commons.vfs2.FileSystemConfigBuilder; import org.apache.commons.vfs2.FileSystemException; import org.apache.commons.vfs2.FileSystemOptions; import org.apache.commons.vfs2.UserAuthenticationData; import org.apache.commons.vfs2.impl.DefaultFileSystemManager; import org.apache.commons.vfs2.provider.AbstractOriginatingFileProvider; import org.apache.commons.vfs2.provider.FileNameParser; import org.apache.commons.vfs2.provider.GenericFileName; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.pentaho.big.data.impl.vfs.hdfs.nc.NamedClusterConfigBuilder; import org.pentaho.di.core.service.PluginServiceLoader; import org.pentaho.di.core.vfs.KettleVFS; import org.pentaho.hadoop.shim.api.cluster.ClusterInitializationException; import org.pentaho.hadoop.shim.api.cluster.NamedCluster; import org.pentaho.hadoop.shim.api.cluster.NamedClusterService; import org.pentaho.hadoop.shim.api.hdfs.HadoopFileSystemLocator; import org.pentaho.metastore.locator.api.MetastoreLocator; import org.pentaho.big.data.impl.cluster.NamedClusterManager; import java.net.URI; import java.util.Arrays; import java.util.Collection; import java.util.Collections; public class HDFSFileProvider extends AbstractOriginatingFileProvider { protected static Logger logger = LogManager.getLogger( HDFSFileProvider.class ); private MetastoreLocator metaStoreService; /** * The scheme this provider was designed to support */ public static final String SCHEME = "hdfs"; public static final String MAPRFS = "maprfs"; /** * User Information. */ public static final String ATTR_USER_INFO = "UI"; /** * Authentication types. */ public static final UserAuthenticationData.Type[] AUTHENTICATOR_TYPES = new UserAuthenticationData.Type[] { UserAuthenticationData.USERNAME, UserAuthenticationData.PASSWORD }; /** * The provider's capabilities. 
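HDFSFileObject above maps each VFS operation onto the shim's HadoopFileSystem. Once the provider in the next file is registered with Kettle's file system manager, callers typically reach it through KettleVFS; a minimal read sketch, with a hypothetical namenode host and path, assuming the hdfs scheme has already been registered:

import java.io.BufferedReader;
import java.io.InputStreamReader;

import org.apache.commons.vfs2.FileObject;
import org.pentaho.di.core.vfs.KettleVFS;

public class ReadHdfsFileSketch {
  public static void main( String[] args ) throws Exception {
    FileObject file = KettleVFS.getFileObject( "hdfs://namenode.example.com:8020/data/input.txt" );
    try ( BufferedReader reader =
            new BufferedReader( new InputStreamReader( file.getContent().getInputStream() ) ) ) {
      System.out.println( reader.readLine() );
    }
  }
}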
*/ public static final Collection capabilities = Collections.unmodifiableCollection( Arrays.asList( new Capability[] { Capability.CREATE, Capability.DELETE, Capability.RENAME, Capability.GET_TYPE, Capability.LIST_CHILDREN, Capability.READ_CONTENT, Capability.URI, Capability.WRITE_CONTENT, Capability.GET_LAST_MODIFIED, Capability.SET_LAST_MODIFIED_FILE, Capability.RANDOM_ACCESS_READ } ) ); protected final HadoopFileSystemLocator hadoopFileSystemLocator; protected final NamedClusterService namedClusterService; @Deprecated public HDFSFileProvider( HadoopFileSystemLocator hadoopFileSystemLocator, NamedClusterService namedClusterService, MetastoreLocator metaStore ) throws FileSystemException { this( hadoopFileSystemLocator, namedClusterService, (DefaultFileSystemManager) KettleVFS.getInstance().getFileSystemManager(), metaStore ); } @Deprecated public HDFSFileProvider( HadoopFileSystemLocator hadoopFileSystemLocator, NamedClusterService namedClusterService, DefaultFileSystemManager fileSystemManager, MetastoreLocator metaStore ) throws FileSystemException { this( hadoopFileSystemLocator, namedClusterService, fileSystemManager, HDFSFileNameParser.getInstance(), new String[] { SCHEME, MAPRFS }, metaStore ); } public HDFSFileProvider( HadoopFileSystemLocator hadoopFileSystemLocator, String schema, FileNameParser fileNameParser ) throws FileSystemException { this( hadoopFileSystemLocator, NamedClusterManager.getInstance(), fileNameParser, schema ); } public HDFSFileProvider( HadoopFileSystemLocator hadoopFileSystemLocator, NamedClusterService namedClusterService, FileNameParser fileNameParser, String schema ) throws FileSystemException { this( hadoopFileSystemLocator, namedClusterService, (DefaultFileSystemManager) KettleVFS.getInstance().getFileSystemManager(), fileNameParser, new String[] { schema }, null ); } public HDFSFileProvider( HadoopFileSystemLocator hadoopFileSystemLocator, NamedClusterService namedClusterService, DefaultFileSystemManager fileSystemManager, FileNameParser fileNameParser, String[] schemes, MetastoreLocator metaStore ) throws FileSystemException { super(); this.hadoopFileSystemLocator = hadoopFileSystemLocator; this.namedClusterService = namedClusterService; this.metaStoreService = metaStore; setFileNameParser( fileNameParser ); fileSystemManager.addProvider( schemes, this ); } protected synchronized MetastoreLocator getMetastoreLocator() { if ( this.metaStoreService == null ) { try { Collection metastoreLocators = PluginServiceLoader.loadServices( MetastoreLocator.class ); this.metaStoreService = metastoreLocators.stream().findFirst().get(); } catch ( Exception e ) { logger.error( "Error getting MetastoreLocator", e ); } } return this.metaStoreService; } @Override protected FileSystem doCreateFileSystem( final FileName name, final FileSystemOptions fileSystemOptions ) throws FileSystemException { GenericFileName genericFileName = (GenericFileName) name.getRoot(); String hostName = genericFileName.getHostName(); int port = genericFileName.getPort(); NamedCluster namedCluster = resolveNamedCluster( hostName, port, name ); try { return new HDFSFileSystem( name, fileSystemOptions, hadoopFileSystemLocator.getHadoopFilesystem( namedCluster, URI.create( name.getURI() == null ? 
"" : name.getURI() ) ) ); } catch ( ClusterInitializationException e ) { throw new FileSystemException( e ); } } @Override public Collection getCapabilities() { return capabilities; } @Override public FileSystemConfigBuilder getConfigBuilder() { return NamedClusterConfigBuilder.getInstance( getMetastoreLocator(), namedClusterService ); } private NamedCluster resolveNamedCluster( String hostName, int port, final FileName name ) { NamedCluster namedCluster = namedClusterService.getNamedClusterByHost( hostName, getMetastoreLocator().getMetastore() ); if ( namedCluster == null ) { namedClusterService.updateNamedClusterTemplate( hostName, port, MAPRFS.equals( name.getScheme() ) ); namedCluster = namedClusterService.getClusterTemplate(); } return namedCluster; } } ================================================ FILE: impl/vfs-hdfs/src/main/java/org/pentaho/big/data/impl/vfs/hdfs/HDFSFileSystem.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.impl.vfs.hdfs; import org.apache.commons.vfs2.Capability; import org.apache.commons.vfs2.FileName; import org.apache.commons.vfs2.FileObject; import org.apache.commons.vfs2.FileSystem; import org.apache.commons.vfs2.FileSystemException; import org.apache.commons.vfs2.FileSystemOptions; import org.apache.commons.vfs2.provider.AbstractFileName; import org.apache.commons.vfs2.provider.AbstractFileSystem; import org.pentaho.hadoop.shim.api.hdfs.HadoopFileSystem; import java.util.Collection; public class HDFSFileSystem extends AbstractFileSystem implements FileSystem { private final HadoopFileSystem hdfs; public HDFSFileSystem( final FileName rootName, final FileSystemOptions fileSystemOptions, HadoopFileSystem hdfs ) { super( rootName, null, fileSystemOptions ); this.hdfs = hdfs; } @Override @SuppressWarnings( { "unchecked", "rawtypes" } ) protected void addCapabilities( Collection caps ) { caps.addAll( HDFSFileProvider.capabilities ); // Adding capabilities depending on configuration settings try { if ( getHDFSFileSystem() != null && Boolean.parseBoolean( getHDFSFileSystem().getProperty( "dfs.support.append", "true" ) ) ) { caps.add( Capability.APPEND_CONTENT ); } } catch ( FileSystemException e ) { throw new RuntimeException( e ); } } @Override protected FileObject createFile( AbstractFileName name ) throws Exception { return new HDFSFileObject( name, this ); } public HadoopFileSystem getHDFSFileSystem() throws FileSystemException { return hdfs; } } ================================================ FILE: impl/vfs-hdfs/src/main/java/org/pentaho/big/data/impl/vfs/hdfs/MapRFileNameParser.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. 
* * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.impl.vfs.hdfs; import org.apache.commons.vfs2.provider.URLFileNameParser; public class MapRFileNameParser extends URLFileNameParser { public static final String EMPTY_HOSTNAME = ""; private static final MapRFileNameParser INSTANCE = new MapRFileNameParser(); private MapRFileNameParser() { super( -1 ); } public static MapRFileNameParser getInstance() { return INSTANCE; } /** * Extracts the hostname from a URI. * * @param name string buffer with the "scheme://[userinfo@]" part has been removed already. Will be modified. * @return the host name or null. */ @Override protected String extractHostName( StringBuilder name ) { final String hostname = super.extractHostName( name ); // Trick the URLFileNameParser into thinking we have a hostname so we don't have to refactor it. return hostname == null ? EMPTY_HOSTNAME : hostname; } } ================================================ FILE: impl/vfs-hdfs/src/main/java/org/pentaho/big/data/impl/vfs/hdfs/nc/NamedClusterConfigBuilder.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.impl.vfs.hdfs.nc; import org.apache.commons.vfs2.FileSystem; import org.apache.commons.vfs2.FileSystemConfigBuilder; import org.apache.commons.vfs2.FileSystemOptions; import org.pentaho.big.data.impl.vfs.hdfs.HDFSFileSystem; import org.pentaho.di.core.vfs.configuration.KettleGenericFileSystemConfigBuilder; import org.pentaho.hadoop.shim.api.cluster.NamedCluster; import org.pentaho.hadoop.shim.api.cluster.NamedClusterService; import org.pentaho.metastore.api.IMetaStore; import org.pentaho.metastore.api.exceptions.MetaStoreException; import org.pentaho.metastore.locator.api.MetastoreLocator; import java.util.List; public class NamedClusterConfigBuilder extends KettleGenericFileSystemConfigBuilder { private static final NamedClusterConfigBuilder BUILDER = new NamedClusterConfigBuilder(); private static final String EMBEDDED_METASTORE_KEY_PROPERTY = "embeddedMetaStoreKey"; private final MetastoreLocator metastoreLocator; private final NamedClusterService namedClusterService; public NamedClusterConfigBuilder() { this( null, null ); } public NamedClusterConfigBuilder( MetastoreLocator metastoreLocator, NamedClusterService namedClusterService ) { this.metastoreLocator = metastoreLocator; this.namedClusterService = namedClusterService; } /** * @return NamedClusterConfigBuilder instance */ public static NamedClusterConfigBuilder getInstance() { return BUILDER; } public static FileSystemConfigBuilder getInstance( MetastoreLocator metastoreLocator, NamedClusterService namedClusterService ) { return new NamedClusterConfigBuilder( metastoreLocator, namedClusterService ); } /** * @return HDFSFileSystem */ @Override protected Class getConfigClass() { return HDFSFileSystem.class; } public void snapshotNamedClusterToMetaStore( IMetaStore snapshotMetaStore ) throws MetaStoreException { IMetaStore metaStore = metastoreLocator.getMetastore(); List ncList = namedClusterService.list( metaStore ); if ( ncList != null ) { for ( NamedCluster nc : ncList ) { 
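In the product, HDFSFileProvider (shown above) is normally wired up by the Blueprint container, but its constructor registers itself with whatever DefaultFileSystemManager it is handed. A hedged sketch of doing that wiring manually; the HadoopFileSystemLocator and MetastoreLocator arguments are assumed to come from elsewhere:

import org.apache.commons.vfs2.impl.DefaultFileSystemManager;

import org.pentaho.big.data.impl.cluster.NamedClusterManager;
import org.pentaho.big.data.impl.vfs.hdfs.HDFSFileNameParser;
import org.pentaho.big.data.impl.vfs.hdfs.HDFSFileProvider;
import org.pentaho.di.core.vfs.KettleVFS;
import org.pentaho.hadoop.shim.api.hdfs.HadoopFileSystemLocator;
import org.pentaho.metastore.locator.api.MetastoreLocator;

public class RegisterHdfsProviderSketch {
  public static void registerHdfsProvider( HadoopFileSystemLocator locator, MetastoreLocator metastoreLocator )
    throws Exception {
    DefaultFileSystemManager fsm =
      (DefaultFileSystemManager) KettleVFS.getInstance().getFileSystemManager();
    // The constructor itself calls fileSystemManager.addProvider( schemes, this ), so creating the
    // provider is all that is needed to make hdfs:// and maprfs:// URLs resolvable.
    new HDFSFileProvider( locator, NamedClusterManager.getInstance(), fsm, HDFSFileNameParser.getInstance(),
      new String[] { HDFSFileProvider.SCHEME, HDFSFileProvider.MAPRFS }, metastoreLocator );
  }
}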
namedClusterService.create( nc, snapshotMetaStore ); } } } public void setEmbeddedMetastoreKey( final FileSystemOptions opts, final String embeddedMetaStoreKey ) { setParam( opts, EMBEDDED_METASTORE_KEY_PROPERTY, embeddedMetaStoreKey ); } public String getEmbeddedMetastoreKey( final FileSystemOptions opts ) { return (String) getParam( opts, EMBEDDED_METASTORE_KEY_PROPERTY ); } } ================================================ FILE: impl/vfs-hdfs/src/main/java/org/pentaho/big/data/impl/vfs/hdfs/nc/NamedClusterFileObject.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.impl.vfs.hdfs.nc; import org.apache.commons.vfs2.FileSystemException; import org.apache.commons.vfs2.Selectors; import org.apache.commons.vfs2.provider.AbstractFileName; import org.pentaho.big.data.impl.vfs.hdfs.HDFSFileObject; import org.pentaho.di.core.vfs.AliasedFileObject; public class NamedClusterFileObject extends HDFSFileObject implements AliasedFileObject { private final String realFileSystemURI; public NamedClusterFileObject( final AbstractFileName name, final NamedClusterFileSystem fileSystem ) throws FileSystemException { super( name, fileSystem ); realFileSystemURI = fileSystem.getRealFileSystemURI().toString(); } @Override public String getOriginalURIString() { return realFileSystemURI + getName().getPath(); } @Override public String getAELSafeURIString() { return getOriginalURIString(); } @Override public boolean delete() throws FileSystemException { return delete( Selectors.SELECT_SELF_AND_CHILDREN ) > 0; } } ================================================ FILE: impl/vfs-hdfs/src/main/java/org/pentaho/big/data/impl/vfs/hdfs/nc/NamedClusterFileSystem.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. 
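NamedClusterConfigBuilder above is how the embedded metastore key travels with a VFS request: the key is stashed in the FileSystemOptions and later read back by the NamedClusterProvider that follows to select the right metastore. A minimal sketch, with a hypothetical key value:

import org.apache.commons.vfs2.FileSystemOptions;

import org.pentaho.big.data.impl.vfs.hdfs.nc.NamedClusterConfigBuilder;

public class EmbeddedMetastoreKeySketch {
  public static void main( String[] args ) {
    FileSystemOptions opts = new FileSystemOptions();
    NamedClusterConfigBuilder builder = NamedClusterConfigBuilder.getInstance();
    builder.setEmbeddedMetastoreKey( opts, "embedded-metastore-key-123" ); // hypothetical key
    System.out.println( builder.getEmbeddedMetastoreKey( opts ) );         // reads the key back from the options
  }
}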
* * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.impl.vfs.hdfs.nc; import org.apache.commons.vfs2.FileName; import org.apache.commons.vfs2.FileObject; import org.apache.commons.vfs2.FileSystemOptions; import org.apache.commons.vfs2.provider.AbstractFileName; import org.pentaho.big.data.impl.vfs.hdfs.HDFSFileSystem; import org.pentaho.hadoop.shim.api.hdfs.HadoopFileSystem; import java.net.URI; public class NamedClusterFileSystem extends HDFSFileSystem { private final URI realFileSystemURI; public NamedClusterFileSystem( final FileName rootName, final URI realFileSystemURI, final FileSystemOptions fileSystemOptions, HadoopFileSystem hdfs ) { super( rootName, fileSystemOptions, hdfs ); this.realFileSystemURI = realFileSystemURI; } @Override protected FileObject createFile( AbstractFileName name ) throws Exception { return new NamedClusterFileObject( name, this ); } public URI getRealFileSystemURI() { return realFileSystemURI; } } ================================================ FILE: impl/vfs-hdfs/src/main/java/org/pentaho/big/data/impl/vfs/hdfs/nc/NamedClusterProvider.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.impl.vfs.hdfs.nc; import org.apache.commons.vfs2.FileName; import org.apache.commons.vfs2.FileSystem; import org.apache.commons.vfs2.FileSystemConfigBuilder; import org.apache.commons.vfs2.FileSystemException; import org.apache.commons.vfs2.FileSystemOptions; import org.apache.commons.vfs2.impl.DefaultFileSystemManager; import org.apache.commons.vfs2.provider.FileNameParser; import org.apache.commons.vfs2.provider.GenericFileName; import org.pentaho.big.data.impl.cluster.NamedClusterManager; import org.pentaho.big.data.impl.vfs.hdfs.HDFSFileProvider; import org.pentaho.di.core.osgi.api.VfsEmbeddedFileSystemCloser; import org.pentaho.di.core.variables.Variables; import org.pentaho.di.core.vfs.KettleVFS; import org.pentaho.hadoop.shim.api.cluster.ClusterInitializationException; import org.pentaho.hadoop.shim.api.cluster.NamedCluster; import org.pentaho.hadoop.shim.api.cluster.NamedClusterService; import org.pentaho.hadoop.shim.api.hdfs.HadoopFileSystemLocator; import org.pentaho.metastore.api.IMetaStore; import org.pentaho.metastore.api.exceptions.MetaStoreException; import org.pentaho.metastore.locator.api.MetastoreLocator; import java.net.URI; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.Map; import java.util.Set; /** * Created by dstepanov on 11/05/17. 
*/ public class NamedClusterProvider extends HDFSFileProvider implements VfsEmbeddedFileSystemCloser { private Map<String, Set<FileSystem>> cacheEntries = Collections.synchronizedMap( new HashMap<>() ); public NamedClusterProvider( HadoopFileSystemLocator hadoopFileSystemLocator, NamedClusterService namedClusterService, FileNameParser fileNameParser, String[] schemes, MetastoreLocator metaStore ) throws FileSystemException { this( hadoopFileSystemLocator, namedClusterService, (DefaultFileSystemManager) KettleVFS.getInstance().getFileSystemManager(), fileNameParser, schemes, metaStore ); } public NamedClusterProvider( HadoopFileSystemLocator hadoopFileSystemLocator, String schema, FileNameParser fileNameParser ) throws FileSystemException { this( hadoopFileSystemLocator, NamedClusterManager.getInstance(), fileNameParser, schema ); } public NamedClusterProvider( HadoopFileSystemLocator hadoopFileSystemLocator, NamedClusterService namedClusterService, FileNameParser fileNameParser, String schema ) throws FileSystemException { this( hadoopFileSystemLocator, namedClusterService, (DefaultFileSystemManager) KettleVFS.getInstance().getFileSystemManager(), fileNameParser, new String[] { schema }, null ); } public NamedClusterProvider( HadoopFileSystemLocator hadoopFileSystemLocator, NamedClusterService namedClusterService, DefaultFileSystemManager fileSystemManager, FileNameParser fileNameParser, String[] schemes, MetastoreLocator metaStore ) throws FileSystemException { super( hadoopFileSystemLocator, namedClusterService, fileSystemManager, fileNameParser, schemes, metaStore ); } @Override protected FileSystem doCreateFileSystem( FileName name, FileSystemOptions fileSystemOptions ) throws FileSystemException { GenericFileName genericFileName = (GenericFileName) name.getRoot(); String clusterName = genericFileName.getHostName(); String path = genericFileName.getPath(); NamedCluster namedCluster = getNamedClusterByName( clusterName, fileSystemOptions ); try { if ( namedCluster == null ) { namedCluster = namedClusterService.getClusterTemplate(); } String generatedUrl = namedCluster.processURLsubstitution( path == null ?
"" : path, getMetastore( clusterName, fileSystemOptions ), new Variables() ); URI uri = URI.create( generatedUrl ); return new NamedClusterFileSystem( name, uri, fileSystemOptions, hadoopFileSystemLocator.getHadoopFilesystem( namedCluster, uri ) ); } catch ( ClusterInitializationException e ) { throw new FileSystemException( e ); } } @Override public FileSystemConfigBuilder getConfigBuilder() { return NamedClusterConfigBuilder.getInstance( getMetastoreLocator(), namedClusterService ); } /** * Package visibility for test purposes only. * @param clusterNameToResolve - name of the named cluster to resolve * @param fileSystemOptions - the FileSystemOptions for the file system in play * @return the named cluster read from the metastore, or null if it is not defined there * @throws FileSystemException if the metastore cannot be read */ NamedCluster getNamedClusterByName( String clusterNameToResolve, FileSystemOptions fileSystemOptions ) throws FileSystemException { IMetaStore metaStore = getMetastore( clusterNameToResolve, fileSystemOptions ); NamedCluster namedCluster = null; try { namedCluster = namedClusterService.read( clusterNameToResolve, metaStore ); } catch ( MetaStoreException e ) { throw new FileSystemException( e ); } return namedCluster; } protected synchronized FileSystem getFileSystem( final FileName rootName, final FileSystemOptions fileSystemOptions ) throws FileSystemException { FileSystem fs = findFileSystem( rootName, fileSystemOptions ); if ( fs == null ) { // Need to create the file system, and cache it fs = doCreateFileSystem( rootName, fileSystemOptions ); addCacheEntry( rootName, fs ); } return fs; } private String getFileSystemKey( String rootName, FileSystemOptions fileSystemOptions ) { return getEmbeddedMetastoreKey( fileSystemOptions ) == null ? rootName : rootName + getEmbeddedMetastoreKey( fileSystemOptions ); } private String getEmbeddedMetastoreKey( FileSystemOptions fileSystemOptions ) { return ( (NamedClusterConfigBuilder) getConfigBuilder() ).getEmbeddedMetastoreKey( fileSystemOptions ); } private IMetaStore getMetastore( String clusterNameToResolve, FileSystemOptions fileSystemOptions ) { String embeddedMetastoreKey = getEmbeddedMetastoreKey( fileSystemOptions ); IMetaStore metaStore = ( embeddedMetastoreKey != null ) ? getMetastoreLocator().getMetastore( embeddedMetastoreKey ) : getMetastoreLocator().getMetastore(); if ( metaStore != null ) { try { if ( namedClusterService.read( clusterNameToResolve, metaStore ) != null ) { return metaStore; // The cluster-agnostic metastore already has this named cluster, so return it.
} } catch ( MetaStoreException e ) { // fall through and return the embedded metastore } if ( getMetastoreLocator().getExplicitMetastore( embeddedMetastoreKey ) != null ) { metaStore = getMetastoreLocator().getExplicitMetastore( embeddedMetastoreKey ); } } return metaStore; } private void addCacheEntry( FileName rootName, FileSystem fs ) throws FileSystemException { addFileSystem( getFileSystemKey( rootName.toString(), fs.getFileSystemOptions() ), fs ); String embeddedMetastoreKey = getEmbeddedMetastoreKey( fs.getFileSystemOptions() ); Set<FileSystem> fsSet = cacheEntries.get( embeddedMetastoreKey ); if ( fsSet == null ) { fsSet = Collections.synchronizedSet( new HashSet<>() ); cacheEntries.put( embeddedMetastoreKey, fsSet ); } fsSet.add( fs ); } public void closeFileSystem( String embeddedMetastoreKey ) { IMetaStore defaultMetastore = getMetastoreLocator().getMetastore(); IMetaStore embeddedMetastore = getMetastoreLocator().getExplicitMetastore( embeddedMetastoreKey ); if ( cacheEntries.get( embeddedMetastoreKey ) != null ) { for ( FileSystem fs : cacheEntries.get( embeddedMetastoreKey ) ) { closeFileSystem( fs ); } } cacheEntries.remove( embeddedMetastoreKey ); namedClusterService.close( defaultMetastore ); if ( defaultMetastore != embeddedMetastore ) { namedClusterService.close( embeddedMetastore ); } } protected synchronized FileSystem findFileSystem( final Comparable key, final FileSystemOptions fileSystemProps ) { String editedKey = getFileSystemKey( key.toString(), fileSystemProps ); return super.findFileSystem( editedKey, fileSystemProps ); } } ================================================ FILE: impl/vfs-hdfs/src/main/resources/OSGI-INF/blueprint/blueprint.xml ================================================ ================================================ FILE: impl/vfs-hdfs/src/test/java/org/pentaho/big/data/impl/vfs/hdfs/AzureFileNameParserTest.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file.
* * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.impl.vfs.hdfs; import org.apache.commons.vfs2.FileName; import org.apache.commons.vfs2.FileSystemManager; import org.apache.commons.vfs2.VFS; import org.apache.commons.vfs2.provider.FileNameParser; import org.apache.commons.vfs2.provider.UriParser; import org.junit.After; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import org.mockito.MockedStatic; import org.mockito.Mockito; import org.mockito.junit.MockitoJUnitRunner; import org.mockito.stubbing.Answer; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; import static org.mockito.ArgumentMatchers.anyInt; import static org.mockito.ArgumentMatchers.anyString; import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.eq; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; @RunWith(MockitoJUnitRunner.class) public class AzureFileNameParserTest { private static final String BASE_PATH = "//"; private static final String WASB_PREFIX = "wasb"; private static final String WASB_BASE_URI = WASB_PREFIX + ":" + BASE_PATH; private static final String ABFS_PREFIX = "abfs"; private static final String ABFS_BASE_URI = ABFS_PREFIX + ":" + BASE_PATH; private FileSystemManager fsm; private MockedStatic vfsMockedStatic; private MockedStatic uriParserMockedStatic; @Before public void setUp() { vfsMockedStatic = Mockito.mockStatic( VFS.class ); uriParserMockedStatic = Mockito.mockStatic( UriParser.class ); uriParserMockedStatic.when( () -> UriParser.encode( anyString(), any( char[].class ) ) ).thenCallRealMethod(); uriParserMockedStatic.when( () -> UriParser.decode( anyString() ) ).thenCallRealMethod(); uriParserMockedStatic.when( () -> UriParser.appendEncoded( any( StringBuilder.class ), anyString(), any( char[].class ) ) ).thenCallRealMethod(); uriParserMockedStatic.when( () -> UriParser.canonicalizePath( any( StringBuilder.class ), anyInt(), anyInt(), any( FileNameParser.class ) ) ).thenCallRealMethod(); fsm = mock( FileSystemManager.class ); vfsMockedStatic.when( VFS::getManager ).thenReturn( fsm ); } @After public void cleanup() { vfsMockedStatic.close(); uriParserMockedStatic.close(); Mockito.validateMockitoUsage(); } @Test public void testDefaultPort() { assertEquals( -1, AzureHdInsightsFileNameParser.getInstance().getDefaultPort() ); } @Test public void rootPathNoClusterNameWasb() throws Exception { final String FILEPATH = "/"; final String URI = WASB_BASE_URI + FILEPATH; buildExtractSchemeMocks( WASB_PREFIX, URI, BASE_PATH + FILEPATH ); FileNameParser parser = AzureHdInsightsFileNameParser.getInstance(); FileName name = parser.parseUri( null, null, URI ); assertEquals( URI, name.getURI() ); assertEquals( WASB_PREFIX, name.getScheme() ); } @Test public void withPathWasb() throws Exception { final String FILEPATH = "/my/file/path"; final String URI = WASB_BASE_URI + FILEPATH; buildExtractSchemeMocks( WASB_PREFIX, URI, BASE_PATH + FILEPATH ); FileNameParser parser = AzureHdInsightsFileNameParser.getInstance(); FileName name = parser.parseUri( null, null, URI ); assertEquals( URI, name.getURI() ); assertEquals( WASB_PREFIX, name.getScheme() ); assertEquals( FILEPATH, name.getPath() ); } @Test public void withPathAndClusterNameWasb() throws Exception { final String HOST = "cluster2"; final String FILEPATH = "/my/file/path"; final String URI = WASB_BASE_URI + HOST + FILEPATH; 
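// i.e. "wasb://cluster2/my/file/path" for this test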
buildExtractSchemeMocks( WASB_PREFIX, URI, BASE_PATH + HOST + FILEPATH ); FileNameParser parser = AzureHdInsightsFileNameParser.getInstance(); FileName name = parser.parseUri( null, null, URI ); assertEquals( URI, name.getURI() ); assertEquals( WASB_PREFIX, name.getScheme() ); assertTrue( name.getURI().startsWith( WASB_PREFIX + ":" + BASE_PATH + HOST ) ); assertEquals( FILEPATH, name.getPath() ); } @Test public void rootPathNoClusterNameAbfs() throws Exception { final String FILEPATH = "/"; final String URI = ABFS_BASE_URI + FILEPATH; buildExtractSchemeMocks( ABFS_PREFIX, URI, BASE_PATH + FILEPATH ); FileNameParser parser = AzureHdInsightsFileNameParser.getInstance(); FileName name = parser.parseUri( null, null, URI ); assertEquals( URI, name.getURI() ); assertEquals( ABFS_PREFIX, name.getScheme() ); } @Test public void withPathAbfs() throws Exception { final String FILEPATH = "/my/file/path"; final String URI = ABFS_BASE_URI + FILEPATH; buildExtractSchemeMocks( ABFS_PREFIX, URI, BASE_PATH + FILEPATH ); FileNameParser parser = AzureHdInsightsFileNameParser.getInstance(); FileName name = parser.parseUri( null, null, URI ); assertEquals( URI, name.getURI() ); assertEquals( ABFS_PREFIX, name.getScheme() ); assertEquals( FILEPATH, name.getPath() ); } @Test public void withPathAndClusterNameAbfs() throws Exception { final String HOST = "cluster2"; final String FILEPATH = "/my/file/path"; final String URI = ABFS_BASE_URI + HOST + FILEPATH; buildExtractSchemeMocks( ABFS_PREFIX, URI, BASE_PATH + HOST + FILEPATH ); FileNameParser parser = AzureHdInsightsFileNameParser.getInstance(); FileName name = parser.parseUri( null, null, URI ); assertEquals( URI, name.getURI() ); assertEquals( ABFS_PREFIX, name.getScheme() ); assertTrue( name.getURI().startsWith( ABFS_PREFIX + ":" + BASE_PATH + HOST ) ); assertEquals( FILEPATH, name.getPath() ); } private Answer buildSchemeAnswer( String prefix, String buildPath ) { return invocation -> { Object[] args = invocation.getArguments(); ( ( StringBuilder ) args[2] ).append( buildPath ); return prefix; }; } private void buildExtractSchemeMocks( String prefix, String fullPath, String pathWithoutPrefix ) { String[] schemes = {"wasb", "abfs"}; when( fsm.getSchemes() ).thenReturn( schemes ); uriParserMockedStatic.when( () -> UriParser.extractScheme( eq( schemes ), eq( fullPath ), any( StringBuilder.class ) ) ) .thenAnswer( buildSchemeAnswer( prefix, pathWithoutPrefix ) ); } } ================================================ FILE: impl/vfs-hdfs/src/test/java/org/pentaho/big/data/impl/vfs/hdfs/HDFSFileNameParserTest.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. 
* * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.impl.vfs.hdfs; import org.apache.commons.vfs2.FileSystemException; import org.apache.commons.vfs2.FileSystemManager; import org.apache.commons.vfs2.VFS; import org.apache.commons.vfs2.provider.FileNameParser; import org.apache.commons.vfs2.provider.URLFileName; import org.apache.commons.vfs2.provider.URLFileNameParser; import org.apache.commons.vfs2.provider.UriParser; import org.junit.After; import org.junit.Before; import org.junit.Rule; import org.junit.Test; import org.junit.rules.ExpectedException; import org.junit.runner.RunWith; import org.mockito.MockedStatic; import org.mockito.Mockito; import org.mockito.junit.MockitoJUnitRunner; import org.mockito.stubbing.Answer; import static org.junit.Assert.assertEquals; import static org.mockito.ArgumentMatchers.anyInt; import static org.mockito.ArgumentMatchers.anyString; import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.eq; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; /** * Created by bryan on 8/7/15. */ @RunWith(MockitoJUnitRunner.class) public class HDFSFileNameParserTest { private static final String PREFIX = "hdfs"; private static final String BASE_PATH = "//"; private static final String BASE_URI = PREFIX + ":" + BASE_PATH; private FileSystemManager fsm; private MockedStatic vfsMockedStatic; private MockedStatic uriParserMockedStatic; @Rule public final ExpectedException exception = ExpectedException.none(); @Before public void setUp() { vfsMockedStatic = Mockito.mockStatic( VFS.class ); uriParserMockedStatic = Mockito.mockStatic( UriParser.class ); uriParserMockedStatic.when( () -> UriParser.encode( anyString(), any( char[].class ) ) ).thenCallRealMethod(); uriParserMockedStatic.when( () -> UriParser.decode( anyString() ) ).thenCallRealMethod(); uriParserMockedStatic.when( () -> UriParser.appendEncoded( any( StringBuilder.class ), anyString(), any( char[].class ) ) ).thenCallRealMethod(); uriParserMockedStatic.when( () -> UriParser.canonicalizePath( any( StringBuilder.class ), anyInt(), anyInt(), any( FileNameParser.class ) ) ).thenCallRealMethod(); uriParserMockedStatic.when( () -> UriParser.extractQueryString( any( StringBuilder.class ) ) ).thenCallRealMethod(); uriParserMockedStatic.when( () -> UriParser.fixSeparators( any( StringBuilder.class ) ) ).thenCallRealMethod(); uriParserMockedStatic.when( () -> UriParser.extractScheme( anyString(), any( StringBuilder.class ) ) ).thenCallRealMethod(); fsm = mock( FileSystemManager.class ); vfsMockedStatic.when( VFS::getManager ).thenReturn( fsm ); } @After public void cleanup() { vfsMockedStatic.close(); uriParserMockedStatic.close(); Mockito.validateMockitoUsage(); } @Test public void testDefaultPort() { assertEquals( -1, HDFSFileNameParser.getInstance().getDefaultPort() ); } @Test public void testParseUriNullInput() throws Exception { final String FILEPATH = "test"; final String URI = BASE_URI + FILEPATH; buildExtractSchemeMocks( PREFIX, URI, BASE_PATH + FILEPATH ); HDFSFileNameParser.getInstance().parseUri( null, null, URI ); } @Test public void testParseUriMixedCase() throws Exception { final String FILEPATH = "testUpperCaseHost"; final String URI = BASE_URI + FILEPATH; buildExtractSchemeMocks( PREFIX, URI, BASE_PATH + FILEPATH ); URLFileName urlFileName = ( URLFileName ) HDFSFileNameParser.getInstance().parseUri( null, null, URI ); assertEquals( "testUpperCaseHost", 
urlFileName.getHostName() ); } @Test public void testParseUriMixedCaseLongName() throws Exception { final String FILEPATH = "testUpperCaseHost/long/test/name"; final String URI = BASE_URI + FILEPATH; buildExtractSchemeMocks( PREFIX, URI, BASE_PATH + FILEPATH ); URLFileName urlFileName = ( URLFileName ) HDFSFileNameParser.getInstance().parseUri( null, null, URI ); assertEquals( "testUpperCaseHost", urlFileName.getHostName() ); } @Test public void testParseUriThrowExceptionNoProtocol() throws Exception { final String FILEPATH = "testUpperCaseHost/long/test/name"; exception.expect( FileSystemException.class ); buildExtractSchemeMocks( null, FILEPATH, FILEPATH ); HDFSFileNameParser.getInstance().parseUri( null, null, "testUpperCaseHost/long/test/name" ); } @Test public void testParseUriUserNameFilePath() throws Exception { final String FILEPATH = "root:password@testUpperCaseHost:8080/long/test/name"; final String URI = BASE_URI + FILEPATH; buildExtractSchemeMocks( PREFIX, URI, BASE_PATH + FILEPATH ); URLFileName hdfsFileName = ( URLFileName ) HDFSFileNameParser.getInstance() .parseUri( null, null, URI ); URLFileName urlFileName = ( URLFileName ) new URLFileNameParser( 7000 ).parseUri( null, null, URI ); assertEquals( 8080, hdfsFileName.getPort() ); assertEquals( "root", hdfsFileName.getUserName() ); assertEquals( "/long/test/name", hdfsFileName.getPath() ); assertEquals( "password", hdfsFileName.getPassword() ); assertEquals( urlFileName.getType(), hdfsFileName.getType() ); assertEquals( urlFileName.getQueryString(), hdfsFileName.getQueryString() ); } private Answer buildSchemeAnswer( String prefix, String buildPath ) { return invocation -> { Object[] args = invocation.getArguments(); ( ( StringBuilder ) args[2] ).append( buildPath ); return prefix; }; } private void buildExtractSchemeMocks( String prefix, String fullPath, String pathWithoutPrefix ) { String[] schemes = {"hdfs"}; when( fsm.getSchemes() ).thenReturn( schemes ); uriParserMockedStatic.when( () -> UriParser.extractScheme( eq( schemes ), eq( fullPath ), any( StringBuilder.class ) ) ) .thenAnswer( buildSchemeAnswer( prefix, pathWithoutPrefix ) ); } } ================================================ FILE: impl/vfs-hdfs/src/test/java/org/pentaho/big/data/impl/vfs/hdfs/HDFSFileObjectTest.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. 
* * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.impl.vfs.hdfs; import org.apache.commons.vfs2.FileName; import org.apache.commons.vfs2.FileObject; import org.apache.commons.vfs2.FileSystemException; import org.apache.commons.vfs2.FileType; import org.apache.commons.vfs2.provider.AbstractFileName; import org.junit.Before; import org.junit.Test; import org.mockito.ArgumentCaptor; import org.pentaho.hadoop.shim.api.hdfs.HadoopFileStatus; import org.pentaho.hadoop.shim.api.hdfs.HadoopFileSystem; import org.pentaho.hadoop.shim.api.hdfs.HadoopFileSystemPath; import java.io.InputStream; import java.io.OutputStream; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; import static org.mockito.ArgumentMatchers.eq; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; /** * Created by bryan on 8/7/15. */ public class HDFSFileObjectTest { private AbstractFileName abstractFileName; private HDFSFileSystem hdfsFileSystem; private HadoopFileSystem hadoopFileSystem; private HDFSFileObject hdfsFileObject; private HadoopFileSystemPath hadoopFileSystemPath; @Before public void setup() throws FileSystemException { abstractFileName = mock( AbstractFileName.class ); hadoopFileSystem = mock( HadoopFileSystem.class ); hdfsFileSystem = new HDFSFileSystem( mock( AbstractFileName.class ), null, hadoopFileSystem ); hdfsFileObject = new HDFSFileObject( abstractFileName, hdfsFileSystem ); String path = "fake-path"; hadoopFileSystemPath = mock( HadoopFileSystemPath.class ); when( abstractFileName.getPath() ).thenReturn( path ); when( hadoopFileSystem.getPath( path ) ).thenReturn( hadoopFileSystemPath ); } @Test public void testGetContentSize() throws Exception { long len = 321L; HadoopFileStatus hadoopFileStatus = mock( HadoopFileStatus.class ); when( hadoopFileSystem.getFileStatus( hadoopFileSystemPath ) ).thenReturn( hadoopFileStatus ); when( hadoopFileStatus.getLen() ).thenReturn( len ); assertEquals( len, hdfsFileObject.doGetContentSize() ); } @Test public void testDoGetOutputStreamAppend() throws Exception { OutputStream outputStream = mock( OutputStream.class ); when( hadoopFileSystem.append( hadoopFileSystemPath ) ).thenReturn( outputStream ); assertEquals( outputStream, hdfsFileObject.doGetOutputStream( true ) ); } @Test public void testDoGetOutputStreamCreate() throws Exception { OutputStream outputStream = mock( OutputStream.class ); when( hadoopFileSystem.create( hadoopFileSystemPath ) ).thenReturn( outputStream ); assertEquals( outputStream, hdfsFileObject.doGetOutputStream( false ) ); } @Test public void testDoGetInputStream() throws Exception { InputStream inputStream = mock( InputStream.class ); when( hadoopFileSystem.open( hadoopFileSystemPath ) ).thenReturn( inputStream ); assertEquals( inputStream, hdfsFileObject.doGetInputStream() ); } @Test public void testDoGetTypeFile() throws Exception { HadoopFileStatus hadoopFileStatus = mock( HadoopFileStatus.class ); when( hadoopFileSystem.getFileStatus( hadoopFileSystemPath ) ).thenReturn( hadoopFileStatus ); when( hadoopFileStatus.isDir() ).thenReturn( false ); assertEquals( FileType.FILE, hdfsFileObject.doGetType() ); } @Test public void testDoGetTypeFolder() throws Exception { HadoopFileStatus hadoopFileStatus = mock( HadoopFileStatus.class ); when( hadoopFileSystem.getFileStatus( hadoopFileSystemPath ) ).thenReturn( hadoopFileStatus ); when( hadoopFileStatus.isDir() 
).thenReturn( true ); assertEquals( FileType.FOLDER, hdfsFileObject.doGetType() ); } @Test public void testDoGetTypeImaginary() throws Exception { assertEquals( FileType.IMAGINARY, hdfsFileObject.doGetType() ); } @Test public void testDoCreateFolder() throws Exception { hdfsFileObject.doCreateFolder(); verify( hadoopFileSystem ).mkdirs( hadoopFileSystemPath ); } @Test public void testDoRename() throws Exception { FileObject fileObject = mock( FileObject.class ); FileName fileName = mock( FileName.class ); when( fileObject.getName() ).thenReturn( fileName ); String path2 = "fake-path-2"; when( fileName.getPath() ).thenReturn( path2 ); HadoopFileSystemPath newPath = mock( HadoopFileSystemPath.class ); when( hadoopFileSystem.getPath( path2 ) ).thenReturn( newPath ); hdfsFileObject.doRename( fileObject ); verify( hadoopFileSystem ).rename( hadoopFileSystemPath, newPath ); } @Test public void testDoGetLastModifiedTime() throws Exception { long modificationTime = 8988L; HadoopFileStatus hadoopFileStatus = mock( HadoopFileStatus.class ); when( hadoopFileSystem.getFileStatus( hadoopFileSystemPath ) ).thenReturn( hadoopFileStatus ); when( hadoopFileStatus.getModificationTime() ).thenReturn( modificationTime ); assertEquals( modificationTime, hdfsFileObject.doGetLastModifiedTime() ); } @Test public void testDoSetLastModifiedTime() throws Exception { long modtime = 48933L; long start = System.currentTimeMillis(); assertTrue( hdfsFileObject.doSetLastModifiedTime( modtime ) ); ArgumentCaptor longArgumentCaptor = ArgumentCaptor.forClass( Long.class ); verify( hadoopFileSystem ).setTimes( eq( hadoopFileSystemPath ), eq( modtime ), longArgumentCaptor.capture() ); Long accessTime = longArgumentCaptor.getValue(); assertTrue( start <= accessTime ); assertTrue( accessTime <= System.currentTimeMillis() ); } @Test public void testDoListChildren() throws Exception { String childPathName = "fake-path-child"; testDoListChildrenInternal( childPathName ); } @Test public void testDoListChildrenWithSpaces() throws Exception { String childPathName = "fake path child with spaces"; testDoListChildrenInternal( childPathName ); } private void testDoListChildrenInternal( String childPathName ) throws Exception { HadoopFileStatus hadoopFileStatus = mock( HadoopFileStatus.class ); HadoopFileStatus[] hadoopFileStatuses = { hadoopFileStatus }; HadoopFileSystemPath childPath = mock( HadoopFileSystemPath.class ); when( hadoopFileStatus.getPath() ).thenReturn( childPath ); when( childPath.getName() ).thenReturn( childPathName ); when( hadoopFileSystem.listStatus( hadoopFileSystemPath ) ).thenReturn( hadoopFileStatuses ); String[] children = hdfsFileObject.doListChildren(); assertEquals( 1, children.length ); assertEquals( childPathName, children[ 0 ] ); } } ================================================ FILE: impl/vfs-hdfs/src/test/java/org/pentaho/big/data/impl/vfs/hdfs/HDFSFileProviderTest.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. 
* * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.impl.vfs.hdfs; import org.apache.commons.vfs2.FileName; import org.apache.commons.vfs2.FileSystemException; import org.apache.commons.vfs2.impl.DefaultFileSystemManager; import org.apache.commons.vfs2.provider.FileNameParser; import org.apache.commons.vfs2.provider.GenericFileName; import org.junit.Before; import org.junit.Test; import org.mockito.ArgumentCaptor; import org.pentaho.hadoop.shim.api.cluster.ClusterInitializationException; import org.pentaho.hadoop.shim.api.cluster.NamedCluster; import org.pentaho.hadoop.shim.api.cluster.NamedClusterService; import org.pentaho.hadoop.shim.api.hdfs.HadoopFileSystemLocator; import org.pentaho.metastore.locator.api.MetastoreLocator; import java.net.URI; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; import static org.mockito.ArgumentMatchers.eq; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; /** * Created by bryan on 8/7/15. */ public class HDFSFileProviderTest { private HadoopFileSystemLocator hadoopFileSystemLocator; private NamedClusterService namedClusterService; private DefaultFileSystemManager defaultFileSystemManager; private HDFSFileProvider hdfsFileProvider; private NamedCluster namedCluster; private MetastoreLocator metaStoreLocator; private FileNameParser fileNameParser; @Before public void setup() throws FileSystemException { hadoopFileSystemLocator = mock( HadoopFileSystemLocator.class ); namedClusterService = mock( NamedClusterService.class ); namedCluster = mock( NamedCluster.class ); when( namedClusterService.getClusterTemplate() ).thenReturn( namedCluster ); defaultFileSystemManager = mock( DefaultFileSystemManager.class ); metaStoreLocator = mock( MetastoreLocator.class ); fileNameParser = mock( FileNameParser.class ); hdfsFileProvider = new HDFSFileProvider( hadoopFileSystemLocator, namedClusterService, defaultFileSystemManager, fileNameParser, new String[] { HDFSFileProvider.SCHEME, HDFSFileProvider.MAPRFS }, metaStoreLocator ); ArgumentCaptor argumentCaptor = ArgumentCaptor.forClass( String[].class ); verify( defaultFileSystemManager ) .addProvider( argumentCaptor.capture(), eq( hdfsFileProvider ) ); String[] schemes = argumentCaptor.getValue(); assertEquals( 2, schemes.length ); assertEquals( HDFSFileProvider.SCHEME, schemes[ 0 ] ); assertEquals( HDFSFileProvider.MAPRFS, schemes[ 1 ] ); } @Test public void testDoCreateFileSystemNoPort() throws FileSystemException, ClusterInitializationException { String testHostname = "testHostname"; FileName fileName = mock( FileName.class ); GenericFileName genericFileName = mock( GenericFileName.class ); when( fileName.getURI() ).thenReturn( "" ); when( fileName.getRoot() ).thenReturn( genericFileName ); when( fileName.getScheme() ).thenReturn( HDFSFileProvider.MAPRFS ); when( genericFileName.getHostName() ).thenReturn( testHostname ); when( genericFileName.getPort() ).thenReturn( -1 ); assertTrue( hdfsFileProvider.doCreateFileSystem( fileName, null ) instanceof HDFSFileSystem ); verify( hadoopFileSystemLocator ).getHadoopFilesystem( namedCluster, URI.create( "" ) ); verify( namedClusterService ).updateNamedClusterTemplate( testHostname, -1, true ); } @Test public void testGetCapabilities() { assertEquals( HDFSFileProvider.capabilities, hdfsFileProvider.getCapabilities() ); } } ================================================ FILE: 
impl/vfs-hdfs/src/test/java/org/pentaho/big/data/impl/vfs/hdfs/HDFSFileSystemTest.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.impl.vfs.hdfs; import org.apache.commons.vfs2.Capability; import org.apache.commons.vfs2.FileName; import org.apache.commons.vfs2.FileSystemException; import org.apache.commons.vfs2.provider.AbstractFileName; import org.junit.Before; import org.junit.Test; import org.pentaho.hadoop.shim.api.hdfs.HadoopFileSystem; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import static org.junit.Assert.assertArrayEquals; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; import static org.mockito.ArgumentMatchers.anyString; import static org.mockito.ArgumentMatchers.eq; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; /** * Created by bryan on 8/7/15. */ public class HDFSFileSystemTest { private FileName rootName; private HadoopFileSystem hadoopFileSystem; private HDFSFileSystem hdfsFileSystem; @Before public void setup() { rootName = mock( FileName.class ); hadoopFileSystem = mock( HadoopFileSystem.class ); hdfsFileSystem = new HDFSFileSystem( rootName, null, hadoopFileSystem ); } @Test public void testAddCapabilities() { Collection caps = mock( Collection.class ); hdfsFileSystem.addCapabilities( caps ); verify( caps ).addAll( HDFSFileProvider.capabilities ); } @Test public void testAddAppendCapabilities() { Collection caps = new ArrayList( ); when( hadoopFileSystem.getProperty( eq( "dfs.support.append" ), anyString() ) ).thenReturn( "false" ); hdfsFileSystem.addCapabilities( caps ); Collection res = new ArrayList( HDFSFileProvider.capabilities ); assertArrayEquals( caps.toArray(), Collections.unmodifiableCollection( res ).toArray() ); caps = new ArrayList( ); when( hadoopFileSystem.getProperty( eq( "dfs.support.append" ), anyString() ) ).thenReturn( "true" ); hdfsFileSystem.addCapabilities( caps ); res.add( Capability.APPEND_CONTENT ); assertArrayEquals( caps.toArray(), Collections.unmodifiableCollection( res ).toArray() ); } @Test public void testCreateFile() throws Exception { assertTrue( hdfsFileSystem.createFile( mock( AbstractFileName.class ) ) instanceof HDFSFileObject ); } @Test public void testGetHDFSFileSystem() throws FileSystemException { assertEquals( hadoopFileSystem, hdfsFileSystem.getHDFSFileSystem() ); } } ================================================ FILE: impl/vfs-hdfs/src/test/java/org/pentaho/big/data/impl/vfs/hdfs/MapRFileNameParserTest.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. 
* * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.impl.vfs.hdfs; import org.apache.commons.vfs2.FileName; import org.apache.commons.vfs2.FileSystemManager; import org.apache.commons.vfs2.VFS; import org.apache.commons.vfs2.provider.FileNameParser; import org.apache.commons.vfs2.provider.UriParser; import org.junit.After; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import org.mockito.MockedStatic; import org.mockito.Mockito; import org.mockito.junit.MockitoJUnitRunner; import org.mockito.stubbing.Answer; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; import static org.mockito.ArgumentMatchers.anyString; import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.eq; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; @RunWith(MockitoJUnitRunner.class) public class MapRFileNameParserTest { private static final String PREFIX = "maprfs"; private static final String BASE_PATH = "//"; private static final String BASE_URI = PREFIX + ":" + BASE_PATH; private FileSystemManager fsm; private MockedStatic vfsMockedStatic; private MockedStatic uriParserMockedStatic; @Before public void setUp() { vfsMockedStatic = Mockito.mockStatic( VFS.class ); uriParserMockedStatic = Mockito.mockStatic( UriParser.class ); uriParserMockedStatic.when( () -> UriParser.encode( anyString(), any( char[].class ) ) ).thenCallRealMethod(); uriParserMockedStatic.when( () -> UriParser.decode( anyString() ) ).thenCallRealMethod(); uriParserMockedStatic.when( () -> UriParser.appendEncoded( any( StringBuilder.class ), anyString(), any( char[].class ) ) ).thenCallRealMethod(); fsm = mock( FileSystemManager.class ); vfsMockedStatic.when( VFS::getManager ).thenReturn( fsm ); } @After public void cleanup() { vfsMockedStatic.close(); uriParserMockedStatic.close(); Mockito.validateMockitoUsage(); } @Test public void testDefaultPort() { assertEquals( -1, MapRFileNameParser.getInstance().getDefaultPort() ); } @Test public void rootPathNoClusterName() throws Exception { final String FILEPATH = "/"; final String URI = BASE_URI + FILEPATH; buildExtractSchemeMocks( PREFIX, URI, BASE_PATH + FILEPATH ); FileNameParser parser = MapRFileNameParser.getInstance(); FileName name = parser.parseUri( null, null, URI ); assertEquals( URI, name.getURI() ); assertEquals( PREFIX, name.getScheme() ); } @Test public void withPath() throws Exception { final String FILEPATH = "/my/file/path"; final String URI = BASE_URI + FILEPATH; buildExtractSchemeMocks( PREFIX, URI, BASE_PATH + FILEPATH ); FileNameParser parser = MapRFileNameParser.getInstance(); FileName name = parser.parseUri( null, null, URI ); assertEquals( URI, name.getURI() ); assertEquals( PREFIX, name.getScheme() ); assertEquals( FILEPATH, name.getPath() ); } @Test public void withPathAndClusterName() throws Exception { final String HOST = "cluster2"; final String FILEPATH = "/my/file/path"; final String URI = BASE_URI + HOST + FILEPATH; buildExtractSchemeMocks( PREFIX, URI, BASE_PATH + HOST + FILEPATH ); FileNameParser parser = MapRFileNameParser.getInstance(); FileName name = parser.parseUri( null, null, URI ); assertEquals( URI, name.getURI() ); assertEquals( PREFIX, name.getScheme() ); assertTrue( name.getURI().startsWith( PREFIX + ":" + BASE_PATH + HOST ) ); assertEquals( FILEPATH, name.getPath() ); } private Answer buildSchemeAnswer( String prefix, String buildPath ) { 
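// Stand-in for UriParser.extractScheme(): appends the scheme-stripped remainder to the StringBuilder passed as the third argument and reports the matched scheme by returning the prefix.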
return invocation -> { Object[] args = invocation.getArguments(); ( ( StringBuilder ) args[2] ).append( buildPath ); return prefix; }; } private void buildExtractSchemeMocks( String prefix, String fullPath, String pathWithoutPrefix ) { String[] schemes = {"maprfs"}; when( fsm.getSchemes() ).thenReturn( schemes ); uriParserMockedStatic.when( () -> UriParser.extractScheme( eq( schemes ), eq( fullPath ), any( StringBuilder.class ) ) ) .thenAnswer( buildSchemeAnswer( prefix, pathWithoutPrefix ) ); } } ================================================ FILE: impl/vfs-hdfs/src/test/java/org/pentaho/big/data/impl/vfs/hdfs/nc/NamedClusterConfigBuilderTest.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.impl.vfs.hdfs.nc; import org.apache.commons.vfs2.FileSystemConfigBuilder; import org.junit.Before; import org.junit.Test; import org.pentaho.hadoop.shim.api.cluster.NamedCluster; import org.pentaho.hadoop.shim.api.cluster.NamedClusterService; import org.pentaho.metastore.api.IMetaStore; import org.pentaho.metastore.api.exceptions.MetaStoreException; import org.pentaho.metastore.locator.api.MetastoreLocator; import java.util.Arrays; import static org.junit.Assert.assertTrue; import static org.mockito.ArgumentMatchers.eq; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.never; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; public class NamedClusterConfigBuilderTest { private NamedClusterService namedClusterService = mock( NamedClusterService.class ); private MetastoreLocator metastoreLocator = mock( MetastoreLocator.class ); private IMetaStore metastore = mock( IMetaStore.class ); private NamedCluster namedCluster = mock( NamedCluster.class ); @Before public void setUp() throws MetaStoreException { when( metastoreLocator.getMetastore() ).thenReturn( metastore ); } @Test public void testSnapshotNamedClusterToMetaStore() throws MetaStoreException { when( namedClusterService.list( eq( metastore ) ) ).thenReturn( Arrays.asList( namedCluster ) ); NamedClusterConfigBuilder builder = new NamedClusterConfigBuilder( metastoreLocator, namedClusterService ); builder.snapshotNamedClusterToMetaStore( metastore ); verify( namedClusterService ).create( eq( namedCluster ), eq( metastore ) ); } @Test public void testSnapshotNamedClusterToMetaStore_staticInit() throws MetaStoreException { when( namedClusterService.list( eq( metastore ) ) ).thenReturn( Arrays.asList( namedCluster ) ); FileSystemConfigBuilder builder = NamedClusterConfigBuilder.getInstance( metastoreLocator, namedClusterService ); assertTrue( builder instanceof NamedClusterConfigBuilder ); NamedClusterConfigBuilder ncbuilder = (NamedClusterConfigBuilder) builder; ncbuilder.snapshotNamedClusterToMetaStore( metastore ); verify( namedClusterService ).create( eq( namedCluster ), eq( metastore ) ); } @Test public void testSnapshotNamedClusterToMetaStore_MetastoreDoesNotHaveNC() throws MetaStoreException { when( namedClusterService.list( eq( metastore ) ) ).thenReturn( null ); NamedClusterConfigBuilder builder = new NamedClusterConfigBuilder( metastoreLocator, namedClusterService ); 
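// With no named clusters in the source metastore, the snapshot should not create anything in the target store.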
builder.snapshotNamedClusterToMetaStore( metastore ); verify( namedClusterService, never() ).create( eq( namedCluster ), eq( metastore ) ); } @Test public void testSnapshotNamedClusterToMetaStore_staticInit_MetastoreDoesNotHaveNC() throws MetaStoreException { when( namedClusterService.list( eq( metastore ) ) ).thenReturn( null ); FileSystemConfigBuilder builder = NamedClusterConfigBuilder.getInstance( metastoreLocator, namedClusterService ); assertTrue( builder instanceof NamedClusterConfigBuilder ); NamedClusterConfigBuilder ncbuilder = (NamedClusterConfigBuilder) builder; ncbuilder.snapshotNamedClusterToMetaStore( metastore ); verify( namedClusterService, never() ).create( eq( namedCluster ), eq( metastore ) ); } } ================================================ FILE: impl/vfs-hdfs/src/test/java/org/pentaho/big/data/impl/vfs/hdfs/nc/NamedClusterProviderTest.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.impl.vfs.hdfs.nc; import org.apache.commons.vfs2.FileSystem; import org.apache.commons.vfs2.FileSystemConfigBuilder; import org.apache.commons.vfs2.FileSystemException; import org.apache.commons.vfs2.impl.DefaultFileSystemManager; import org.apache.commons.vfs2.provider.FileNameParser; import org.apache.commons.vfs2.provider.url.UrlFileName; import org.junit.Before; import org.junit.Test; import org.mockito.AdditionalMatchers; import org.pentaho.big.data.impl.vfs.hdfs.HDFSFileSystem; import org.pentaho.di.core.variables.Variables; import org.pentaho.hadoop.shim.api.cluster.ClusterInitializationException; import org.pentaho.hadoop.shim.api.cluster.NamedCluster; import org.pentaho.hadoop.shim.api.cluster.NamedClusterService; import org.pentaho.hadoop.shim.api.hdfs.HadoopFileSystem; import org.pentaho.hadoop.shim.api.hdfs.HadoopFileSystemLocator; import org.pentaho.metastore.api.IMetaStore; import org.pentaho.metastore.api.exceptions.MetaStoreException; import org.pentaho.metastore.locator.api.MetastoreLocator; import java.net.URI; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertNull; import static org.junit.Assert.assertTrue; import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.anyString; import static org.mockito.ArgumentMatchers.eq; import static org.mockito.ArgumentMatchers.isNull; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.never; import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; public class NamedClusterProviderTest { private final NamedClusterService ncService = mock( NamedClusterService.class ); private final MetastoreLocator metastoreLocator = mock( MetastoreLocator.class ); private final IMetaStore metastore = mock( IMetaStore.class ); private final NamedCluster nc = mock( NamedCluster.class ); private final NamedCluster ncTemplate = mock( NamedCluster.class ); private final HadoopFileSystemLocator hdfsLocator = mock( HadoopFileSystemLocator.class ); private final FileNameParser fileNameParser = mock( FileNameParser.class ); private final 
DefaultFileSystemManager fileSystemManager = mock( DefaultFileSystemManager.class ); private final HadoopFileSystem hfs = mock( HadoopFileSystem.class ); private final String[] scheme = new String[] { "test" }; private final String ncName = "ncName"; private final String path = "/samplePath"; @Before public void setUp() throws MetaStoreException, ClusterInitializationException { when( ncService.read( eq( ncName ), eq( metastore ) ) ).thenReturn( nc ); when( ncService.getClusterTemplate() ).thenReturn( ncTemplate ); when( ncTemplate.processURLsubstitution( anyString(), AdditionalMatchers.or( any( IMetaStore.class ), isNull() ), any( Variables.class ) ) ).thenReturn( "nc://" + ncName + path ); when( nc.processURLsubstitution( anyString(), AdditionalMatchers.or( any( IMetaStore.class ), isNull() ), any( Variables.class ) ) ).thenReturn( "nc://" + ncName + path ); when( hdfsLocator.getHadoopFilesystem( any( NamedCluster.class ), any( URI.class ) ) ).thenReturn( hfs ); } @Test public void testGetNamedClusterByName_metastoreExist() throws FileSystemException, MetaStoreException { when( metastoreLocator.getMetastore() ).thenReturn( metastore ); String ncName = "ncName"; NamedClusterProvider provider = new NamedClusterProvider( hdfsLocator, ncService, fileSystemManager, fileNameParser, scheme, metastoreLocator ); assertEquals( nc, provider.getNamedClusterByName( ncName, null ) ); verify( ncService, times( 2 ) ).read( eq( ncName ), eq( metastore ) ); } @Test public void testGetNamedClusterByName_metastoreNotExist() throws FileSystemException, MetaStoreException { String ncName = "ncName"; NamedClusterProvider provider = new NamedClusterProvider( hdfsLocator, ncService, fileSystemManager, fileNameParser, scheme, metastoreLocator ); //should be null because we do not have metastore assertNull( provider.getNamedClusterByName( ncName, null ) ); verify( ncService, never() ).read( eq( ncName ), eq( metastore ) ); } @Test public void testGetConfigBuilder() throws FileSystemException { NamedClusterProvider provider = new NamedClusterProvider( hdfsLocator, ncService, fileSystemManager, fileNameParser, scheme, metastoreLocator ); FileSystemConfigBuilder builder = provider.getConfigBuilder(); assertNotNull( builder ); assertTrue( builder instanceof NamedClusterConfigBuilder ); } @Test public void testDoCreateFileSystem() throws FileSystemException, ClusterInitializationException { when( metastoreLocator.getMetastore() ).thenReturn( metastore ); UrlFileName name = new UrlFileName( "hc", ncName, 0, 0, null, null, path, null, null ); NamedClusterProvider provider = new NamedClusterProvider( hdfsLocator, ncService, fileSystemManager, fileNameParser, scheme, metastoreLocator ); FileSystem fs = provider.doCreateFileSystem( name, null ); assertTrue( fs instanceof HDFSFileSystem ); HDFSFileSystem hdfsFS = (HDFSFileSystem) fs; assertEquals( hfs, hdfsFS.getHDFSFileSystem() ); verify( nc ).processURLsubstitution( anyString(), eq( metastore ), any( Variables.class ) ); verify( hdfsLocator ).getHadoopFilesystem( eq( nc ), any( URI.class ) ); } @Test public void testDoCreateFileSystem_NCTemplate() throws FileSystemException, MetaStoreException, ClusterInitializationException { UrlFileName name = new UrlFileName( "hc", ncName, 0, 0, null, null, path, null, null ); NamedClusterProvider provider = new NamedClusterProvider( hdfsLocator, ncService, fileSystemManager, fileNameParser, scheme, metastoreLocator ); FileSystem fs = provider.doCreateFileSystem( name, null ); assertTrue( fs instanceof HDFSFileSystem ); HDFSFileSystem 
hdfsFS = (HDFSFileSystem) fs; assertEquals( hfs, hdfsFS.getHDFSFileSystem() ); verify( ncService, never() ).read( eq( ncName ), eq( metastore ) ); verify( hdfsLocator ).getHadoopFilesystem( eq( ncTemplate ), any( URI.class ) ); } } ================================================ FILE: kettle-plugins/browse/pom.xml ================================================ pentaho-big-data-kettle-plugins pentaho 11.1.0.0-SNAPSHOT 4.0.0 pentaho-big-data-kettle-plugins-browse 11.1.0.0-SNAPSHOT jar pentaho pentaho-big-data-impl-cluster ${project.version} commons-io commons-io ${commons-io.version} provided org.pentaho.di.plugins pentaho-metastore-locator-api ${pdi.version} provided org.pentaho.di.plugins file-open-save-new-api ${pdi.version} provided org.pentaho shim-api ${pentaho-hadoop-shims.version} pentaho-kettle kettle-core ${pdi.version} provided org.apache.logging.log4j log4j-api ${log4j.version} ================================================ FILE: kettle-plugins/browse/src/main/java/org/pentaho/big/data/impl/browse/NamedClusterProvider.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.impl.browse; import org.apache.commons.compress.utils.IOUtils; import org.apache.commons.vfs2.FileObject; import org.apache.commons.vfs2.FileSystemException; import org.apache.commons.vfs2.FileType; import org.apache.commons.vfs2.Selectors; import org.pentaho.big.data.impl.browse.model.NamedClusterDirectory; import org.pentaho.big.data.impl.browse.model.NamedClusterFile; import org.pentaho.big.data.impl.browse.model.NamedClusterTree; import org.pentaho.di.core.bowl.Bowl; import org.pentaho.di.core.exception.KettleFileException; import org.pentaho.di.core.variables.VariableSpace; import org.pentaho.di.core.service.PluginServiceLoader; import org.pentaho.di.core.variables.Variables; import org.pentaho.di.core.vfs.KettleVFS; import org.pentaho.di.plugins.fileopensave.api.overwrite.OverwriteStatus; import org.pentaho.di.plugins.fileopensave.api.providers.BaseFileProvider; import org.pentaho.di.plugins.fileopensave.api.providers.File; import org.pentaho.di.plugins.fileopensave.api.providers.Tree; import org.pentaho.di.plugins.fileopensave.api.providers.Utils; import org.pentaho.di.plugins.fileopensave.api.providers.exception.FileException; import org.pentaho.di.plugins.fileopensave.api.providers.exception.FileNotFoundException; import org.pentaho.di.plugins.fileopensave.api.providers.exception.ProviderServiceInterface; import org.pentaho.hadoop.shim.api.cluster.NamedClusterService; import org.pentaho.metastore.api.exceptions.MetaStoreException; import org.pentaho.metastore.locator.api.MetastoreLocator; import org.pentaho.big.data.impl.cluster.NamedClusterManager; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.util.ArrayList; import java.util.Collection; import java.util.List; public class NamedClusterProvider extends BaseFileProvider { public static final String NAME = "Hadoop Clusters"; public static final String TYPE = "clusters"; public static final String SCHEME = "hc"; private 
NamedClusterService namedClusterManager; private MetastoreLocator metastoreLocator; private Logger logger = LogManager.getLogger( NamedClusterProvider.class ); private boolean initialized = false; public NamedClusterProvider() { this( NamedClusterManager.getInstance() ); lazilyInitialize(); } public NamedClusterProvider( NamedClusterService namedClusterManager ) { this.namedClusterManager = namedClusterManager; } private void lazilyInitialize() { try { Collection metastoreLocators = PluginServiceLoader.loadServices( MetastoreLocator.class ); this.metastoreLocator = metastoreLocators.stream().findFirst().get(); } catch ( Exception e ) { logger.error( "Error getting MetastoreLocator", e ); } try { Collection providerServiceInterfaces = PluginServiceLoader.loadServices( ProviderServiceInterface.class ); providerServiceInterfaces.stream().findFirst().get().addProviderService( this ); initialized = true; } catch ( Exception e ) { logger.error( "Error registering Hadoop Clusters file provider", e ); } } private void ensureInitialized() { if ( !initialized ) { lazilyInitialize(); } } @Override public String getName() { return NAME; } @Override public String getType() { return TYPE; } @Override public Class getFileClass() { return NamedClusterFile.class; } @Override public boolean isAvailable() { return true; } @Override public Tree getTree( Bowl bowl ) { ensureInitialized(); NamedClusterTree namedClusterTree = new NamedClusterTree( NAME ); try { List names = namedClusterManager.listNames( metastoreLocator.getMetastore() ); names.forEach( name -> { NamedClusterDirectory namedClusterDirectory = new NamedClusterDirectory(); namedClusterDirectory.setName( name ); namedClusterDirectory.setPath( SCHEME + "://" + name ); namedClusterDirectory.setRoot( NAME ); namedClusterDirectory.setHasChildren( true ); namedClusterDirectory.setCanDelete( false ); namedClusterTree.addChild( namedClusterDirectory ); } ); } catch ( MetaStoreException me ) { // ignored } return namedClusterTree; } @Override public List getFiles( Bowl bowl, NamedClusterFile file, String filters, VariableSpace space ) throws FileException { ensureInitialized(); FileObject fileObject; try { fileObject = KettleVFS.getInstance( bowl ).getFileObject( file.getPath() ); if ( !fileObject.exists() ) { throw new FileNotFoundException( file.getPath(), TYPE ); } } catch ( KettleFileException | FileSystemException e ) { throw new FileNotFoundException( file.getPath(), TYPE ); } return populateChildren( file, fileObject, filters ); } /** * Check if a file object has children * * @param fileObject * @return */ private boolean hasChildren( FileObject fileObject ) { try { return fileObject != null && fileObject.getType().hasChildren(); } catch ( FileSystemException e ) { return false; } } /** * Get the children if they are available, if an error return an empty list * * @param fileObject * @return */ private FileObject[] getChildren( FileObject fileObject ) { try { return fileObject != null ? 
fileObject.getChildren() : new FileObject[] {}; } catch ( FileSystemException e ) { return new FileObject[] {}; } } /** * Populate Named Cluster file objects from named cluster FileObject types * * @param parent * @param fileObject * @param filters * @return */ private List populateChildren( NamedClusterFile parent, FileObject fileObject, String filters ) { List files = new ArrayList<>(); if ( fileObject != null && hasChildren( fileObject ) ) { FileObject[] children = getChildren( fileObject ); for ( FileObject child : children ) { if ( hasChildren( child ) ) { files.add( NamedClusterDirectory.create( parent.getPath(), child ) ); } else { if ( child != null && Utils.matches( child.getName().getBaseName(), filters ) ) { files.add( NamedClusterFile.create( parent.getPath(), child ) ); } } } } return files; } @Override public List delete( Bowl bowl, List files, VariableSpace space ) throws FileException { ensureInitialized(); List deletedFiles = new ArrayList<>(); for ( NamedClusterFile file : files ) { try { FileObject fileObject = KettleVFS.getInstance( bowl ).getFileObject( file.getPath() ); if ( fileObject.delete() ) { deletedFiles.add( file ); } } catch ( KettleFileException | FileSystemException kfe ) { // Ignore don't add } } return deletedFiles; } @Override public NamedClusterFile add( Bowl bowl, NamedClusterFile folder, VariableSpace space ) throws FileException { ensureInitialized(); try { FileObject fileObject = KettleVFS.getInstance( bowl ).getFileObject( folder.getPath() ); fileObject.createFolder(); String parent = folder.getPath().substring( 0, folder.getPath().length() - 1 ); return NamedClusterFile.create( parent, fileObject ); } catch ( KettleFileException | FileSystemException ignored ) { // Ignored } return null; } @Override public NamedClusterFile getFile( Bowl bowl, NamedClusterFile file, VariableSpace space ) { ensureInitialized(); try { FileObject fileObject = KettleVFS.getInstance( bowl ).getFileObject( file.getPath() ); if ( fileObject.getType().equals( FileType.FOLDER ) ) { return NamedClusterDirectory.create( null, fileObject ); } else { return NamedClusterFile.create( null, fileObject ); } } catch ( KettleFileException | FileSystemException e ) { // File does not exist } return null; } @Override public boolean fileExists( Bowl bowl, NamedClusterFile dir, String path, VariableSpace space ) throws FileException { ensureInitialized(); path = sanitizeName( bowl, dir, path ); try { FileObject fileObject = KettleVFS.getInstance( bowl ).getFileObject( path ); return fileObject.exists(); } catch ( KettleFileException | FileSystemException e ) { throw new FileException(); } } @Override public String getNewName( Bowl bowl, NamedClusterFile destDir, String newPath, VariableSpace space ) throws FileException { ensureInitialized(); String extension = Utils.getExtension( newPath ); String parent = Utils.getParent( newPath, "/" ); String name = Utils.getName( newPath, "/" ).replace( "." + extension, "" ); int i = 1; String testName = sanitizeName( bowl, destDir, newPath ); try { while ( KettleVFS.getInstance( bowl ).getFileObject( testName ).exists() ) { if ( Utils.isValidExtension( extension ) ) { testName = sanitizeName( bowl, destDir, parent + name + " " + i + "." 
+ extension ); } else { testName = sanitizeName( bowl, destDir, newPath + " " + i ); } i++; } } catch ( KettleFileException | FileSystemException e ) { return testName; } return testName; } @Override public boolean isSame( Bowl bowl, File file1, File file2 ) { return file1 instanceof NamedClusterFile && file2 instanceof NamedClusterFile; } @Override public NamedClusterFile rename( Bowl bowl, NamedClusterFile file, String newPath, OverwriteStatus overwriteStatus, VariableSpace space ) throws FileException { return doMove( bowl, file, newPath, overwriteStatus ); } @Override public NamedClusterFile copy( Bowl bowl, NamedClusterFile file, String toPath, OverwriteStatus overwriteStatus, VariableSpace space ) throws FileException { ensureInitialized(); try { FileObject fileObject = KettleVFS.getInstance( bowl ).getFileObject( file.getPath() ); FileObject copyObject = KettleVFS.getInstance( bowl ).getFileObject( toPath ); copyObject.copyFrom( fileObject, Selectors.SELECT_ALL ); if ( file instanceof NamedClusterDirectory ) { return NamedClusterDirectory.create( copyObject.getParent().getPublicURIString(), fileObject ); } else { return NamedClusterFile.create( copyObject.getParent().getPublicURIString(), fileObject ); } } catch ( KettleFileException | FileSystemException e ) { throw new FileException(); } } @Override public NamedClusterFile move( Bowl bowl, NamedClusterFile namedClusterFile, String s, OverwriteStatus overwriteStatus, VariableSpace space ) throws FileException { return null; } private NamedClusterFile doMove( Bowl bowl, NamedClusterFile file, String newPath, OverwriteStatus overwriteStatus ) { ensureInitialized(); try { FileObject fileObject = KettleVFS.getInstance( bowl ).getFileObject( file.getPath() ); FileObject renameObject = KettleVFS.getInstance( bowl ).getFileObject( newPath ); if ( renameObject.exists() ) { overwriteStatus.promptOverwriteIfNecessary( file.getPath(), file.getType() ); if ( overwriteStatus.isOverwrite() ) { renameObject.delete(); } else if ( overwriteStatus.isCancel() || overwriteStatus.isSkip() ) { return null; } else if ( overwriteStatus.isRename() ) { NamedClusterDirectory namedClusterDir = NamedClusterDirectory.create( renameObject.getParent().getPath().toString(), renameObject ); newPath = getNewName( bowl, namedClusterDir, newPath, new Variables() ); renameObject = KettleVFS.getInstance( bowl ).getFileObject( newPath ); } } fileObject.moveTo( renameObject ); if ( file instanceof NamedClusterDirectory ) { return NamedClusterDirectory.create( renameObject.getParent().getPublicURIString(), renameObject ); } else { return NamedClusterFile.create( renameObject.getParent().getPublicURIString(), renameObject ); } } catch ( KettleFileException | FileSystemException | FileException e ) { return null; } } @Override public InputStream readFile( Bowl bowl, NamedClusterFile file, VariableSpace space ) throws FileException { ensureInitialized(); try { FileObject fileObject = KettleVFS.getInstance( bowl ).getFileObject( file.getPath() ); return fileObject.getContent().getInputStream(); } catch ( KettleFileException | FileSystemException e ) { return null; } } @Override public NamedClusterFile writeFile( Bowl bowl, InputStream inputStream, NamedClusterFile destDir, String path, OverwriteStatus overwriteStatus, VariableSpace space ) throws FileException { ensureInitialized(); FileObject fileObject = null; try { fileObject = KettleVFS.getInstance( bowl ).getFileObject( path ); } catch ( KettleFileException ke ) { throw new FileException(); } if ( fileObject != null ) { 
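// Stream the supplied InputStream into the resolved VFS file, then return it wrapped as a
// NamedClusterFile under destDir; an IOException during the copy results in a null return.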
try ( OutputStream outputStream = fileObject.getContent().getOutputStream(); ) { IOUtils.copy( inputStream, outputStream ); outputStream.flush(); return NamedClusterFile.create( destDir.getPath(), fileObject ); } catch ( IOException e ) { return null; } } return null; } @Override public NamedClusterFile getParent( Bowl bowl, NamedClusterFile file ) { NamedClusterFile vfsFile = new NamedClusterFile(); vfsFile.setPath( file.getParent() ); return vfsFile; } @Override public void clearProviderCache() { // Nothing to clear } @Override public File getFile( Bowl bowl, String path, boolean isDirectory ) { ensureInitialized(); FileObject fileObject = null; try { fileObject = KettleVFS.getInstance( bowl ).getFileObject( path ); if ( isDirectory ) { if ( fileObject.exists() && !fileObject.getType().equals( FileType.FOLDER ) ) { throwIllegalArgumentException( path, "is not a directory" ); } return NamedClusterDirectory.create( null, fileObject ); } else { if ( fileObject.exists() && !fileObject.getType().equals( FileType.FILE ) ) { throwIllegalArgumentException( path, "is a directory" ); } return NamedClusterFile.create( null, fileObject ); } } catch ( KettleFileException | FileSystemException e ) { throwIllegalArgumentException( path, "could not create a VFSFile object" ); } return null; //Will never be executed but compiler complained } private void throwIllegalArgumentException( String path, String message ) { throw new IllegalArgumentException( "\"" + path + "\" " + message ); } @Override public NamedClusterFile createDirectory( Bowl bowl, String parentPath, NamedClusterFile file, String newDirectoryName ) { ensureInitialized(); NamedClusterDirectory namedClusterDir = null; try { FileObject fileObject = KettleVFS.getInstance( bowl ).getFileObject( parentPath + "/" + newDirectoryName ); namedClusterDir = NamedClusterDirectory.create( null, fileObject ); add( bowl, namedClusterDir,null ); } catch ( KettleFileException | FileException e ) { e.printStackTrace(); return null; } return namedClusterDir; } } ================================================ FILE: kettle-plugins/browse/src/main/java/org/pentaho/big/data/impl/browse/model/NamedClusterDirectory.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. 
* * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.impl.browse.model; import org.apache.commons.vfs2.FileObject; import org.apache.commons.vfs2.FileSystemException; import org.pentaho.big.data.impl.browse.NamedClusterProvider; import org.pentaho.di.plugins.fileopensave.api.providers.Directory; import org.pentaho.di.plugins.fileopensave.api.providers.EntityType; import java.util.ArrayList; import java.util.Date; import java.util.List; public class NamedClusterDirectory extends NamedClusterFile implements Directory { private boolean hasChildren; private boolean canAddChildren; private List<NamedClusterFile> children = new ArrayList<>(); public static final String DIRECTORY = "folder"; @Override public String getType() { return DIRECTORY; } public boolean hasChildren() { return hasChildren; } public void setHasChildren( boolean hasChildren ) { this.hasChildren = hasChildren; } public List<NamedClusterFile> getChildren() { return children; } public void setChildren( List<NamedClusterFile> children ) { this.children = children; } public void addChild( NamedClusterFile file ) { this.children.add( file ); } public boolean isHasChildren() { return hasChildren; } public void setCanAddChildren( boolean canAddChildren ) { this.canAddChildren = canAddChildren; } @Override public boolean isCanAddChildren() { return this.canAddChildren; } public static NamedClusterDirectory create( String parent, FileObject fileObject ) { NamedClusterDirectory namedClusterDirectory = new NamedClusterDirectory(); namedClusterDirectory.setName( fileObject.getName().getBaseName() ); namedClusterDirectory.setPath( fileObject.getName().getFriendlyURI() ); namedClusterDirectory.setParent( parent ); namedClusterDirectory.setRoot( NamedClusterProvider.NAME ); namedClusterDirectory.setCanEdit( true ); namedClusterDirectory.setHasChildren( true ); namedClusterDirectory.setCanAddChildren( true ); try { namedClusterDirectory.setDate( new Date( fileObject.getContent().getLastModifiedTime() ) ); } catch ( FileSystemException e ) { namedClusterDirectory.setDate( new Date() ); } return namedClusterDirectory; } @Override public EntityType getEntityType() { return EntityType.NAMED_CLUSTER_DIRECTORY; } } ================================================ FILE: kettle-plugins/browse/src/main/java/org/pentaho/big/data/impl/browse/model/NamedClusterFile.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file.
* * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.impl.browse.model; import org.apache.commons.lang.StringUtils; import org.apache.commons.vfs2.FileObject; import org.apache.commons.vfs2.FileSystemException; import org.pentaho.big.data.impl.browse.NamedClusterProvider; import org.pentaho.di.plugins.fileopensave.api.providers.BaseEntity; import org.pentaho.di.plugins.fileopensave.api.providers.EntityType; import org.pentaho.di.plugins.fileopensave.api.providers.File; import java.util.Date; public class NamedClusterFile extends BaseEntity implements File { public static final String TYPE = "file"; public NamedClusterFile() { // Needed for JSON marshalling } @Override public String getType() { return TYPE; } @Override public String getProvider() { return NamedClusterProvider.TYPE; } public static NamedClusterFile create( String parent, FileObject fileObject ) { NamedClusterFile namedClusterFile = new NamedClusterFile(); namedClusterFile.setName( fileObject.getName().getBaseName() ); namedClusterFile.setPath( fileObject.getName().getFriendlyURI() ); namedClusterFile.setParent( parent ); namedClusterFile.setRoot( NamedClusterProvider.NAME ); namedClusterFile.setCanEdit( true ); try { namedClusterFile.setDate( new Date( fileObject.getContent().getLastModifiedTime() ) ); } catch ( FileSystemException ignored ) { namedClusterFile.setDate( new Date() ); } return namedClusterFile; } @Override public boolean equals( Object obj ) { // If the object is compared with itself then return true if ( obj == this ) { return true; } if ( !( obj instanceof NamedClusterFile ) ) { return false; } NamedClusterFile compare = (NamedClusterFile) obj; // This comparison depends on `getProvider()` to always return a hardcoded value return compare.getProvider().equals( getProvider() ) && StringUtils.equals( compare.getPath(), getPath() ); } @Override public EntityType getEntityType(){ return EntityType.NAMED_CLUSTER_FILE; } } ================================================ FILE: kettle-plugins/browse/src/main/java/org/pentaho/big/data/impl/browse/model/NamedClusterTree.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. 
* * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.impl.browse.model; import org.pentaho.big.data.impl.browse.NamedClusterProvider; import org.pentaho.di.plugins.fileopensave.api.providers.Tree; import java.util.ArrayList; import java.util.List; public class NamedClusterTree implements Tree { private static final int ORDER = 4; private String name; private List namedClusters = new ArrayList<>(); public NamedClusterTree( String name ) { this.name = name; } @Override public String getName() { return name; } @Override public List getChildren() { return namedClusters; } @Override public void addChild( NamedClusterFile namedClusterFile ) { namedClusters.add( namedClusterFile ); } @Override public boolean isCanAddChildren() { return false; } @Override public int getOrder() { return ORDER; } @Override public String getProvider() { return NamedClusterProvider.TYPE; } } ================================================ FILE: kettle-plugins/browse/src/main/resources/OSGI-INF/blueprint/blueprint.xml ================================================ ================================================ FILE: kettle-plugins/common/job/pom.xml ================================================ 4.0.0 pentaho pentaho-big-data-kettle-plugins-common 11.1.0.0-SNAPSHOT pentaho-big-data-kettle-plugins-common-job 11.1.0.0-SNAPSHOT jar Pentaho Community Edition Project: ${project.artifactId} a Pentaho open source project http://www.pentaho.com site pentaho-kettle kettle-ui-swt ${pdi.version} provided org.eclipse.core commands 3.3.0-I20070605-0010 test junit junit ${dependency.junit.revision} test org.mockito mockito-all ${dependency.mockito.revision} test org.apache.logging.log4j log4j-core provided org.pentaho shim-api ${pentaho-hadoop-shims.version} pentaho pentaho-big-data-kettle-plugins-common-ui ${project.version} com.h2database h2 ${h2.version} test pentaho-kettle kettle-core ${pdi.version} tests test ================================================ FILE: kettle-plugins/common/job/src/main/java/org/pentaho/big/data/kettle/plugins/job/AbstractJobEntry.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. 
* * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.job; import org.pentaho.di.cluster.SlaveServer; import org.pentaho.di.core.Result; import org.pentaho.di.core.database.DatabaseMeta; import org.pentaho.di.core.exception.KettleException; import org.pentaho.di.core.exception.KettleXMLException; import org.pentaho.di.core.logging.LogChannelInterface; import org.pentaho.di.core.variables.VariableSpace; import org.pentaho.di.job.entry.JobEntryBase; import org.pentaho.di.job.entry.JobEntryInterface; import org.pentaho.di.repository.ObjectId; import org.pentaho.di.repository.Repository; import org.w3c.dom.Node; import java.util.List; /** * User: RFellows Date: 6/5/12 */ public abstract class AbstractJobEntry extends JobEntryBase implements Cloneable, JobEntryInterface { protected T jobConfig = null; public AbstractJobEntry() { this( null ); } protected AbstractJobEntry( LogChannelInterface logChannelInterface ) { if ( logChannelInterface != null ) { this.log = logChannelInterface; } jobConfig = createJobConfig(); } public T getJobConfig() { jobConfig.setJobEntryName( getName() ); return jobConfig; } public void setJobConfig( T jobConfig ) { this.jobConfig = jobConfig; setName( jobConfig.getJobEntryName() ); } /** * @return {@code true} if this job entry yields a success or failure result */ @Override public boolean evaluates() { return true; } /** * @return {@code true} if this job entry supports and unconditional hop from it */ @Override public boolean isUnconditional() { return true; } /** * @return an portion of XML describing the current state of this job entry */ @Override public String getXML() { StringBuffer buffer = new StringBuffer( 1024 ); buffer.append( super.getXML() ); JobEntrySerializationHelper.write( getJobConfig(), 1, buffer ); return buffer.toString(); } /** * Set the state of this job entry from an XML document node containing a previous state. * * @param node * @param databaseMetas * @param slaveServers * @param repository * @throws KettleXMLException */ @Override public void loadXML( Node node, List databaseMetas, List slaveServers, Repository repository ) throws KettleXMLException { super.loadXML( node, databaseMetas, slaveServers ); T loaded = createJobConfig(); JobEntrySerializationHelper.read( loaded, node ); setJobConfig( loaded ); } /** * Load the state of this job entry from a repository. * * @param rep * @param id_jobentry * @param databases * @param slaveServers * @throws KettleException */ @Override public void loadRep( Repository rep, ObjectId id_jobentry, List databases, List slaveServers ) throws KettleException { super.loadRep( rep, id_jobentry, databases, slaveServers ); T loaded = createJobConfig(); JobEntrySerializationHelper.loadRep( loaded, rep, id_jobentry, databases, slaveServers ); setJobConfig( loaded ); } /** * Save the state of this job entry to a repository. 
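* The current job configuration is serialized to XML by {@link JobEntrySerializationHelper} and stored as a single job entry attribute.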
* * @param rep * @param id_job * @throws KettleException */ @Override public void saveRep( Repository rep, ObjectId id_job ) throws KettleException { JobEntrySerializationHelper.saveRep( getJobConfig(), rep, id_job, getObjectId() ); } @Override public Result execute( Result result, int i ) throws KettleException { if ( !isValid( getJobConfig() ) ) { setJobResultFailed( result ); return result; } final Result jobResult = result; result.setResult( true ); Thread t = new Thread( getExecutionRunnable( jobResult ) ); t.setUncaughtExceptionHandler( new Thread.UncaughtExceptionHandler() { @Override public void uncaughtException( Thread t, Throwable e ) { handleUncaughtThreadException( t, e, jobResult ); } } ); t.start(); if ( JobEntryUtils.asBoolean( getJobConfig().getBlockingExecution(), variables ) ) { while ( !parentJob.isStopped() && t.isAlive() ) { try { t.join( JobEntryUtils.asLong( getJobConfig().getBlockingPollingInterval(), variables ) ); } catch ( InterruptedException ex ) { // ignore break; } } // If the parent job is stopped and the thread is still running make sure to interrupt it if ( t.isAlive() ) { t.interrupt(); setJobResultFailed( result ); } // Wait for thread to die so we get the proper return status set in jobResult before returning try { t.join( 10 * 1000 ); // Don't wait for more than 10 seconds in case the thread is really blocked } catch ( InterruptedException e ) { // ignore } } return result; } /** * Flag a job result as failed * * @param jobResult */ public void setJobResultFailed( Result jobResult ) { jobResult.setNrErrors( 1 ); jobResult.setResult( false ); } /** * Determine if the configuration provide is valid. This will validate all options in one pass. * * @param config * Configuration to validate * @return {@code true} if the configuration contains valid values for all options we use directly in this job entry. */ public boolean isValid( T config ) { List warnings = getValidationWarnings( config ); for ( String warning : warnings ) { logError( warning ); } return warnings.isEmpty(); } public VariableSpace getVariableSpace() { // These variables must be set on this job entry prior to retrieval. // Today this happens as part of job execution via the Kettle job execution engine // or in the controller's open() method. return variables; } /** * Creates a job configuration * * @return */ protected abstract T createJobConfig(); // /** // * Ensures that the configuration is valid for execution // * @param config // * @return // */ // protected abstract boolean isValid(T config); /** * Validate any configuration option we use directly that could be invalid at runtime. * * @param config * Configuration to validate * @return List of warning messages for any invalid configuration options we use directly in this job entry. */ public abstract List getValidationWarnings( T config ); /** * Get the {@link Runnable} that does the execution of the job * * @param jobResult * Job result for the execution to use * @return * @throws KettleException * error obtaining execution runnable */ protected abstract Runnable getExecutionRunnable( final Result jobResult ) throws KettleException; /** * Handle any clean up required when our execution thread encounters an unexpected {@link Exception}. 
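* Invoked by the uncaught exception handler installed on the execution thread created in {@link #execute(Result, int)}.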
* * @param t * Thread that encountered the uncaught exception * @param e * Exception that was encountered * @param jobResult * Job result for the execution that spawned the thread */ protected abstract void handleUncaughtThreadException( Thread t, Throwable e, Result jobResult ); } ================================================ FILE: kettle-plugins/common/job/src/main/java/org/pentaho/big/data/kettle/plugins/job/AbstractJobEntryController.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.job; import org.apache.commons.vfs2.FileObject; import org.apache.commons.vfs2.FileSystemException; import org.eclipse.jface.dialogs.MessageDialog; import org.eclipse.swt.SWT; import org.eclipse.swt.widgets.Composite; import org.eclipse.swt.widgets.MessageBox; import org.eclipse.swt.widgets.Shell; import org.pentaho.hadoop.shim.api.cluster.NamedCluster; import org.pentaho.big.data.plugins.common.ui.VfsFileChooserHelper; import org.pentaho.di.core.exception.KettleException; import org.pentaho.di.core.exception.KettleFileException; import org.pentaho.di.core.plugins.JobEntryPluginType; import org.pentaho.di.core.plugins.PluginInterface; import org.pentaho.di.core.plugins.PluginRegistry; import org.pentaho.di.core.vfs.KettleVFS; import org.pentaho.di.i18n.BaseMessages; import org.pentaho.di.job.JobMeta; import org.pentaho.di.job.entry.JobEntryInterface; import org.pentaho.di.ui.core.PropsUI; import org.pentaho.di.ui.core.dialog.ErrorDialog; import org.pentaho.di.ui.core.gui.WindowProperty; import org.pentaho.di.ui.spoon.Spoon; import org.pentaho.di.ui.util.HelpUtils; import org.pentaho.ui.xul.XulDomContainer; import org.pentaho.ui.xul.XulException; import org.pentaho.ui.xul.binding.Binding; import org.pentaho.ui.xul.binding.BindingFactory; import org.pentaho.ui.xul.containers.XulDeck; import org.pentaho.ui.xul.containers.XulDialog; import org.pentaho.ui.xul.impl.AbstractXulEventHandler; import org.pentaho.ui.xul.stereotype.Bindable; import org.pentaho.ui.xul.swt.tags.SwtDialog; import com.google.common.annotations.VisibleForTesting; import java.lang.reflect.InvocationTargetException; import java.util.ArrayList; import java.util.Collection; import java.util.List; /** * User: RFellows Date: 6/6/12 */ public abstract class AbstractJobEntryController> extends AbstractXulEventHandler { public static final String[] DEFAULT_FILE_FILTERS = new String[] { "*.*" }; // Generically typed fields protected C config; // BlockableJobConfig protected E jobEntry; // AbstractJobEntry // common fields protected XulDomContainer container; protected BindingFactory bindingFactory; protected List bindings; protected JobMeta jobMeta; protected JobEntryMode jobEntryMode = JobEntryMode.QUICK_SETUP; @SuppressWarnings( "unchecked" ) public AbstractJobEntryController( JobMeta jobMeta, XulDomContainer container, E jobEntry, BindingFactory bindingFactory ) { super(); this.jobMeta = jobMeta; this.jobEntry = jobEntry; this.container = container; this.config = (C) jobEntry.getJobConfig().clone(); this.bindingFactory = bindingFactory; } /** * @return the simple name for this controller. 
This controller can be referenced by this name in the XUL document. */ @Override public String getName() { return "controller"; } /** * Opens the dialog * * @return */ public JobEntryInterface open() { XulDialog dialog = (XulDialog) container.getDocumentRoot().getElementById( getDialogElementId() ); // Update the Variable Space so the latest are available when the dialog is show (for test evaluation) jobEntry.copyVariablesFrom( jobMeta ); dialog.show(); return jobEntry; } /** * Initialize the dialog by loading model data, creating bindings and firing initial sync ( * {@link Binding#fireSourceChanged()}. * * @throws XulException * * @throws InvocationTargetException * */ public void init() throws XulException, InvocationTargetException { bindings = new ArrayList(); // override hook beforeInit(); try { createBindings( config, container, bindingFactory, bindings ); syncModel(); for ( Binding binding : bindings ) { binding.fireSourceChanged(); } } finally { // override hook afterInit(); } } /** * Accept and apply the changes made in the dialog. Also, close the dialog */ @Bindable public void accept() { jobEntry.setJobConfig( config ); jobEntry.setChanged(); cancel(); } /** * Close the dialog without saving any changes */ @Bindable public void cancel() { removeBindings(); XulDialog xulDialog = getDialog(); Shell shell = (Shell) xulDialog.getRootObject(); if ( !shell.isDisposed() ) { WindowProperty winprop = new WindowProperty( shell ); PropsUI.getInstance().setScreen( winprop ); ( (Composite) xulDialog.getManagedObject() ).dispose(); shell.dispose(); } } /** * Call help dialog */ public void help() { XulDialog xulDialog = getDialog(); Shell shell = (Shell) xulDialog.getRootObject(); HelpUtils.openHelpDialog( shell, getPlugin() ); } /** * Find a plugin for a corresponding job entry */ protected PluginInterface getPlugin() { return PluginRegistry.getInstance().findPluginWithName( JobEntryPluginType.class, jobEntry.getName() ); } /** * Remove and destroy all bindings from {@link #bindings}. */ protected void removeBindings() { if ( bindings == null ) { return; } for ( Binding binding : bindings ) { binding.destroyBindings(); } bindings.clear(); } /** * Look up the dialog reference from the document. * * @return The dialog element referred to by {@link #getDialogElementId()} */ protected SwtDialog getDialog() { return (SwtDialog) getXulDomContainer().getDocumentRoot().getElementById( getDialogElementId() ); } /** * @return the job entry this controller will modify configuration for */ @VisibleForTesting public E getJobEntry() { return jobEntry; } /** * Override this to execute some code prior to the init function running */ protected void beforeInit() { return; } /** * Override this to execute some code after the init function is complete */ protected void afterInit() { return; } protected boolean showConfirmationDialog( String title, String message ) { return MessageDialog.openConfirm( getShell(), title, message ); } /** * Show an information dialog with the title and message provided. * * @param title * Dialog window title * @param message * Dialog message */ protected void showInfoDialog( String title, String message ) { MessageBox mb = new MessageBox( getShell(), SWT.OK | SWT.ICON_INFORMATION ); mb.setText( title ); mb.setMessage( message ); mb.open(); } /** * Show an error dialog with the title and message provided. 
* * @param title * Dialog window title * @param message * Dialog message */ protected void showErrorDialog( String title, String message ) { MessageBox mb = new MessageBox( getShell(), SWT.OK | SWT.ICON_ERROR ); mb.setText( title ); mb.setMessage( message ); mb.open(); } /** * Show an error dialog with the title, message, and toggle button to see the entire stacktrace produced by {@code t}. * * @param title * Dialog window title * @param message * Dialog message * @param t * Cause for this error */ protected void showErrorDialog( String title, String message, Throwable t ) { new ErrorDialog( getShell(), title, message, t ); } /** * @return the shell for the currently visible dialog. This will be used to display additional dialogs/popups. */ protected Shell getShell() { return getDialog().getShell(); } /** * Browse for a file or directory with the VFS Browser. * * @param root * Root object * @param initial * Initial file or folder the browser should open to * @param dialogMode * Mode to open dialog in: e.g. * {@link org.pentaho.vfs.ui .VfsFileChooserDialog#VFS_DIALOG_OPEN_FILE_OR_DIRECTORY} * @param schemeRestriction * Scheme to limit the user to browsing from * @param defaultScheme * Scheme to select by default in the selection dropdown * @return The selected file object, {@code null} if no object is selected * @throws KettleFileException * Error accessing the root file using the initial file, when {@code root} is not provided */ protected FileObject browseVfs( FileObject root, FileObject initial, int dialogMode, String schemeRestriction, String defaultScheme, boolean showFileScheme ) throws KettleFileException { String[] schemeRestrictions = new String[1]; schemeRestrictions[0] = schemeRestriction; return browseVfs( root, initial, dialogMode, schemeRestrictions, showFileScheme, defaultScheme ); } protected FileObject browseVfs( FileObject root, FileObject initial, int dialogMode, String[] schemeRestrictions, boolean showFileScheme, String defaultScheme ) throws KettleFileException { return browseVfs( root, initial, dialogMode, schemeRestrictions, showFileScheme, defaultScheme, null ); } protected FileObject browseVfs( FileObject root, FileObject initial, int dialogMode, String[] schemeRestrictions, boolean showFileScheme, String defaultScheme, NamedCluster namedCluster ) throws KettleFileException { return browseVfs( root, initial, dialogMode, schemeRestrictions, showFileScheme, defaultScheme, namedCluster, true, true ); } protected FileObject browseVfs( FileObject root, FileObject initial, int dialogMode, String[] schemeRestrictions, boolean showFileScheme, String defaultScheme, NamedCluster namedCluster, boolean showLocation ) throws KettleFileException { return browseVfs( root, initial, dialogMode, schemeRestrictions, showFileScheme, defaultScheme, namedCluster, showLocation, true ); } protected FileObject browseVfs( FileObject root, FileObject initial, int dialogMode, String[] schemeRestrictions, boolean showFileScheme, String defaultScheme, NamedCluster namedCluster, boolean showLocation, boolean showCustomUI ) throws KettleFileException { Spoon spoon = Spoon.getInstance(); if ( initial == null ) { initial = KettleVFS.getInstance( spoon.getExecutionBowl() ) .getFileObject( Spoon.getInstance().getLastFileOpened() ); } if ( root == null ) { try { root = initial.getFileSystem().getRoot(); } catch ( FileSystemException e ) { throw new KettleFileException( e ); } } VfsFileChooserHelper fileChooserHelper = new VfsFileChooserHelper( getShell(), Spoon.getInstance().getVfsFileChooserDialog( 
root, initial ), jobEntry ); fileChooserHelper.setDefaultScheme( defaultScheme ); fileChooserHelper.setSchemeRestrictions( schemeRestrictions ); fileChooserHelper.setShowFileScheme( showFileScheme ); if ( namedCluster != null ) { fileChooserHelper.setNamedCluster( namedCluster ); } try { return fileChooserHelper.browse( getFileFilters(), getFileFilterNames(), initial.getName().getURI(), dialogMode, showLocation, showCustomUI ); } catch ( KettleException e ) { throw new KettleFileException( e ); } catch ( FileSystemException e ) { throw new KettleFileException( e ); } } protected String[] getFileFilters() { return DEFAULT_FILE_FILTERS; } /** * Used by browseVfs method as names corresponding to the file filters. Override if {@code getFileFilters} is * overridden. * * @return */ protected String[] getFileFilterNames() { return new String[] { BaseMessages.getString( getClass(), "System.FileType.AllFiles" ) }; } /** * @return the current configuration object. This configuration may be discarded if the dialog is canceled. */ public C getConfig() { return config; } public void setConfig( C config ) { this.config = config; } /** * @return the job meta for the job entry we're editing */ public JobMeta getJobMeta() { return jobMeta; } public void setJobMeta( JobMeta jobMeta ) { this.jobMeta = jobMeta; } public JobEntryMode getJobEntryMode() { return jobEntryMode; } /** * Toggle between Advanced and Basic configuration modes */ public void toggleMode() { JobEntryMode mode = ( jobEntryMode == JobEntryMode.ADVANCED_LIST ? JobEntryMode.QUICK_SETUP : JobEntryMode.ADVANCED_LIST ); setMode( mode ); } protected void setMode( JobEntryMode mode ) { // if switching from Advanced to Quick mode, warn the user that any changes made in Advanced mode will be lost if ( this.jobEntryMode == JobEntryMode.ADVANCED_LIST && mode == JobEntryMode.QUICK_SETUP ) { boolean confirmed = showConfirmationDialog( BaseMessages.getString( AbstractJobEntryController.class, "JobExecutor.Confirm.Toggle.Quick.Mode.Title" ), BaseMessages.getString( AbstractJobEntryController.class, "JobExecutor.Confirm.Toggle.Quick.Mode.Message" ) ); if ( !confirmed ) { return; } } JobEntryMode opposite = mode == JobEntryMode.QUICK_SETUP ? JobEntryMode.ADVANCED_LIST : JobEntryMode.QUICK_SETUP; this.jobEntryMode = mode; XulDeck deck = (XulDeck) getXulDomContainer().getDocumentRoot().getElementById( getModeDeckElementId() ); deck.setSelectedIndex( mode == JobEntryMode.QUICK_SETUP ? 0 : 1 ); // Synchronize the model every time we swap modes so the UI is always up to date. This is required since we don't // set argument item values directly or listen for their changes syncModel(); // Swap the label on the button setModeToggleLabel( opposite ); } /** * The mode deck element defined in your xul. Override this to customize the element id * * @return */ protected String getModeDeckElementId() { return "modeDeck"; } // ////////////////// // abstract methods // ////////////////// protected abstract void syncModel(); protected abstract void createBindings( C config, XulDomContainer container, BindingFactory bindingFactory, Collection bindings ); protected abstract String getDialogElementId(); protected abstract void setModeToggleLabel( JobEntryMode mode ); } ================================================ FILE: kettle-plugins/common/job/src/main/java/org/pentaho/big/data/kettle/plugins/job/BlockableJobConfig.java ================================================ /*! 
****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.job; import org.pentaho.ui.xul.XulEventSource; import java.beans.PropertyChangeListener; import java.beans.PropertyChangeSupport; /** * User: RFellows Date: 6/5/12 */ public class BlockableJobConfig implements XulEventSource, Cloneable { protected transient PropertyChangeSupport pcs = new PropertyChangeSupport( this ); protected String jobEntryName = null; protected String blockingPollingInterval = String.valueOf( 300 ); protected String blockingExecution = Boolean.TRUE.toString(); public static final String JOB_ENTRY_NAME = "jobEntryName"; public static final String BLOCKING_EXECUTION = "blockingExecution"; public static final String BLOCKING_POLLING_INTERVAL = "blockingPollingInterval"; public String getJobEntryName() { return jobEntryName; } public void setJobEntryName( String jobEntryName ) { String old = this.jobEntryName; this.jobEntryName = jobEntryName; pcs.firePropertyChange( JOB_ENTRY_NAME, old, this.jobEntryName ); } public String getBlockingPollingInterval() { return blockingPollingInterval; } public void setBlockingPollingInterval( String blockingPollingInterval ) { String old = this.blockingPollingInterval; this.blockingPollingInterval = blockingPollingInterval; pcs.firePropertyChange( BLOCKING_POLLING_INTERVAL, old, this.blockingPollingInterval ); } public String getBlockingExecution() { return blockingExecution; } public void setBlockingExecution( String blockingExecution ) { String old = this.blockingExecution; this.blockingExecution = blockingExecution; pcs.firePropertyChange( BLOCKING_EXECUTION, old, this.blockingExecution ); } /** * @see {@link PropertyChangeSupport#addPropertyChangeListener(PropertyChangeListener)} */ public void addPropertyChangeListener( PropertyChangeListener l ) { pcs.addPropertyChangeListener( l ); } /** * @see {@link PropertyChangeSupport#addPropertyChangeListener(String, PropertyChangeListener)} */ public void addPropertyChangeListener( String propertyName, PropertyChangeListener l ) { pcs.addPropertyChangeListener( propertyName, l ); } /** * @see {@link PropertyChangeSupport#removePropertyChangeListener(PropertyChangeListener)} */ public void removePropertyChangeListener( PropertyChangeListener l ) { pcs.removePropertyChangeListener( l ); } /** * @see {@link PropertyChangeSupport#removePropertyChangeListener(String, PropertyChangeListener)} */ public void removePropertyChangeListener( String propertyName, PropertyChangeListener l ) { pcs.removePropertyChangeListener( propertyName, l ); } @Override public Object clone() { try { return super.clone(); } catch ( CloneNotSupportedException e ) { throw new RuntimeException( e ); } } @Override public boolean equals( Object o ) { if ( this == o ) { return true; } if ( o == null || getClass() != o.getClass() ) { return false; } BlockableJobConfig that = (BlockableJobConfig) o; if ( blockingExecution != null ? !blockingExecution.equals( that.blockingExecution ) : that.blockingExecution != null ) { return false; } if ( blockingPollingInterval != null ? 
!blockingPollingInterval.equals( that.blockingPollingInterval ) : that.blockingPollingInterval != null ) { return false; } if ( jobEntryName != null ? !jobEntryName.equals( that.jobEntryName ) : that.jobEntryName != null ) { return false; } return true; } @Override public int hashCode() { int result = jobEntryName != null ? jobEntryName.hashCode() : 0; result = 31 * result + ( blockingPollingInterval != null ? blockingPollingInterval.hashCode() : 0 ); result = 31 * result + ( blockingExecution != null ? blockingExecution.hashCode() : 0 ); return result; } } ================================================ FILE: kettle-plugins/common/job/src/main/java/org/pentaho/big/data/kettle/plugins/job/JobEntryMode.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.job; /** * Represents visible states of the UI and the execution mode. * * User: RFellows Date: 6/11/12 */ public enum JobEntryMode { QUICK_SETUP, ADVANCED_LIST, ADVANCED_COMMAND_LINE } ================================================ FILE: kettle-plugins/common/job/src/main/java/org/pentaho/big/data/kettle/plugins/job/JobEntrySerializationHelper.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.job; import org.pentaho.di.cluster.SlaveServer; import org.pentaho.di.core.Const; import org.pentaho.di.core.database.DatabaseMeta; import org.pentaho.di.core.encryption.Encr; import org.pentaho.di.core.exception.KettleException; import org.pentaho.di.core.xml.XMLHandler; import org.pentaho.di.core.xml.XMLParserFactoryProducer; import org.pentaho.di.repository.ObjectId; import org.pentaho.di.repository.Repository; import org.w3c.dom.Document; import org.w3c.dom.Node; import org.w3c.dom.NodeList; import org.xml.sax.SAXException; import javax.xml.parsers.DocumentBuilderFactory; import javax.xml.parsers.ParserConfigurationException; import java.io.ByteArrayInputStream; import java.io.IOException; import java.io.Serializable; import java.lang.reflect.Array; import java.lang.reflect.Constructor; import java.lang.reflect.Field; import java.lang.reflect.Modifier; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.List; public class JobEntrySerializationHelper implements Serializable { private static final long serialVersionUID = -3924431164206698711L; private static final String INDENT_STRING = " "; /** * This method will perform the work that used to be done by hand in each kettle input meta for: readData(Node node). * We handle all primitive types, complex user types, arrays, lists and any number of nested object levels, via * recursion of this method. 
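* Fields that are final, static or transient are skipped, and values for fields annotated with {@link Password} are decrypted as they are read.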
* * @param object * The object to be persisted * @param node * The node to 'attach' our XML to */ public static void read( Object object, Node node ) { // get this classes declared fields, public, private, protected, package, everything, but not super Field[] declaredFields = getAllDeclaredFields( object.getClass() ); for ( Field field : declaredFields ) { // ignore fields which are final, static or transient if ( Modifier.isFinal( field.getModifiers() ) || Modifier.isStatic( field.getModifiers() ) || Modifier.isTransient( field.getModifiers() ) ) { continue; } // if the field is not accessible (private), we'll open it up so we can operate on it boolean accessible = field.isAccessible(); if ( !accessible ) { field.setAccessible( true ); } try { // check if we're going to try to read an array if ( field.getType().isArray() ) { try { // get the node (if available) for the field Node fieldNode = XMLHandler.getSubNode( node, field.getName() ); if ( fieldNode == null ) { // doesn't exist (this is possible if fields were empty/null when persisted) continue; } // get the Java classname for the array elements String fieldClassName = XMLHandler.getTagAttribute( fieldNode, "class" ); Class clazz = null; // primitive types require special handling if ( fieldClassName.equals( "boolean" ) ) { clazz = boolean.class; } else if ( fieldClassName.equals( "int" ) ) { clazz = int.class; } else if ( fieldClassName.equals( "float" ) ) { clazz = float.class; } else if ( fieldClassName.equals( "double" ) ) { clazz = double.class; } else if ( fieldClassName.equals( "long" ) ) { clazz = long.class; } else { // normal, non primitive array class clazz = Class.forName( fieldClassName ); } // get the child nodes for the field NodeList childrenNodes = fieldNode.getChildNodes(); // create a new, appropriately sized array int arrayLength = 0; for ( int i = 0; i < childrenNodes.getLength(); i++ ) { Node child = childrenNodes.item( i ); // ignore TEXT_NODE, they'll cause us to have a larger count than reality, even if they are empty if ( child.getNodeType() != Node.TEXT_NODE ) { arrayLength++; } } // create a new instance of our array Object array = Array.newInstance( clazz, arrayLength ); // set the new array on the field (on object, passed in) field.set( object, array ); int arrayIndex = 0; for ( int i = 0; i < childrenNodes.getLength(); i++ ) { Node child = childrenNodes.item( i ); if ( child.getNodeType() == Node.TEXT_NODE ) { continue; } // roll through all of our array elements setting them as encountered if ( String.class.isAssignableFrom( clazz ) || Number.class.isAssignableFrom( clazz ) ) { Constructor constructor = clazz.getConstructor( String.class ); Object instance = constructor.newInstance( XMLHandler.getTagAttribute( child, "value" ) ); Array.set( array, arrayIndex++, instance ); } else if ( Boolean.class.isAssignableFrom( clazz ) || boolean.class.isAssignableFrom( clazz ) ) { Object value = Boolean.valueOf( XMLHandler.getTagAttribute( child, "value" ) ); Array.set( array, arrayIndex++, value ); } else if ( Integer.class.isAssignableFrom( clazz ) || int.class.isAssignableFrom( clazz ) ) { Object value = Integer.valueOf( XMLHandler.getTagAttribute( child, "value" ) ); Array.set( array, arrayIndex++, value ); } else if ( Float.class.isAssignableFrom( clazz ) || float.class.isAssignableFrom( clazz ) ) { Object value = Float.valueOf( XMLHandler.getTagAttribute( child, "value" ) ); Array.set( array, arrayIndex++, value ); } else if ( Double.class.isAssignableFrom( clazz ) || double.class.isAssignableFrom( clazz ) 
) { Object value = Double.valueOf( XMLHandler.getTagAttribute( child, "value" ) ); Array.set( array, arrayIndex++, value ); } else if ( Long.class.isAssignableFrom( clazz ) || long.class.isAssignableFrom( clazz ) ) { Object value = Long.valueOf( XMLHandler.getTagAttribute( child, "value" ) ); Array.set( array, arrayIndex++, value ); } else { // create an instance of 'fieldClassName' Object instance = clazz.newInstance(); // add the instance to the array Array.set( array, arrayIndex++, instance ); // read child, the same way as the parent read( instance, child ); } } } catch ( Throwable t ) { t.printStackTrace(); // TODO: log this } } else if ( Collection.class.isAssignableFrom( field.getType() ) ) { // handle collections try { // get the node (if available) for the field Node fieldNode = XMLHandler.getSubNode( node, field.getName() ); if ( fieldNode == null ) { // doesn't exist (this is possible if fields were empty/null when persisted) continue; } // get the Java classname for the array elements String fieldClassName = XMLHandler.getTagAttribute( fieldNode, "class" ); fieldClassName = upgradeName( fieldClassName ); Class clazz = Class.forName( fieldClassName ); // create a new, appropriately sized array, we already know it's a collection @SuppressWarnings( "unchecked" ) Collection collection = (Collection) field.getType().newInstance(); field.set( object, collection ); // iterate over all of the array elements and add them one by one as encountered NodeList childrenNodes = fieldNode.getChildNodes(); for ( int i = 0; i < childrenNodes.getLength(); i++ ) { Node child = childrenNodes.item( i ); if ( child.getNodeType() == Node.TEXT_NODE ) { continue; } // create an instance of 'fieldClassName' if ( String.class.isAssignableFrom( clazz ) || Number.class.isAssignableFrom( clazz ) || Boolean.class.isAssignableFrom( clazz ) ) { Constructor constructor = clazz.getConstructor( String.class ); Object instance = constructor.newInstance( XMLHandler.getTagAttribute( child, "value" ) ); collection.add( instance ); } else { // read child, the same way as the parent Object instance = clazz.newInstance(); // add the instance to the array collection.add( instance ); read( instance, child ); } } } catch ( Throwable t ) { t.printStackTrace(); // TODO: log this } } else { // we're handling a regular field (not an array or list) try { String value = XMLHandler.getTagValue( node, field.getName() ); if ( value == null ) { continue; } if ( field.isAnnotationPresent( Password.class ) ) { value = Encr.decryptPasswordOptionallyEncrypted( value ); } // System.out.println("Setting " + field.getName() + "(" + field.getType().getSimpleName() + ") = " + value // + " on: " + object.getClass().getName()); if ( field.getType().isPrimitive() && "".equals( value ) ) { // skip setting of primitives if we see null continue; } else if ( "".equals( value ) ) { field.set( object, value ); } else if ( field.getType().isPrimitive() ) { // special primitive handling if ( double.class.isAssignableFrom( field.getType() ) ) { field.set( object, Double.parseDouble( value ) ); } else if ( float.class.isAssignableFrom( field.getType() ) ) { field.set( object, Float.parseFloat( value ) ); } else if ( long.class.isAssignableFrom( field.getType() ) ) { field.set( object, Long.parseLong( value ) ); } else if ( int.class.isAssignableFrom( field.getType() ) ) { field.set( object, Integer.parseInt( value ) ); } else if ( byte.class.isAssignableFrom( field.getType() ) ) { field.set( object, value.getBytes() ); } else if ( 
boolean.class.isAssignableFrom( field.getType() ) ) { field.set( object, "true".equalsIgnoreCase( value ) ); } } else if ( String.class.isAssignableFrom( field.getType() ) || Number.class.isAssignableFrom( field.getType() ) || Boolean.class.isAssignableFrom( field.getType() ) ) { Constructor constructor = field.getType().getConstructor( String.class ); Object instance = constructor.newInstance( value ); field.set( object, instance ); } else { // we don't know what we're handling, but we'll give it a shot Node fieldNode = XMLHandler.getSubNode( node, field.getName() ); if ( fieldNode == null ) { // doesn't exist (this is possible if fields were empty/null when persisted) continue; } // get the Java classname for the array elements String fieldClassName = XMLHandler.getTagAttribute( fieldNode, "class" ); Class clazz = Class.forName( fieldClassName ); Object instance = clazz.newInstance(); field.set( object, instance ); read( instance, fieldNode ); } } catch ( Throwable t ) { // TODO: log this t.printStackTrace(); } } } finally { if ( !accessible ) { field.setAccessible( false ); } } } } private static String upgradeName( String fieldClassName ) { if ( fieldClassName.equals( "org.pentaho.di.job.PropertyEntry" ) ) { return "org.pentaho.big.data.kettle.plugins.job.PropertyEntry"; } return fieldClassName; } /** * This method will perform the work that used to be done by hand in each kettle input meta for: getXML(). We handle * all primitive types, complex user types, arrays, lists and any number of nested object levels, via recursion of * this method. * * @param object * @param buffer */ public static void write( Object object, int indentLevel, StringBuffer buffer ) { // don't even attempt to persist if ( object == null ) { return; } // get this classes declared fields, public, private, protected, package, everything, but not super Field[] declaredFields = getAllDeclaredFields( object.getClass() ); for ( Field field : declaredFields ) { // ignore fields which are final, static or transient if ( Modifier.isFinal( field.getModifiers() ) || Modifier.isStatic( field.getModifiers() ) || Modifier.isTransient( field.getModifiers() ) ) { continue; } // if the field is not accessible (private), we'll open it up so we can operate on it boolean accessible = field.isAccessible(); if ( !accessible ) { field.setAccessible( true ); } try { Object fieldValue = field.get( object ); // no value? null? skip it! 
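// After the empty-value check below: @Password strings are encrypted, primitives/strings/numbers/booleans
// are written as simple tag values, arrays and collections as nested element lists, and any other type is
// written recursively as a child node whose class name is recorded in a "class" attribute.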
if ( fieldValue == null || "".equals( fieldValue ) ) { continue; } if ( field.isAnnotationPresent( Password.class ) && String.class.isAssignableFrom( field.getType() ) ) { fieldValue = Encr.encryptPasswordIfNotUsingVariables( String.class.cast( fieldValue ) ); } if ( field.getType().isPrimitive() || String.class.isAssignableFrom( field.getType() ) || Number.class.isAssignableFrom( field.getType() ) || Boolean.class.isAssignableFrom( field.getType() ) ) { indent( buffer, indentLevel ); buffer.append( XMLHandler.addTagValue( field.getName(), fieldValue.toString() ) ); } else if ( field.getType().isArray() ) { // write array values int length = Array.getLength( fieldValue ); // open node (add class name attribute) indent( buffer, indentLevel ); buffer.append( "<" + field.getName() + " class=\"" + fieldValue.getClass().getComponentType().getName() + "\">" ) .append( Const.CR ); for ( int i = 0; i < length; i++ ) { Object childObject = Array.get( fieldValue, i ); // handle all strings/numbers if ( String.class.isAssignableFrom( childObject.getClass() ) || Number.class.isAssignableFrom( childObject.getClass() ) ) { indent( buffer, indentLevel + 1 ); buffer.append( "<" ).append( fieldValue.getClass().getComponentType().getSimpleName() ); buffer.append( " value=\"" + childObject.toString() + "\"/>" ).append( Const.CR ); } else if ( Boolean.class.isAssignableFrom( childObject.getClass() ) || boolean.class.isAssignableFrom( childObject.getClass() ) ) { // handle booleans (special case) indent( buffer, indentLevel + 1 ); buffer.append( "<" ).append( fieldValue.getClass().getComponentType().getSimpleName() ); buffer.append( " value=\"" + childObject.toString() + "\"/>" ).append( Const.CR ); } else { // array element is a user defined/complex type, recurse into it indent( buffer, indentLevel + 1 ); buffer.append( "<" + fieldValue.getClass().getComponentType().getSimpleName() + ">" ).append( Const.CR ); write( childObject, indentLevel + 1, buffer ); indent( buffer, indentLevel + 1 ); buffer.append( "" ).append( Const.CR ); } } // close node buffer.append( " " ).append( Const.CR ); } else if ( Collection.class.isAssignableFrom( field.getType() ) ) { // write collection values Collection collection = (Collection) fieldValue; if ( collection.size() == 0 ) { continue; } Class listClass = collection.iterator().next().getClass(); // open node (add class name attribute) indent( buffer, indentLevel ); buffer.append( "<" + field.getName() + " class=\"" + listClass.getName() + "\">" ).append( Const.CR ); for ( Object childObject : collection ) { // handle all strings/numbers if ( String.class.isAssignableFrom( childObject.getClass() ) || Number.class.isAssignableFrom( childObject.getClass() ) ) { indent( buffer, indentLevel + 1 ); buffer.append( "<" ).append( listClass.getSimpleName() ); buffer.append( " value=\"" + childObject.toString() + "\"/>" ).append( Const.CR ); } else if ( Boolean.class.isAssignableFrom( childObject.getClass() ) || boolean.class.isAssignableFrom( childObject.getClass() ) ) { // handle booleans (special case) indent( buffer, indentLevel + 1 ); buffer.append( "<" ).append( listClass.getSimpleName() ); buffer.append( " value=\"" + childObject.toString() + "\"/>" ).append( Const.CR ); } else { // array element is a user defined/complex type, recurse into it indent( buffer, indentLevel + 1 ); buffer.append( "<" + listClass.getSimpleName() + ">" ).append( Const.CR ); write( childObject, indentLevel + 1, buffer ); indent( buffer, indentLevel + 1 ); buffer.append( "" ).append( Const.CR ); } } // 
close node indent( buffer, indentLevel ); buffer.append( "" ).append( Const.CR ); } else { // if we don't now what it is, let's treat it like a first class citizen and try to write it out // open node (add class name attribute) indent( buffer, indentLevel ); buffer.append( "<" + field.getName() + " class=\"" + fieldValue.getClass().getName() + "\">" ).append( Const.CR ); write( fieldValue, indentLevel + 1, buffer ); // close node indent( buffer, indentLevel ); buffer.append( "" ).append( Const.CR ); } } catch ( Throwable t ) { t.printStackTrace(); // TODO: log this } finally { if ( !accessible ) { field.setAccessible( false ); } } } } /** * Get all declared fields of the provided class including any inherited class fields. * * @param aClass * Class to look up fields for * @return All declared fields for the class provided */ private static Field[] getAllDeclaredFields( Class aClass ) { List fields = new ArrayList(); while ( aClass != null ) { fields.addAll( Arrays.asList( aClass.getDeclaredFields() ) ); aClass = aClass.getSuperclass(); } return fields.toArray( new Field[0] ); } /** * Handle saving of the input (object) to the kettle repository using the most simple method available, by calling * write and then saving the xml as an attribute. * * @param object * @param rep * @param id_job * @param id_jobentry * @throws KettleException */ public static void saveRep( Object object, Repository rep, ObjectId id_job, ObjectId id_jobentry ) throws KettleException { StringBuffer sb = new StringBuffer( 1024 ); sb.append( "" ); write( object, 0, sb ); sb.append( "" ); rep.saveJobEntryAttribute( id_job, id_jobentry, "job-xml", sb.toString() ); } /** * Handle reading of the input (object) from the kettle repository by getting the xml from the repository attribute * string and then re-hydrate the object with our already existing read method. * * @param object * @param rep * @param id_job * @param databases * @param slaveServers * @throws KettleException */ public static void loadRep( Object object, Repository rep, ObjectId id_job, List databases, List slaveServers ) throws KettleException { try { String xml = rep.getJobEntryAttributeString( id_job, "job-xml" ); ByteArrayInputStream bais = new ByteArrayInputStream( xml.getBytes() ); DocumentBuilderFactory factory = XMLParserFactoryProducer.createSecureDocBuilderFactory(); Document doc = factory.newDocumentBuilder().parse( bais ); read( object, doc.getDocumentElement() ); } catch ( ParserConfigurationException ex ) { throw new KettleException( ex.getMessage(), ex ); } catch ( SAXException ex ) { throw new KettleException( ex.getMessage(), ex ); } catch ( IOException ex ) { throw new KettleException( ex.getMessage(), ex ); } } private static void indent( StringBuffer sb, int indentLevel ) { for ( int i = 0; i < indentLevel; i++ ) { sb.append( INDENT_STRING ); } } } ================================================ FILE: kettle-plugins/common/job/src/main/java/org/pentaho/big/data/kettle/plugins/job/JobEntryUtils.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. 
* * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.job; import org.pentaho.di.core.variables.VariableSpace; import java.util.Map; /** * User: RFellows Date: 6/7/12 */ public class JobEntryUtils { /** * @return {@code true} if {@link Boolean#parseBoolean(String)} returns {@code true} for * {@link #isBlockingExecution()} */ /** * Determine if the string equates to {@link Boolean#TRUE} after performing a variable substitution. * * @param s * String-encoded boolean value or variable expression * @param variableSpace * Context for variables so we can substitute {@code s} * @return the value returned by {@link Boolean#parseBoolean(String) Boolean.parseBoolean(s)} after substitution */ public static boolean asBoolean( String s, VariableSpace variableSpace ) { String value = variableSpace.environmentSubstitute( s ); return Boolean.parseBoolean( value ); } /** * Parse the string as a {@link Long} after variable substitution. * * @param s * String-encoded {@link Long} value or variable expression that should resolve to a {@link Long} value * @param variableSpace * Context for variables so we can substitute {@code s} * @return the value returned by {@link Long#parseLong(String, int) Long.parseLong(s, 10)} after substitution */ public static Long asLong( String s, VariableSpace variableSpace ) { String value = variableSpace.environmentSubstitute( s ); return value == null ? null : Long.valueOf( value, 10 ); } } ================================================ FILE: kettle-plugins/common/job/src/main/java/org/pentaho/big/data/kettle/plugins/job/Password.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.job; import java.lang.annotation.Documented; import java.lang.annotation.ElementType; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; import java.lang.annotation.Target; /** * Denotes a field is a password and must be encrypted when serialized. This must be placed on a {@link String} field. */ @Documented @Retention( RetentionPolicy.RUNTIME ) @Target( ElementType.FIELD ) public @interface Password { } ================================================ FILE: kettle-plugins/common/job/src/main/java/org/pentaho/big/data/kettle/plugins/job/PropertyEntry.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. 
* * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.job; import org.pentaho.ui.xul.XulEventSource; import java.beans.PropertyChangeListener; import java.util.Map; /** * User: RFellows Date: 6/18/12 */ public class PropertyEntry implements Map.Entry, XulEventSource { private String key = null; private String value = null; public PropertyEntry() { this( null, null ); } public PropertyEntry( String key, String value ) { this.key = key; this.value = value; } @Override public String getKey() { return key; } public void setKey( String key ) { this.key = key; } @Override public String getValue() { return value; } @Override public String setValue( String value ) { this.value = value; return value; } @Override public boolean equals( Object o ) { if ( this == o ) { return true; } if ( o == null || getClass() != o.getClass() ) { return false; } PropertyEntry that = (PropertyEntry) o; if ( key != null ? !key.equals( that.key ) : that.key != null ) { return false; } if ( value != null ? !value.equals( that.value ) : that.value != null ) { return false; } return true; } @Override public int hashCode() { int result = key != null ? key.hashCode() : 0; result = 31 * result + ( value != null ? value.hashCode() : 0 ); return result; } @Override public void addPropertyChangeListener( PropertyChangeListener propertyChangeListener ) { } @Override public void removePropertyChangeListener( PropertyChangeListener propertyChangeListener ) { } } ================================================ FILE: kettle-plugins/common/job/src/main/resources/org/pentaho/big/data/kettle/plugins/job/messages/messages_en_US.properties ================================================ JobExecutor.Confirm.Toggle.Quick.Mode.Title=Confirm leaving Advanced Mode JobExecutor.Confirm.Toggle.Quick.Mode.Message=Any changes made in "Advanced" mode will be lost by switching to "Quick Setup" mode.\nAre you sure you want to proceed? ================================================ FILE: kettle-plugins/common/job/src/test/java/org/pentaho/big/data/kettle/plugins/job/AbstractJobEntryTest.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. 
* * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.job; import org.junit.Assert; import org.junit.Test; import org.pentaho.di.core.KettleEnvironment; import org.pentaho.di.core.Result; import org.pentaho.di.core.database.DatabaseMeta; import org.pentaho.di.core.exception.KettleException; import org.pentaho.di.core.xml.XMLHandler; import org.pentaho.di.job.Job; import org.pentaho.di.job.entry.JobEntryCopy; import org.pentaho.di.repository.LongObjectId; import org.pentaho.di.repository.ObjectId; import org.pentaho.di.repository.RepositoryMeta; import org.pentaho.di.repository.kdr.KettleDatabaseRepository; import org.pentaho.di.repository.kdr.KettleDatabaseRepositoryCreationHelper; import org.pentaho.di.repository.kdr.KettleDatabaseRepositoryMeta; import org.w3c.dom.Document; import java.io.File; import java.util.ArrayList; import java.util.List; import static junit.framework.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; /** * User: RFellows Date: 6/5/12 */ public class AbstractJobEntryTest { class TestJobEntry extends AbstractJobEntry { private long waitTime = 0L; TestJobEntry() { } TestJobEntry( long waitTime ) { this.waitTime = waitTime; } @Override protected BlockableJobConfig createJobConfig() { return new BlockableJobConfig(); } @Override public List getValidationWarnings( BlockableJobConfig config ) { return new ArrayList(); } @Override protected Runnable getExecutionRunnable( Result jobResult ) { return new Runnable() { @Override public void run() { try { Thread.sleep( waitTime ); } catch ( InterruptedException e ) { throw new RuntimeException( e ); } } }; } @Override protected void handleUncaughtThreadException( Thread t, Throwable e, Result jobResult ) { logError( "Error executing Job", e ); setJobResultFailed( jobResult ); } }; @Test public void testLoadXml() throws Exception { TestJobEntry jobEntry = new TestJobEntry(); BlockableJobConfig jobConfig = new BlockableJobConfig(); jobConfig.setJobEntryName( "Job Name" ); jobEntry.setJobConfig( jobConfig ); JobEntryCopy jec = new JobEntryCopy( jobEntry ); jec.setLocation( 0, 0 ); String xml = jec.getXML(); Document d = XMLHandler.loadXMLString( xml ); TestJobEntry jobEntry2 = new TestJobEntry(); jobEntry2.loadXML( d.getDocumentElement(), null, null, null ); BlockableJobConfig jobConfig2 = jobEntry2.getJobConfig(); assertEquals( jobConfig.getJobEntryName(), jobConfig2.getJobEntryName() ); } @Test public void testLoadRep() throws Exception { TestJobEntry je = new TestJobEntry(); BlockableJobConfig config = new BlockableJobConfig(); config.setJobEntryName( "testing" ); je.setJobConfig( config ); KettleEnvironment.init(); String filename = File.createTempFile( getClass().getSimpleName() + "-export-dbtest", "" ).getAbsolutePath(); try { DatabaseMeta databaseMeta = new DatabaseMeta( "H2Repo", "H2", "JDBC", null, filename, null, null, null ); RepositoryMeta repositoryMeta = new KettleDatabaseRepositoryMeta( "KettleDatabaseRepository", "H2Repo", "H2 Repository", databaseMeta ); KettleDatabaseRepository repository = new KettleDatabaseRepository(); repository.init( repositoryMeta ); repository.connectionDelegate.connect( true, true ); KettleDatabaseRepositoryCreationHelper helper = new KettleDatabaseRepositoryCreationHelper( repository ); helper.createRepositorySchema( null, false, new ArrayList(), false ); repository.disconnect(); // Test connecting... 
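    /*
     * Aside (illustrative sketch, not part of the original test): the je.saveRep(...) and je2.loadRep(...)
     * calls below are expected to delegate to the reflection-based serialization helper whose write()/saveRep()/
     * loadRep() methods appear earlier in this module. The helper class name (JobEntrySerializationHelper) and
     * the getObjectId() accessor are assumptions used only for illustration; a concrete job entry would wire it
     * up roughly like this:
     *
     *   public void saveRep( Repository rep, ObjectId id_job ) throws KettleException {
     *     JobEntrySerializationHelper.saveRep( this, rep, id_job, getObjectId() );
     *   }
     *
     *   public void loadRep( Repository rep, ObjectId id_jobentry, List<DatabaseMeta> databases,
     *                        List<SlaveServer> slaveServers ) throws KettleException {
     *     JobEntrySerializationHelper.loadRep( this, rep, id_jobentry, databases, slaveServers );
     *   }
     */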
// repository.connect( "admin", "admin" ); assertTrue( repository.isConnected() ); // A job entry must have an ID if we're going to save it to a repository je.setObjectId( new LongObjectId( 1 ) ); ObjectId id_job = new LongObjectId( 1 ); // Save the original job entry into the repository je.saveRep( repository, id_job ); // Load it back into a new job entry TestJobEntry je2 = new TestJobEntry(); je2.loadRep( repository, id_job, null, null ); // Make sure all settings we set are properly loaded BlockableJobConfig config2 = je2.getJobConfig(); Assert.assertEquals( config.getJobEntryName(), config2.getJobEntryName() ); } finally { // Delete test database new File( filename + ".h2.db" ).delete(); new File( filename + ".trace.db" ).delete(); } } @Test public void testEvaluates() throws Exception { TestJobEntry jobEntry = new TestJobEntry(); assertTrue( jobEntry.evaluates() ); } @Test public void testIsUnconditional() throws Exception { TestJobEntry jobEntry = new TestJobEntry(); assertTrue( jobEntry.isUnconditional() ); } @Test public void execute_blocking() throws KettleException { final long waitTime = 1000; TestJobEntry je = new TestJobEntry( waitTime ); je.setParentJob( new Job( "test", null, null ) ); Result result = new Result(); long start = System.currentTimeMillis(); je.execute( result, 0 ); long end = System.currentTimeMillis(); assertTrue( "Total runtime should be >= the wait time if we are blocking", ( end - start ) >= waitTime ); Assert.assertEquals( 0, result.getNrErrors() ); assertTrue( result.getResult() ); } @Test public void execute_nonblocking() throws KettleException { final long waitTime = 1000; TestJobEntry je = new TestJobEntry( waitTime ); je.setParentJob( new Job( "test", null, null ) ); je.getJobConfig().setBlockingExecution( "false" ); Result result = new Result(); long start = System.currentTimeMillis(); je.execute( result, 0 ); long end = System.currentTimeMillis(); assertTrue( "Total runtime should be less than the wait time if we're not blocking", ( end - start ) < waitTime ); Assert.assertEquals( 0, result.getNrErrors() ); assertTrue( result.getResult() ); } @Test public void execute_interrupted() throws KettleException { final long waitTime = 1000 * 10; final List loggedErrors = new ArrayList(); TestJobEntry je = new TestJobEntry( waitTime ) { @Override public void logError( String message, Throwable e ) { loggedErrors.add( message ); } }; final Job parentJob = new Job( "test", null, null ); Thread t = new Thread() { @Override public void run() { try { Thread.sleep( 1000 ); } catch ( InterruptedException e ) { throw new RuntimeException( e ); } parentJob.stopAll(); } }; je.setParentJob( parentJob ); Result result = new Result(); // Start another thread to stop the parent job and unblock the job entry in 1 second t.start(); long start = System.currentTimeMillis(); je.execute( result, 0 ); long end = System.currentTimeMillis(); assertTrue( "Total runtime should be less than the wait time if we were properly interrupted", ( end - start ) < waitTime ); Assert.assertEquals( 1, result.getNrErrors() ); assertFalse( result.getResult() ); // Make sure when an uncaught exception occurs an error log message is generated Assert.assertEquals( 1, loggedErrors.size() ); } } ================================================ FILE: kettle-plugins/common/job/src/test/java/org/pentaho/big/data/kettle/plugins/job/BlockableJobConfigTest.java ================================================ /*! 
****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.job; import org.junit.Before; import org.junit.Test; import org.mockito.ArgumentCaptor; import org.mockito.Captor; import org.mockito.Mock; import org.mockito.MockitoAnnotations; import java.beans.PropertyChangeEvent; import java.beans.PropertyChangeListener; import static junit.framework.Assert.*; import static org.mockito.Matchers.any; import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; /** * User: RFellows Date: 6/5/12 */ public class BlockableJobConfigTest { @Mock PropertyChangeListener listener; @Captor ArgumentCaptor event; @Before public void init() { MockitoAnnotations.initMocks( this ); } @Test public void testAddPropertyChangeListener() throws Exception { BlockableJobConfig config = new BlockableJobConfig(); // make sure it is capturing property change events config.addPropertyChangeListener( listener ); config.setJobEntryName( "jobName1" ); verify( listener, times( 1 ) ).propertyChange( any( PropertyChangeEvent.class ) ); verify( listener ).propertyChange( event.capture() ); assertEquals( config.getJobEntryName(), event.getValue().getNewValue() ); // remove the listener & verify that it isn't receiving events anymore config.removePropertyChangeListener( listener ); config.setJobEntryName( "jobName2" ); verify( listener, times( 1 ) ).propertyChange( any( PropertyChangeEvent.class ) ); // still 1, from the previous call } @Test public void testAddPropertyChangeListener_propertyName() throws Exception { BlockableJobConfig config = new BlockableJobConfig(); // dummy property name, should not indicate any captured prop change config.addPropertyChangeListener( "dummy", listener ); config.setJobEntryName( "jobName0" ); verify( listener, times( 0 ) ).propertyChange( any( PropertyChangeEvent.class ) ); config.removePropertyChangeListener( "dummy", listener ); // make sure it is capturing property change events config.addPropertyChangeListener( BlockableJobConfig.JOB_ENTRY_NAME, listener ); config.setJobEntryName( "jobName1" ); verify( listener, times( 1 ) ).propertyChange( any( PropertyChangeEvent.class ) ); verify( listener ).propertyChange( event.capture() ); assertEquals( config.getJobEntryName(), event.getValue().getNewValue() ); // remove the listener & verify that it isn't receiving events anymore config.removePropertyChangeListener( BlockableJobConfig.JOB_ENTRY_NAME, listener ); config.setJobEntryName( "jobName2" ); verify( listener, times( 1 ) ).propertyChange( any( PropertyChangeEvent.class ) ); // still 1, from the previous call } @Test public void testGetterAndSetter() throws Exception { BlockableJobConfig config = new BlockableJobConfig(); assertNull( config.getJobEntryName() ); config.setJobEntryName( "jobName" ); assertEquals( "jobName", config.getJobEntryName() ); } @Test public void testClone() throws Exception { BlockableJobConfig configOrig = new BlockableJobConfig(); configOrig.setJobEntryName( "Test" ); BlockableJobConfig configCloned = (BlockableJobConfig) configOrig.clone(); assertNotSame( configOrig, configCloned ); assertEquals( configOrig, configCloned ); configOrig.setJobEntryName( "New Name" ); assertFalse( 
configOrig.getJobEntryName().equals( configCloned.getJobEntryName() ) ); } } ================================================ FILE: kettle-plugins/common/job/src/test/java/org/pentaho/big/data/kettle/plugins/job/JobEntryUtilsTest.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.job; import org.junit.Test; import org.pentaho.di.core.variables.VariableSpace; import org.pentaho.di.core.variables.Variables; import static org.junit.Assert.*; /** * User: RFellows Date: 6/7/12 */ public class JobEntryUtilsTest { @Test public void asBoolean() { VariableSpace variableSpace = new Variables(); assertFalse( JobEntryUtils.asBoolean( "not-true", variableSpace ) ); assertFalse( JobEntryUtils.asBoolean( Boolean.FALSE.toString(), variableSpace ) ); assertTrue( JobEntryUtils.asBoolean( Boolean.TRUE.toString(), variableSpace ) ); // No variable set, should attempt convert ${booleanValue} as is assertFalse( JobEntryUtils.asBoolean( "${booleanValue}", variableSpace ) ); variableSpace.setVariable( "booleanValue", Boolean.TRUE.toString() ); assertTrue( JobEntryUtils.asBoolean( "${booleanValue}", variableSpace ) ); variableSpace.setVariable( "booleanValue", Boolean.FALSE.toString() ); assertFalse( JobEntryUtils.asBoolean( "${booleanValue}", variableSpace ) ); } @Test public void asLong() { VariableSpace variableSpace = new Variables(); assertNull( JobEntryUtils.asLong( null, variableSpace ) ); assertEquals( Long.valueOf( "10", 10 ), JobEntryUtils.asLong( "10", variableSpace ) ); variableSpace.setVariable( "long", "150" ); assertEquals( Long.valueOf( "150", 10 ), JobEntryUtils.asLong( "${long}", variableSpace ) ); try { JobEntryUtils.asLong( "NaN", variableSpace ); fail( "expected number format exception" ); } catch ( NumberFormatException ex ) { // we're good } } } ================================================ FILE: kettle-plugins/common/pom.xml ================================================ 4.0.0 pentaho pentaho-big-data-kettle-plugins 11.1.0.0-SNAPSHOT pentaho-big-data-kettle-plugins-common 11.1.0.0-SNAPSHOT pom ui job ================================================ FILE: kettle-plugins/common/ui/pom.xml ================================================ 4.0.0 pentaho pentaho-big-data-kettle-plugins-common 11.1.0.0-SNAPSHOT pentaho-big-data-kettle-plugins-common-ui 11.1.0.0-SNAPSHOT jar Pentaho Community Edition Project: ${project.artifactId} a Pentaho open source project http://www.pentaho.com site 11.1.0.0-SNAPSHOT pentaho pentaho-big-data-api-runtimeTest ${project.version} com.google.guava guava ${guava.version} pentaho-kettle kettle-ui-swt ${pdi.version} provided pentaho pentaho-platform-core ${platform.version} provided org.pentaho shim-api-core ${pentaho-hadoop-shims.version} provided org.pentaho shim-api ${pentaho-hadoop-shims.version} provided org.eclipse.core commands 3.3.0-I20070605-0010 test junit junit ${dependency.junit.revision} test org.mockito mockito-all ${dependency.mockito.revision} test ================================================ FILE: kettle-plugins/common/ui/src/main/java/org/pentaho/big/data/plugins/common/ui/ClusterTestDialog.java 
================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.plugins.common.ui; import org.apache.commons.lang.exception.ExceptionUtils; import org.eclipse.swt.SWT; import org.eclipse.swt.graphics.Rectangle; import org.eclipse.swt.layout.FormAttachment; import org.eclipse.swt.layout.FormData; import org.eclipse.swt.layout.FormLayout; import org.eclipse.swt.widgets.Button; import org.eclipse.swt.widgets.Dialog; import org.eclipse.swt.widgets.Display; import org.eclipse.swt.widgets.Event; import org.eclipse.swt.widgets.Label; import org.eclipse.swt.widgets.Listener; import org.eclipse.swt.widgets.ProgressBar; import org.eclipse.swt.widgets.Shell; import org.pentaho.di.core.Const; import org.pentaho.di.core.exception.KettleException; import org.pentaho.di.core.logging.KettleLogStore; import org.pentaho.di.core.logging.LogChannelInterface; import org.pentaho.di.i18n.BaseMessages; import org.pentaho.di.ui.core.PropsUI; import org.pentaho.di.ui.core.gui.GUIResource; import org.pentaho.di.ui.core.gui.WindowProperty; import org.pentaho.di.ui.spoon.Spoon; import org.pentaho.di.ui.trans.step.BaseStepDialog; import org.pentaho.hadoop.shim.api.cluster.NamedCluster; import org.pentaho.runtime.test.RuntimeTest; import org.pentaho.runtime.test.RuntimeTestProgressCallback; import org.pentaho.runtime.test.RuntimeTestStatus; import org.pentaho.runtime.test.RuntimeTester; import org.pentaho.runtime.test.module.RuntimeTestModuleResults; import org.pentaho.runtime.test.result.RuntimeTestResult; import org.pentaho.runtime.test.result.RuntimeTestResultEntry; import java.util.Iterator; /** * Dialog for testing a Named Cluster * * @see NamedCluster */ public class ClusterTestDialog extends Dialog { private static final Class PKG = ClusterTestDialog.class; private Shell shell; private PropsUI props; private final NamedCluster namedCluster; private final RuntimeTester runtimeTester; private RuntimeTestStatus runtimeTestStatus = null; /** * The log channel for this dialog. 
*/ protected LogChannelInterface log; public static ClusterTestDialog create( Shell parent, NamedCluster namedCluster, RuntimeTester clusterTester ) throws KettleException { return new ClusterTestDialog( parent, namedCluster, clusterTester ); } public ClusterTestDialog( Shell parent, NamedCluster namedCluster, RuntimeTester runtimeTester ) throws KettleException { super( parent ); this.namedCluster = namedCluster; this.runtimeTester = runtimeTester; props = getPropsUIInstance(); this.log = KettleLogStore.getLogChannelInterfaceFactory().create( namedCluster ); } /** * For testing */ protected PropsUI getPropsUIInstance() { return PropsUI.getInstance(); } public RuntimeTestStatus open() { Shell parent = getParent(); final Display display = parent.getDisplay(); shell = new Shell( parent, SWT.DIALOG_TRIM | SWT.CLOSE | SWT.MAX | SWT.MIN | SWT.ICON ); props.setLook( shell ); shell.setImage( GUIResource.getInstance().getImageSpoon() ); int margin = Const.FORM_MARGIN; FormLayout formLayout = new FormLayout(); formLayout.marginWidth = margin; formLayout.marginHeight = margin; final int shellWidth = 385; final int shellHeight = 160; shell.setSize( shellWidth, shellHeight ); shell.setMinimumSize( shellWidth, shellHeight ); shell.setText( BaseMessages.getString( PKG, "ClusterTestDialog.Title" ) ); shell.setLayout( formLayout ); Label testingClusterLabel = new Label( shell, SWT.NONE ); testingClusterLabel.setText( BaseMessages.getString( PKG, "ClusterTestDialog.ClusterTest.Label" ) ); testingClusterLabel.setForeground( GUIResource.getInstance().getColorCrystalTextPentaho() ); FormData fd = new FormData(); fd.left = new FormAttachment( 0, margin ); fd.top = new FormAttachment( 0, margin ); testingClusterLabel.setLayoutData( fd ); final Label testLabel = new Label( shell, SWT.NONE ); testLabel.setText( "Testing cluster..." 
); fd = new FormData(); fd.top = new FormAttachment( testingClusterLabel, 10 ); fd.right = new FormAttachment( 100, -margin ); fd.left = new FormAttachment( 0, margin ); testLabel.setLayoutData( fd ); final ProgressBar progressBar = new ProgressBar( shell, SWT.SMOOTH ); progressBar.setMinimum( 0 ); // Max tests will be set upon first return fd = new FormData(); fd.top = new FormAttachment( testLabel, 10 ); fd.left = new FormAttachment( 0, margin ); fd.right = new FormAttachment( 100, -margin ); progressBar.setLayoutData( fd ); Button wCancel = new Button( shell, SWT.PUSH ); wCancel.setText( BaseMessages.getString( PKG, "System.Button.Cancel" ) ); wCancel.addListener( SWT.Selection, new Listener() { public void handleEvent( Event e ) { cancel(); } } ); Button[] buttons = new Button[]{ wCancel }; BaseStepDialog.positionBottomRightButtons( shell, buttons, margin, null ); shell.setBackgroundMode( SWT.INHERIT_FORCE ); Rectangle shellBounds = Spoon.getInstance().getShell().getBounds(); shell.open(); shell.setLocation( shellBounds.x + ( shellBounds.width - shellWidth ) / 2, shellBounds.y + ( shellBounds.height - shellHeight ) / 2 ); // Start the cluster tests runtimeTester.runtimeTest( namedCluster, new RuntimeTestProgressCallback() { private int numTests = -1; @Override public void onProgress( final RuntimeTestStatus clusterTestStatus ) { Runnable runnable = getRunnable( progressBar, clusterTestStatus, testLabel ); display.asyncExec( runnable ); } } ); while ( !shell.isDisposed() ) { if ( !display.readAndDispatch() ) { display.sleep(); } } return runtimeTestStatus; } private void cancel() { runtimeTestStatus = null; dispose(); } public void dispose() { props.setScreen( new WindowProperty( shell ) ); shell.dispose(); } /** * For testing */ Runnable getRunnable( final ProgressBar progressBar, final RuntimeTestStatus clusterTestStatus, final Label testLabel ) { return new Runnable() { private int numTests = -1; @Override public void run() { if ( progressBar.isDisposed() ) { return; } // Calculate the number of tests to be run (only the first time!) 
if ( numTests == -1 ) { numTests = clusterTestStatus.getTestsDone() + clusterTestStatus.getTestsOutstanding() + clusterTestStatus.getTestsRunning(); progressBar.setMaximum( numTests ); } progressBar.setSelection( clusterTestStatus.getTestsDone() ); for ( RuntimeTestModuleResults results : clusterTestStatus.getModuleResults() ) { Iterator runningTests = results.getRunningTests().iterator(); if ( runningTests.hasNext() ) { testLabel.setText( runningTests.next().getName() ); } } if ( clusterTestStatus.isDone() ) { runtimeTestStatus = clusterTestStatus; testLabel.setText( BaseMessages.getString( PKG, "ClusterTestDialog.TestsFinished" ) ); // Log all the executed tests at the end for ( RuntimeTestModuleResults results : clusterTestStatus.getModuleResults() ) { log.logBasic( BaseMessages.getString( PKG, "ClusterTestDialog.ModuleTest", results.getName() ) ); for ( RuntimeTestResult result : results.getRuntimeTestResults() ) { String clusterTestName = result.getRuntimeTest().getName(); // If there are no entries, that means there was one test and it becomes the summary-level result if ( result.getRuntimeTestResultEntries().isEmpty() ) { RuntimeTestResultEntry entry = result.getOverallStatusEntry(); log.logBasic( BaseMessages.getString( PKG, "ClusterTestDialog.TestResult", clusterTestName, entry.getSeverity().toString(), entry.getDescription() ) ); log.logBasic( "\t" + entry.getMessage() ); if ( entry.getException() != null ) { log.logBasic( ExceptionUtils.getStackTrace( entry.getException() ) ); } } else { for ( RuntimeTestResultEntry entry : result.getRuntimeTestResultEntries() ) { log.logBasic( BaseMessages.getString( PKG, "ClusterTestDialog.TestResult", clusterTestName, entry.getSeverity().toString(), entry.getDescription() ) ); log.logBasic( "\t" + entry.getMessage() ); if ( entry.getException() != null ) { log.logBasic( ExceptionUtils.getStackTrace( entry.getException() ) ); } } } } } ClusterTestDialog.this.dispose(); } } }; } } ================================================ FILE: kettle-plugins/common/ui/src/main/java/org/pentaho/big/data/plugins/common/ui/ClusterTestResultsDialog.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. 
* * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.plugins.common.ui; import org.eclipse.swt.SWT; import org.eclipse.swt.custom.ScrolledComposite; import org.eclipse.swt.events.SelectionAdapter; import org.eclipse.swt.events.SelectionEvent; import org.eclipse.swt.graphics.Rectangle; import org.eclipse.swt.layout.FormAttachment; import org.eclipse.swt.layout.FormData; import org.eclipse.swt.layout.FormLayout; import org.eclipse.swt.widgets.Button; import org.eclipse.swt.widgets.Composite; import org.eclipse.swt.widgets.Control; import org.eclipse.swt.widgets.Dialog; import org.eclipse.swt.widgets.Display; import org.eclipse.swt.widgets.Event; import org.eclipse.swt.widgets.Label; import org.eclipse.swt.widgets.Link; import org.eclipse.swt.widgets.Listener; import org.eclipse.swt.widgets.Shell; import org.pentaho.di.core.Const; import org.pentaho.di.core.exception.KettleException; import org.pentaho.di.core.logging.LogChannel; import org.pentaho.di.i18n.BaseMessages; import org.pentaho.di.ui.core.PropsUI; import org.pentaho.di.ui.core.gui.GUIResource; import org.pentaho.di.ui.core.gui.WindowProperty; import org.pentaho.di.ui.spoon.Spoon; import org.pentaho.di.ui.trans.step.BaseStepDialog; import org.pentaho.di.ui.util.HelpUtils; import org.pentaho.runtime.test.RuntimeTestStatus; import org.pentaho.runtime.test.action.RuntimeTestAction; import org.pentaho.runtime.test.action.RuntimeTestActionService; import org.pentaho.runtime.test.module.RuntimeTestModuleResults; import org.pentaho.runtime.test.result.RuntimeTestResult; import org.pentaho.runtime.test.result.RuntimeTestResultEntry; /** * Dialog to display the results of running a suite of tests on a Named Cluster (and its shim/config) */ public class ClusterTestResultsDialog extends Dialog { private static final Class PKG = ClusterTestResultsDialog.class; private Shell shell; private PropsUI props; private final RuntimeTestActionService runtimeTestActionService; private final RuntimeTestStatus clusterTestStatus; /** * The log channel for this dialog. 
*/ protected LogChannel log; public ClusterTestResultsDialog( Shell parent, RuntimeTestActionService runtimeTestActionService, RuntimeTestStatus clusterTestStatus ) throws KettleException { super( parent ); this.runtimeTestActionService = runtimeTestActionService; this.clusterTestStatus = clusterTestStatus; props = PropsUI.getInstance(); this.log = new LogChannel( clusterTestStatus ); } public String open() { Shell parent = getParent(); final Display display = parent.getDisplay(); shell = new Shell( parent, SWT.DIALOG_TRIM | SWT.RESIZE | SWT.CLOSE | SWT.MAX | SWT.MIN | SWT.ICON ); props.setLook( shell ); shell.setImage( GUIResource.getInstance().getImageSpoon() ); int margin = Const.FORM_MARGIN; HelpUtils.createHelpButton( shell, BaseMessages.getString( PKG, "ClusterTestResultsDialog.Shell.Doc.Title" ), "https://docs.pentaho.com/pdia-11.0-install/use-hadoop-with-pentaho/big-data-issues", BaseMessages.getString( PKG, "ClusterTestResultsDialog.Shell.Doc.Header" ) ); FormLayout formLayout = new FormLayout(); formLayout.marginWidth = margin; formLayout.marginHeight = margin; final int shellWidth = 585; final int shellHeight = 490; shell.setSize( shellWidth, shellHeight ); shell.setMinimumSize( shellWidth, shellHeight ); shell.setText( BaseMessages.getString( PKG, "ClusterTestResultsDialog.Title" ) ); shell.setLayout( formLayout ); shell.setBackgroundMode( SWT.INHERIT_FORCE ); Label clusterResultsLabel = new Label( shell, SWT.NONE ); clusterResultsLabel.setText( BaseMessages.getString( PKG, "ClusterTestResultsDialog.ClusterTestResults.Label" ) ); clusterResultsLabel.setForeground( GUIResource.getInstance().getColorCrystalTextPentaho() ); FormData fd = new FormData(); fd.left = new FormAttachment( 0, margin ); fd.top = new FormAttachment( 0, margin ); fd.right = new FormAttachment( 100, -margin ); clusterResultsLabel.setLayoutData( fd ); final ScrolledComposite scrolledComposite = new ScrolledComposite( shell, SWT.V_SCROLL | SWT.BORDER ); fd = new FormData(); fd.left = new FormAttachment( 0, margin ); fd.right = new FormAttachment( 100, -margin ); fd.bottom = new FormAttachment( 100, -50 ); fd.top = new FormAttachment( clusterResultsLabel, margin ); scrolledComposite.setLayoutData( fd ); final Composite mainComposite = new Composite( scrolledComposite, SWT.NONE ); scrolledComposite.setContent( mainComposite ); scrolledComposite.setExpandHorizontal( true ); FormLayout layout = new FormLayout(); mainComposite.setLayout( layout ); ClassLoader myClassLoader = this.getClass().getClassLoader(); Label separator = null; // Add the test results for ( RuntimeTestModuleResults moduleResults : clusterTestStatus.getModuleResults() ) { for ( RuntimeTestResult testResult : moduleResults.getRuntimeTestResults() ) { RuntimeTestResultEntry summary = testResult.getOverallStatusEntry(); Label image = new Label( mainComposite, SWT.NONE ); switch ( summary.getSeverity() ) { case DEBUG: case INFO: // The above are "Test(s) passed" image.setImage( GUIResource.getInstance().getImage( "ui/images/success_green.svg", myClassLoader, 22, 22 ) ); break; case WARNING: case SKIPPED: // The above are "Test(s) finished with warnings" image.setImage( GUIResource.getInstance().getImage( "ui/images/warning_yellow.svg", myClassLoader, 22, 22 ) ); break; case ERROR: case FATAL: // The above are "Test(s) failed" image.setImage( GUIResource.getInstance().getImage( "ui/images/error_red.svg", myClassLoader, 22, 22 ) ); break; } FormData imageLayoutData = new FormData(); imageLayoutData.left = new FormAttachment( 0, margin ); if ( 
separator != null ) { imageLayoutData.top = new FormAttachment( separator, margin ); } else { imageLayoutData.top = new FormAttachment( 0, margin ); } image.setLayoutData( imageLayoutData ); Label testName = new Label( mainComposite, SWT.NONE ); testName.setText( testResult.getRuntimeTest().getName() ); FormData layoutData = new FormData(); layoutData.left = new FormAttachment( image, margin ); layoutData.right = new FormAttachment( 100, -margin ); if ( separator != null ) { layoutData.top = new FormAttachment( separator, margin ); } else { layoutData.top = new FormAttachment( 0, margin ); } testName.setLayoutData( layoutData ); // Add test description Label description = new Label( mainComposite, SWT.WRAP ); description.setForeground( GUIResource.getInstance().getColorDarkGray() ); description.setText( summary.getDescription() ); layoutData = new FormData(); layoutData.left = new FormAttachment( image, margin ); layoutData.right = new FormAttachment( 100, -margin ); layoutData.top = new FormAttachment( testName, margin ); description.setLayoutData( layoutData ); Control linkOrNot = description; // Add action link(s) final RuntimeTestAction runtimeTestAction = summary.getAction(); if ( runtimeTestAction != null ) { Link link = new Link( mainComposite, SWT.NONE ); link.setText( "" + runtimeTestAction.getName() + "" ); link.setToolTipText( runtimeTestAction.getDescription() ); link.addSelectionListener( new SelectionAdapter() { public void widgetSelected( SelectionEvent selectionEvent ) { runtimeTestActionService.handle( runtimeTestAction ); } } ); layoutData = new FormData(); layoutData.left = new FormAttachment( image, margin ); layoutData.right = new FormAttachment( 100, -margin ); layoutData.top = new FormAttachment( description, margin ); link.setLayoutData( layoutData ); linkOrNot = link; } // Add separator separator = new Label( mainComposite, SWT.HORIZONTAL | SWT.SEPARATOR ); separator.setForeground( GUIResource.getInstance().getColorLightGray() ); layoutData = new FormData(); layoutData.left = new FormAttachment( 0, margin ); layoutData.right = new FormAttachment( 100, -margin ); layoutData.top = new FormAttachment( linkOrNot, margin ); separator.setLayoutData( layoutData ); } } mainComposite.setSize( mainComposite.computeSize( SWT.DEFAULT, SWT.DEFAULT ) ); Button wOk = new Button( shell, SWT.PUSH ); wOk.setText( BaseMessages.getString( PKG, "System.Button.Close" ) ); wOk.addListener( SWT.Selection, new Listener() { public void handleEvent( Event e ) { ok(); } } ); Button[] buttons = new Button[]{ wOk }; BaseStepDialog.positionBottomRightButtons( shell, buttons, margin, null ); Rectangle shellBounds = Spoon.getInstance().getShell().getBounds(); shell.pack(); shell.open(); shell.setLocation( shellBounds.x + ( shellBounds.width - shellWidth ) / 2, shellBounds.y + ( shellBounds.height - shellHeight ) / 2 ); while ( !shell.isDisposed() ) { if ( !display.readAndDispatch() ) { display.sleep(); } } return null; } private void ok() { dispose(); } public void dispose() { props.setScreen( new WindowProperty( shell ) ); shell.dispose(); } } ================================================ FILE: kettle-plugins/common/ui/src/main/java/org/pentaho/big/data/plugins/common/ui/CommonDialogFactory.java ================================================ /*! 
****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.plugins.common.ui; import org.eclipse.swt.widgets.Shell; import org.pentaho.hadoop.shim.api.cluster.NamedClusterService; import org.pentaho.di.ui.core.dialog.ErrorDialog; import org.pentaho.hadoop.shim.api.cluster.NamedCluster; import org.pentaho.runtime.test.RuntimeTester; import org.pentaho.runtime.test.action.RuntimeTestActionService; /** * Created by bryan on 10/19/15. */ public class CommonDialogFactory { public void createErrorDialog( Shell parent, String title, String message, Exception exception ) { new ErrorDialog( parent, title, message, exception ); } public NamedClusterDialogImpl createNamedClusterDialog( Shell parent, NamedClusterService namedClusterService, RuntimeTestActionService runtimeTestActionService, RuntimeTester runtimeTester, NamedCluster namedCluster ) { return new NamedClusterDialogImpl( parent, namedClusterService, runtimeTestActionService, runtimeTester, namedCluster ); } } ================================================ FILE: kettle-plugins/common/ui/src/main/java/org/pentaho/big/data/plugins/common/ui/HadoopClusterDelegateImpl.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. 
* * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.plugins.common.ui; import org.apache.commons.io.FileUtils; import org.eclipse.swt.widgets.Shell; import org.pentaho.hadoop.shim.api.cluster.NamedClusterService; import org.pentaho.di.core.variables.VariableSpace; import org.pentaho.di.i18n.BaseMessages; import org.pentaho.di.metastore.SuppliedMetaStore; import org.pentaho.di.ui.spoon.Spoon; import org.pentaho.di.ui.spoon.delegates.SpoonDelegate; import org.pentaho.hadoop.shim.api.cluster.NamedCluster; import org.pentaho.metastore.api.IMetaStore; import org.pentaho.metastore.api.exceptions.MetaStoreException; import org.pentaho.metastore.stores.delegate.DelegatingMetaStore; import org.pentaho.metastore.stores.xml.XmlMetaStore; import org.pentaho.runtime.test.RuntimeTester; import org.pentaho.runtime.test.action.RuntimeTestActionService; import java.io.File; import java.io.IOException; import java.io.InputStream; import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.Paths; import java.nio.file.StandardCopyOption; public class HadoopClusterDelegateImpl extends SpoonDelegate { public static final String SPOON_DIALOG_ERROR_DELETING_NAMED_CLUSTER_TITLE = "Spoon.Dialog.ErrorDeletingNamedCluster.Title"; public static final String SPOON_DIALOG_ERROR_DELETING_NAMED_CLUSTER_MESSAGE = "Spoon.Dialog.ErrorDeletingNamedCluster.Message"; public static final String SPOON_VARIOUS_DUPE_NAME = "Spoon.Various.DupeName"; public static final String SPOON_DIALOG_ERROR_SAVING_NAMED_CLUSTER_TITLE = "Spoon.Dialog.ErrorSavingNamedCluster.Title"; public static final String SPOON_DIALOG_ERROR_SAVING_NAMED_CLUSTER_MESSAGE = "Spoon.Dialog.ErrorSavingNamedCluster.Message"; public static Class PKG = HadoopClusterDelegateImpl.class; // for i18n purposes, needed by Translator2!! 
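  /*
   * Illustrative sketch (not part of the original source): Spoon UI code would typically drive this delegate
   * roughly as follows, using the constructor and the public methods defined further down in this class. The
   * "spoon", "shell", "jobMeta" and "cluster" references are assumed to come from the surrounding Spoon
   * context; only the delegate calls themselves are taken from this file.
   *
   *   HadoopClusterDelegateImpl delegate =
   *       new HadoopClusterDelegateImpl( spoon, namedClusterService, runtimeTestActionService, runtimeTester );
   *
   *   // create a new named cluster (opens NamedClusterDialogImpl and persists it via the metastore)
   *   String createdName = delegate.newNamedCluster( jobMeta, spoon.getMetaStore(), shell );
   *
   *   // later, edit or delete the cluster definition
   *   String renamedTo = delegate.editNamedCluster( spoon.getMetaStore(), cluster, shell );
   *   delegate.delNamedCluster( spoon.getMetaStore(), cluster );
   */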
public static final String STRING_NAMED_CLUSTERS = BaseMessages.getString( PKG, "NamedClusterDialog.HadoopClusters" ); public static final String SPOON_DIALOG_ERROR_ADDING_NEW_CONFIGURATION_FOR_CLUSTER_TITLE = "Spoon.Dialog.ErrorAddingNewConfigurationForCluster.Title"; public static final String SPOON_DIALOG_ERROR_ADDING_NEW_CONFIGURATION_FOR_CLUSTER_MESSAGE = "Spoon.Dialog.ErrorAddingNewConfigurationForCluster.Message"; public static final String SPOON_DIALOG_ERROR_RENAMING_PREVIOUS_CLUSTER_CONFIG_TITLE = "Spoon.Dialog.ErrorRenamingPreviousClusterConfig.Title"; public static final String SPOON_DIALOG_ERROR_RENAMING_PREVIOUS_CLUSTER_CONFIG_MESSAGE = "Spoon.Dialog.ErrorRenamingPreviousClusterConfig.Message"; private final NamedClusterService namedClusterService; private final RuntimeTestActionService runtimeTestActionService; private final RuntimeTester runtimeTester; private final CommonDialogFactory commonDialogFactory; public HadoopClusterDelegateImpl( Spoon spoon, NamedClusterService namedClusterService, RuntimeTestActionService runtimeTestActionService, RuntimeTester runtimeTester ) { this( spoon, namedClusterService, runtimeTestActionService, runtimeTester, new CommonDialogFactory() ); } public HadoopClusterDelegateImpl( Spoon spoon, NamedClusterService namedClusterService, RuntimeTestActionService runtimeTestActionService, RuntimeTester runtimeTester, CommonDialogFactory commonDialogFactory ) { super( spoon ); this.namedClusterService = namedClusterService; this.runtimeTestActionService = runtimeTestActionService; this.runtimeTester = runtimeTester; this.commonDialogFactory = commonDialogFactory; } public void dupeNamedCluster( IMetaStore metaStore, NamedCluster nc, Shell shell ) { if ( metaStore == null ) { metaStore = spoon.getMetaStore(); } if ( nc == null ) { return; } NamedCluster newNamedCluster = nc.clone(); // The "duplicate name" string comes from Spoon, so use its class to get the resource String duplicateName = BaseMessages.getString( Spoon.class, SPOON_VARIOUS_DUPE_NAME ) + nc.getName(); newNamedCluster.setName( duplicateName ); NamedClusterDialogImpl namedClusterDialogImpl = commonDialogFactory .createNamedClusterDialog( shell, namedClusterService, runtimeTestActionService, runtimeTester, newNamedCluster ); namedClusterDialogImpl.setNewClusterCheck( true ); String newClusterName = namedClusterDialogImpl.open(); // Check if the process was cancelled if ( newClusterName == null ) { return; } try { XmlMetaStore xmlMetaStore = getXmlMetastore( metaStore ); if ( xmlMetaStore != null ) { if ( newNamedCluster.getName() != null ) { delNamedCluster( metaStore, newNamedCluster ); } File sourceClusterConfigDir = new File( getNamedClusterConfigsRootDir( xmlMetaStore ) + "/" + nc.getName() ); File newClusterConfigDir = new File( getNamedClusterConfigsRootDir( xmlMetaStore ) + "/" + newClusterName ); saveNamedCluster( metaStore, newNamedCluster ); FileUtils.copyDirectory( sourceClusterConfigDir, newClusterConfigDir ); if ( !nc.getShimIdentifier().equals( newNamedCluster.getShimIdentifier() ) ) { addConfigProperties( newNamedCluster ); } } } catch ( Exception e ) { commonDialogFactory.createErrorDialog( spoon.getShell(), BaseMessages.getString( PKG, SPOON_DIALOG_ERROR_SAVING_NAMED_CLUSTER_TITLE ), BaseMessages.getString( PKG, SPOON_DIALOG_ERROR_SAVING_NAMED_CLUSTER_MESSAGE, nc.getName() ), e ); spoon.refreshTree(); return; } spoon.refreshTree( STRING_NAMED_CLUSTERS ); } public void delNamedCluster( IMetaStore metaStore, NamedCluster namedCluster ) { if ( metaStore == null ) { metaStore 
= spoon.getMetaStore(); } deleteNamedCluster( metaStore, namedCluster ); spoon.refreshTree( STRING_NAMED_CLUSTERS ); spoon.setShellText(); } private void backupAndAddShimConfiguration( IMetaStore metaStore, NamedCluster previousNamedCluster, NamedCluster selectedNamedCluster ) { if ( metaStore == null ) { metaStore = spoon.getMetaStore(); } if ( previousNamedCluster == null ) { return; } if ( selectedNamedCluster == null ) { return; } XmlMetaStore xmlMetaStore; try { xmlMetaStore = getXmlMetastore( metaStore ); } catch ( MetaStoreException ex ) { xmlMetaStore = null; } String previousNamedClusterName = previousNamedCluster.getName(); String selectedNamedClusterName = selectedNamedCluster.getName(); // get the previous shim configuration File previousShimConfiguration = new File( getNamedClusterConfigsRootDir( xmlMetaStore ) + File.separator + previousNamedClusterName + File.separator + "config.properties" ); File previousShimConfigurationBackup = new File( getNamedClusterConfigsRootDir( xmlMetaStore ) + File.separator + previousNamedClusterName + File.separator + "old-config.bak" ); try { // backup original configuration Files.move( previousShimConfiguration.toPath(), previousShimConfigurationBackup.toPath(), java.nio.file.StandardCopyOption.REPLACE_EXISTING ); } catch ( IOException e ) { String dialogTitle = BaseMessages.getString( PKG, SPOON_DIALOG_ERROR_RENAMING_PREVIOUS_CLUSTER_CONFIG_TITLE ); String dialogMessage = BaseMessages.getString( PKG, SPOON_DIALOG_ERROR_RENAMING_PREVIOUS_CLUSTER_CONFIG_MESSAGE ); commonDialogFactory.createErrorDialog( spoon.getShell(), dialogTitle, dialogMessage, e ); return; } try { // create new configuration of driver being created addConfigProperties( selectedNamedCluster ); } catch ( Exception e ) { String dialogTitle = BaseMessages.getString( PKG, SPOON_DIALOG_ERROR_ADDING_NEW_CONFIGURATION_FOR_CLUSTER_TITLE ); String dialogMessage = java.text.MessageFormat.format( BaseMessages.getString( PKG, SPOON_DIALOG_ERROR_ADDING_NEW_CONFIGURATION_FOR_CLUSTER_MESSAGE ), selectedNamedClusterName ); commonDialogFactory.createErrorDialog( spoon.getShell(), dialogTitle, dialogMessage, e ); } } public String editNamedCluster( IMetaStore metaStore, NamedCluster namedCluster, Shell shell ) { if ( metaStore == null ) { metaStore = spoon.getMetaStore(); } NamedClusterDialogImpl namedClusterDialogImpl = commonDialogFactory.createNamedClusterDialog( shell, namedClusterService, runtimeTestActionService, runtimeTester, namedCluster.clone() ); namedClusterDialogImpl.setNewClusterCheck( false ); String result = namedClusterDialogImpl.open(); if ( result == null ) { return null; } NamedCluster selectedNamedCluster = namedClusterDialogImpl.getNamedCluster(); // Create the new cluster saveNamedCluster( metaStore, selectedNamedCluster ); String previousNamedClusterName = namedCluster.getName(); String selectedNamedClusterName = selectedNamedCluster.getName(); String previousShimIdentifier = namedCluster.getShimIdentifier(); String selectedShimIdentifier = selectedNamedCluster.getShimIdentifier(); // if the previous shim identifier differs from the selected shim identifier then backup the old config and add the new one if ( !previousShimIdentifier.equals( selectedShimIdentifier ) ) { backupAndAddShimConfiguration( metaStore, namedCluster, selectedNamedCluster ); } // no name change so return the selected named cluster name if ( previousNamedClusterName != null && previousNamedClusterName.equals( selectedNamedClusterName ) ) { return selectedNamedClusterName; } XmlMetaStore 
xmlMetaStore; try { xmlMetaStore = getXmlMetastore( metaStore ); } catch ( MetaStoreException ex ) { xmlMetaStore = null; } // Rename the configuration folder to the new name. File source = new File( getNamedClusterConfigsRootDir( xmlMetaStore ) + File.separator + previousNamedClusterName ); File destination = new File( getNamedClusterConfigsRootDir( xmlMetaStore ) + File.separator + selectedNamedClusterName ); try { FileUtils.copyDirectory( source, destination ); } catch ( IOException ex ) { } // Delete the old named cluster. deleteNamedCluster( metaStore, namedCluster ); // If the user changed the shim, create a new config.properties file that corresponds to that shim. String shimIdentifier = namedClusterDialogImpl.getNamedCluster().getShimIdentifier(); if ( !namedCluster.getShimIdentifier().equals( shimIdentifier ) ) { try { addConfigProperties( namedClusterDialogImpl.getNamedCluster() ); } catch ( Exception e ) { // Do nothing. } } spoon.refreshTree( STRING_NAMED_CLUSTERS ); if ( namedClusterDialogImpl.getNamedCluster() != null ) { return namedClusterDialogImpl.getNamedCluster().getName(); } return null; } private XmlMetaStore getXmlMetastore( IMetaStore metaStore ) throws MetaStoreException { XmlMetaStore xmlMetaStore = null; if ( metaStore instanceof DelegatingMetaStore ) { IMetaStore activeMetastore = ( (DelegatingMetaStore) metaStore ).getActiveMetaStore(); if ( activeMetastore instanceof XmlMetaStore ) { xmlMetaStore = (XmlMetaStore) activeMetastore; } } else if ( metaStore instanceof SuppliedMetaStore ) { IMetaStore activeMetastore = ( (SuppliedMetaStore) metaStore ).getCurrentMetaStore(); if ( activeMetastore instanceof XmlMetaStore ) { xmlMetaStore = (XmlMetaStore) activeMetastore; } } else if ( metaStore instanceof XmlMetaStore ) { xmlMetaStore = (XmlMetaStore) metaStore; } return xmlMetaStore; } private String getNamedClusterConfigsRootDir( XmlMetaStore metaStore ) { String configsFolder = null != spoon.getRepository() ? 
"ServerConfigs" : "Configs"; return System.getProperty( "user.home" ) + File.separator + ".pentaho" + File.separator + "metastore" + File.separator + "pentaho" + File.separator + "NamedCluster" + File.separator + configsFolder; } public String newNamedCluster( VariableSpace variableSpace, IMetaStore metaStore, Shell shell ) { if ( metaStore == null ) { metaStore = spoon.getMetaStore(); } NamedCluster nc = namedClusterService.getClusterTemplate(); NamedClusterDialogImpl namedClusterDialogImpl = commonDialogFactory .createNamedClusterDialog( shell, namedClusterService, runtimeTestActionService, runtimeTester, nc ); namedClusterDialogImpl.setNewClusterCheck( true ); String result = namedClusterDialogImpl.open(); if ( result != null ) { if ( variableSpace != null ) { nc.shareVariablesWith( (VariableSpace) variableSpace ); } else { nc.initializeVariablesFrom( null ); } try { saveNamedCluster( metaStore, nc ); addConfigProperties( nc ); } catch ( Exception e ) { commonDialogFactory.createErrorDialog( spoon.getShell(), BaseMessages.getString( PKG, SPOON_DIALOG_ERROR_SAVING_NAMED_CLUSTER_TITLE ), BaseMessages.getString( PKG, SPOON_DIALOG_ERROR_SAVING_NAMED_CLUSTER_MESSAGE, nc.getName() ), e ); spoon.refreshTree(); return nc.getName(); } spoon.refreshTree( STRING_NAMED_CLUSTERS ); return nc.getName(); } return null; } private void addConfigProperties( NamedCluster namedCluster ) throws Exception { Path clusterConfigDirPath = Paths.get( getNamedClusterConfigsRootDir( null ) + "/" + namedCluster.getName() ); Path configPropertiesPath = Paths.get( getNamedClusterConfigsRootDir( null ) + "/" + namedCluster.getName() + "/" + "config.properties" ); Files.createDirectories( clusterConfigDirPath ); String sampleConfigProperties = namedCluster.getShimIdentifier() + "sampleconfig.properties"; InputStream inputStream = this.getClass().getClassLoader().getResourceAsStream( sampleConfigProperties ); if ( inputStream != null ) { Files.copy( inputStream, configPropertiesPath, StandardCopyOption.REPLACE_EXISTING ); } } private void deleteNamedCluster( IMetaStore metaStore, NamedCluster namedCluster ) { try { if ( namedClusterService.read( namedCluster.getName(), metaStore ) != null ) { namedClusterService.delete( namedCluster.getName(), metaStore ); XmlMetaStore xmlMetaStore = getXmlMetastore( metaStore ); if ( xmlMetaStore != null ) { String path = getNamedClusterConfigsRootDir( xmlMetaStore ) + "/" + namedCluster.getName(); try { FileUtils.deleteDirectory( new File( path ) ); } catch ( IOException e ) { // Do nothing. The config directory will be orphaned but functionality will not be impacted. } } } } catch ( MetaStoreException e ) { commonDialogFactory.createErrorDialog( spoon.getShell(), BaseMessages.getString( PKG, SPOON_DIALOG_ERROR_DELETING_NAMED_CLUSTER_TITLE ), BaseMessages.getString( PKG, SPOON_DIALOG_ERROR_DELETING_NAMED_CLUSTER_MESSAGE, namedCluster.getName() ), e ); } } private void saveNamedCluster( IMetaStore metaStore, NamedCluster namedCluster ) { try { namedClusterService.create( namedCluster, metaStore ); } catch ( MetaStoreException e ) { commonDialogFactory.createErrorDialog( spoon.getShell(), BaseMessages.getString( PKG, SPOON_DIALOG_ERROR_SAVING_NAMED_CLUSTER_TITLE ), BaseMessages.getString( PKG, SPOON_DIALOG_ERROR_SAVING_NAMED_CLUSTER_MESSAGE, namedCluster.getName() ), e ); } } } ================================================ FILE: kettle-plugins/common/ui/src/main/java/org/pentaho/big/data/plugins/common/ui/NamedClusterComposite.java ================================================ /*! 
****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.plugins.common.ui; import java.util.ArrayList; import java.util.Arrays; import java.util.List; import java.util.Map; import org.apache.commons.lang.StringUtils; import org.eclipse.swt.SWT; import org.eclipse.swt.custom.CCombo; import org.eclipse.swt.custom.StackLayout; import org.eclipse.swt.events.FocusEvent; import org.eclipse.swt.events.FocusListener; import org.eclipse.swt.events.ModifyEvent; import org.eclipse.swt.events.ModifyListener; import org.eclipse.swt.events.SelectionAdapter; import org.eclipse.swt.events.SelectionEvent; import org.eclipse.swt.layout.FormAttachment; import org.eclipse.swt.layout.FormData; import org.eclipse.swt.layout.FormLayout; import org.eclipse.swt.layout.GridData; import org.eclipse.swt.layout.GridLayout; import org.eclipse.swt.layout.RowLayout; import org.eclipse.swt.widgets.Button; import org.eclipse.swt.widgets.Composite; import org.eclipse.swt.widgets.Control; import org.eclipse.swt.widgets.Group; import org.eclipse.swt.widgets.Label; import org.eclipse.swt.widgets.Text; import org.pentaho.hadoop.shim.api.cluster.NamedClusterService; import org.pentaho.hadoop.shim.api.cluster.NamedCluster; import org.pentaho.di.core.util.Utils; import org.pentaho.di.i18n.BaseMessages; import org.pentaho.di.ui.core.PropsUI; import org.pentaho.di.ui.core.widget.TextVar; import org.pentaho.hadoop.shim.api.core.ShimIdentifierInterface; import org.pentaho.platform.engine.core.system.PentahoSystem; public class NamedClusterComposite extends Composite { private static final String NAMED_CLUSTER_DFS_SCHEME = "named.cluster.dfs.scheme."; private static Class PKG = NamedClusterComposite.class; private PropsUI props; private GridData gridData = new GridData(); private GridData numberGridData = new GridData(); private GridData labelGridData = new GridData(); private GridData userNameLabelGridData = new GridData(); private GridData userNameGridData = new GridData(); private GridData passwordLabelGridData = new GridData(); private GridData passwordGridData = new GridData(); private GridData portLabelGridData = new GridData(); private static final int ONE_COLUMN = 1; private static final int TWO_COLUMNS = 2; private static final int TEXT_FLAGS = SWT.SINGLE | SWT.LEFT | SWT.BORDER; private static final int PASSWORD_FLAGS = TEXT_FLAGS | SWT.PASSWORD; private static final String KETTLE_HADOOP_CLUSTER_GATEWAY_CONNECTION = "KETTLE_HADOOP_CLUSTER_GATEWAY_CONNECTION"; private Text nameOfNamedCluster; private Composite compositeSwitcher; private Composite gatewayComposite; private Composite noGatewayComposite; private Label jtHostLabel; private TextVar jtHostNameText; private Label jtPortLabel; private TextVar jtPortText; private Group hdfsGroup; private Label hdfsHostLabel; private TextVar hdfsHostText; private Label hdfsPortLabel; private TextVar hdfsPortText; private Label hdfsUsernameLabel; private TextVar hdfsUsernameText; private Label hdfsPasswordLabel; private TextVar hdfsPasswordText; private ArrayList schemeValues = new ArrayList<>(); private ArrayList schemeNames = new ArrayList<>(); private StateChangeListener stateChangeListener; private interface Callback { public void invoke( 
NamedCluster nc, TextVar textVar, String value ); } public NamedClusterComposite( Composite parent, NamedCluster namedCluster, PropsUI props, NamedClusterService namedClusterService ) { super( parent, SWT.NONE ); props.setLook( this ); this.props = props; FormLayout formLayout = new FormLayout(); formLayout.marginWidth = 0; formLayout.marginHeight = 0; setLayout( formLayout ); FormData fd = new FormData(); fd.left = new FormAttachment( 0, 0 ); fd.right = new FormAttachment( 100, 0 ); setLayoutData( fd ); gridData.widthHint = 270; numberGridData.widthHint = 80; labelGridData.widthHint = 270; portLabelGridData.widthHint = 80; userNameLabelGridData.widthHint = 165; userNameGridData.widthHint = 165; passwordLabelGridData.widthHint = 185; passwordGridData.widthHint = 185; //create head of form Composite confUI = createHeadOfNamedClusterDialog( this, namedCluster ); // Create a horizontal separator Label topSeparator = new Label( this, SWT.HORIZONTAL | SWT.SEPARATOR ); // Attach the separator to the name topSeparator.setLayoutData( createFormDataAndAttachTopControl( confUI ) ); // create the composite to hold and switch between two subcomponent compositeSwitcher = new Composite( this, SWT.NONE ); // attach to the separator compositeSwitcher.setLayoutData( createFormDataAndAttachTopControl( topSeparator ) ); StackLayout compositeLayout = new StackLayout(); compositeSwitcher.setLayout( compositeLayout ); // Create a child composite to hold the gateway controls gatewayComposite = new Composite( compositeSwitcher, SWT.NONE ); props.setLook( gatewayComposite ); GridLayout gatewayCompositeLayout = new GridLayout( ONE_COLUMN, false ); gatewayCompositeLayout.marginHeight = 0; gatewayCompositeLayout.marginWidth = 0; gatewayComposite.setLayout( gatewayCompositeLayout ); gatewayComposite.setSize( gatewayComposite.computeSize( SWT.DEFAULT, SWT.DEFAULT ) ); createGatewayGroup( gatewayComposite, namedCluster ); // Create a child composite to hold the non gateway controls noGatewayComposite = new Composite( compositeSwitcher, SWT.NONE ); props.setLook( noGatewayComposite ); GridLayout gl = new GridLayout( ONE_COLUMN, false ); gl.marginHeight = 0; gl.marginWidth = 0; noGatewayComposite.setLayout( gl ); noGatewayComposite.setSize( noGatewayComposite.computeSize( SWT.DEFAULT, SWT.DEFAULT ) ); createStorageGroup( noGatewayComposite, namedCluster, namedClusterService ); createShimVendorGroup( noGatewayComposite, namedCluster, namedClusterService ); createHdfsGroup( noGatewayComposite, namedCluster ); createJobTrackerGroup( noGatewayComposite, namedCluster ); createZooKeeperGroup( noGatewayComposite, namedCluster ); createOozieGroup( noGatewayComposite, namedCluster ); createKafkaGroup( noGatewayComposite, namedCluster ); setHdfsAndJobTrackerState( namedCluster ); //choose the properly composite based on the cluster settings compositeLayout.topControl = namedCluster.isUseGateway() ? 
gatewayComposite : noGatewayComposite; compositeSwitcher.layout(); nameOfNamedCluster.forceFocus(); } public void setStateChangeListener( StateChangeListener stateChangeListener ) { this.stateChangeListener = stateChangeListener; } private FormData createFormDataAndAttachTopControl( Control topControl ) { FormData fd = new FormData(); fd.left = new FormAttachment( 0, 0 ); fd.right = new FormAttachment( 100, 0 ); fd.top = new FormAttachment( topControl, 10 ); return fd; } private Composite createHeadOfNamedClusterDialog( final Composite parentComposite, final NamedCluster namedCluster ) { Composite mainRowComposite = new Composite( parentComposite, SWT.NONE ); GridLayout mainRowLayout = new GridLayout( ONE_COLUMN, false ); mainRowLayout.marginWidth = 0; mainRowLayout.marginTop = -10; mainRowComposite.setLayout( mainRowLayout ); props.setLook( mainRowComposite ); Composite nameUICluster = new Composite( mainRowComposite, SWT.NONE ); props.setLook( nameUICluster ); GridLayout nameUILayout = new GridLayout( ONE_COLUMN, false ); nameUILayout.marginWidth = 0; nameUILayout.marginTop = 0; nameUICluster.setLayout( nameUILayout ); createLabel( nameUICluster, BaseMessages.getString( PKG, "NamedClusterDialog.NamedCluster.Name" ), labelGridData ); nameOfNamedCluster = new Text( nameUICluster, SWT.SINGLE | SWT.LEFT | SWT.BORDER ); nameOfNamedCluster.setText( String.valueOf( namedCluster.getName() ) ); nameOfNamedCluster.setLayoutData( gridData ); props.setLook( nameOfNamedCluster ); nameOfNamedCluster.addModifyListener( new ModifyListener() { public void modifyText( final ModifyEvent modifyEvent ) { namedCluster.setName( nameOfNamedCluster.getText() ); stateChanged(); } } ); if ( shouldRenderGatewayCheckbox( namedCluster ) ) { // Create gateway composite Composite gatewayUIComposite = new Composite( mainRowComposite, SWT.NONE ); GridLayout layout = new GridLayout( ONE_COLUMN, false ); layout.marginHeight = 0; layout.marginWidth = 0; gatewayUIComposite.setLayout( layout ); props.setLook( gatewayUIComposite ); // Create gateway check box final Button gatewayButton = new Button( gatewayUIComposite, SWT.CHECK ); gatewayButton.setText( BaseMessages.getString( PKG, "NamedClusterDialog.NamedCluster.GatewayCheckBoxTitle" ) ); props.setLook( gatewayButton ); gatewayButton.setSelection( namedCluster.isUseGateway() ); gatewayButton.addSelectionListener( new SelectionAdapter() { public void widgetSelected( SelectionEvent e ) { super.widgetSelected( e ); namedCluster.setUseGateway( gatewayButton.getSelection() ); StackLayout layout = (StackLayout) compositeSwitcher.getLayout(); layout.topControl = namedCluster.isUseGateway() ? gatewayComposite : noGatewayComposite; compositeSwitcher.layout(); stateChanged(); } } ); } return mainRowComposite; } private Label createLabel( Composite parent, String text, GridData gd ) { Label label = new Label( parent, SWT.NONE ); label.setText( text ); label.setLayoutData( gd ); props.setLook( label ); return label; } private TextVar createTextVar( final NamedCluster c, Composite parent, String val, GridData gd, int flags, final Callback cb ) { final TextVar textVar = new TextVar( c, parent, flags ); // SWT will typically not allow a null text textVar.setText( StringUtils.isEmpty( val ) ? 
StringUtils.EMPTY : val ); textVar.setLayoutData( gd ); props.setLook( textVar ); textVar.addModifyListener( new ModifyListener() { public void modifyText( final ModifyEvent modifyEvent ) { cb.invoke( c, textVar, textVar.getText() ); } } ); return textVar; } private Composite createGroup( Composite parent, String groupLabel ) { Group group = new Group( parent, SWT.NONE ); group.setText( groupLabel ); group.setLayout( new RowLayout( SWT.VERTICAL ) ); props.setLook( group ); GridData groupGridData = new GridData(); groupGridData.grabExcessHorizontalSpace = true; groupGridData.horizontalAlignment = SWT.FILL; group.setLayoutData( groupGridData ); // property parent composite Composite pp = new Composite( group, SWT.NONE ); props.setLook( pp ); GridLayout gridLayout = new GridLayout( ONE_COLUMN, false ); gridLayout.verticalSpacing = -10; gridLayout.marginWidth = 0; gridLayout.marginLeft = 5; gridLayout.marginRight = 5; gridLayout.marginTop = -10; gridLayout.marginBottom = -5; pp.setLayout( gridLayout ); return pp; } private Composite createTwoColumnsContainer( Composite parentComposite ) { Composite twoColumnsComposite = new Composite( parentComposite, SWT.NONE ); props.setLook( twoColumnsComposite ); GridLayout gridLayout = new GridLayout( TWO_COLUMNS, false ); gridLayout.marginWidth = 0; twoColumnsComposite.setLayout( gridLayout ); return twoColumnsComposite; } private void createShimVendorGroup( Composite parentComposite, final NamedCluster cluster, final NamedClusterService namedClusterService ) { Composite container = new Composite( parentComposite, SWT.NONE ); props.setLook( container ); GridLayout gridLayout = new GridLayout( ONE_COLUMN, false ); gridLayout.marginWidth = 0; gridLayout.marginBottom = 5; container.setLayout( gridLayout ); // Create a storage type Label createLabel( container, "Vendor shim", labelGridData ); // Create a storage type Drop Down final CCombo shimVendorCombo = new CCombo( container, SWT.BORDER ); List shimIdentifers = PentahoSystem.getAll( ShimIdentifierInterface.class ); String[] vendorList = shimIdentifers.stream() .map( ShimIdentifierInterface::getId ) .filter( shimId -> !shimId.equals( "apache" ) ) .toArray( String[]::new ); shimVendorCombo.setItems( vendorList ); shimVendorCombo.setEditable( false ); shimVendorCombo.select( Arrays.asList( vendorList ).indexOf( cluster.getShimIdentifier() ) ); props.setLook( shimVendorCombo ); shimVendorCombo.addSelectionListener( new SelectionAdapter() { public void widgetSelected( SelectionEvent e ) { super.widgetSelected( e ); int index = shimVendorCombo.getSelectionIndex(); if ( index == -1 ) { index = 0; } cluster.setShimIdentifier( vendorList[ index ] ); } } ); shimVendorCombo.addFocusListener( new FocusListener() { @Override public void focusLost( FocusEvent e ) { String uiInputText = shimVendorCombo.getText(); int selectedIndex; if ( Arrays.asList( vendorList ).contains( uiInputText ) ) { selectedIndex = Arrays.asList( vendorList ).indexOf( uiInputText ); cluster.setShimIdentifier( vendorList[ selectedIndex ] ); shimVendorCombo.select( selectedIndex ); } } @Override public void focusGained( FocusEvent e ) { // should not do any thing on enter focus } } ); } private void createStorageGroup( Composite parentComposite, final NamedCluster cluster, final NamedClusterService namedClusterService ) { Map properties = namedClusterService.getProperties(); for ( String key : properties.keySet() ) { if ( key.startsWith( NAMED_CLUSTER_DFS_SCHEME ) ) { // will add 1 because we should use the key without "." 
schemeValues.add( key.substring( key.lastIndexOf( "." ) + 1 ) ); schemeNames.add( (String) properties.get( key ) ); } } // if we have undefined scheme ( set by variable for example) than we should add the new scheme if ( !schemeValues.contains( cluster.getStorageScheme() ) ) { schemeValues.add( cluster.getStorageScheme() ); schemeNames.add( cluster.getStorageScheme() ); } Composite container = new Composite( parentComposite, SWT.NONE ); props.setLook( container ); GridLayout gridLayout = new GridLayout( ONE_COLUMN, false ); gridLayout.marginWidth = 0; gridLayout.marginBottom = 5; container.setLayout( gridLayout ); // Create a storage type Label createLabel( container, BaseMessages.getString( PKG, "NamedClusterDialog.Storage" ), labelGridData ); // Create a storage type Drop Down final CCombo storageCombo = new CCombo( container, SWT.BORDER ); storageCombo.setItems( schemeNames.toArray( new String[ schemeNames.size() ] ) ); storageCombo.select( schemeValues.indexOf( cluster.getStorageScheme() ) ); props.setLook( storageCombo ); storageCombo.addSelectionListener( new SelectionAdapter() { public void widgetSelected( SelectionEvent e ) { super.widgetSelected( e ); int index = storageCombo.getSelectionIndex(); if ( index == -1 ) { index = 0; } cluster.setStorageScheme( schemeValues.get( index ) ); setHdfsAndJobTrackerState( cluster ); } } ); storageCombo.addFocusListener( new FocusListener() { @Override public void focusLost( FocusEvent e ) { String uiInputText = storageCombo.getText(); int selectedIndex; if ( schemeNames.contains( uiInputText ) ) { selectedIndex = schemeNames.indexOf( uiInputText ); cluster.setStorageScheme( schemeValues.get( selectedIndex ) ); storageCombo.select( selectedIndex ); } else if ( schemeValues.contains( uiInputText ) ) { selectedIndex = schemeValues.indexOf( uiInputText ); cluster.setStorageScheme( schemeValues.get( selectedIndex ) ); storageCombo.select( selectedIndex ); } else { schemeNames.add( uiInputText ); schemeValues.add( uiInputText ); storageCombo.setItems( schemeNames.toArray( new String[ schemeNames.size() ] ) ); cluster.setStorageScheme( uiInputText ); } setHdfsAndJobTrackerState( cluster ); } @Override public void focusGained( FocusEvent e ) { // should not do any thing on enter focus } } ); } private void createHdfsGroup( Composite parentComposite, final NamedCluster c ) { Composite pp = createGroup( parentComposite, BaseMessages.getString( PKG, "NamedClusterDialog.HDFS" ) ); hdfsGroup = (Group) pp.getParent(); Composite hdfsRowComposite = createTwoColumnsContainer( pp ); Composite hostUIComposite = new Composite( hdfsRowComposite, SWT.NONE ); props.setLook( hostUIComposite ); hostUIComposite.setLayout( new GridLayout( ONE_COLUMN, false ) ); Composite portUIComposite = new Composite( hdfsRowComposite, SWT.NONE ); props.setLook( portUIComposite ); portUIComposite.setLayout( new GridLayout( ONE_COLUMN, false ) ); hdfsHostLabel = createLabel( hostUIComposite, BaseMessages.getString( PKG, "NamedClusterDialog.Hostname" ), labelGridData ); // hdfs host input Callback hdfsHostCB = new Callback() { public void invoke( NamedCluster nc, TextVar textVar, String value ) { nc.setHdfsHost( value ); } }; hdfsHostText = createTextVar( c, hostUIComposite, c.getHdfsHost(), gridData, TEXT_FLAGS, hdfsHostCB ); hdfsPortLabel = createLabel( portUIComposite, BaseMessages.getString( PKG, "NamedClusterDialog.Port" ), portLabelGridData ); // hdfs port input Callback hdfsPortCB = new Callback() { public void invoke( NamedCluster nc, TextVar textVar, String value ) { 
nc.setHdfsPort( value ); } }; hdfsPortText = createTextVar( c, portUIComposite, c.getHdfsPort(), numberGridData, TEXT_FLAGS, hdfsPortCB ); Composite hdfsCredentialsRowComposite = createTwoColumnsContainer( pp ); Composite usernameUIComposite = new Composite( hdfsCredentialsRowComposite, SWT.NONE ); props.setLook( usernameUIComposite ); usernameUIComposite.setLayout( new GridLayout( ONE_COLUMN, false ) ); Composite passwordUIComposite = new Composite( hdfsCredentialsRowComposite, SWT.NONE ); props.setLook( passwordUIComposite ); passwordUIComposite.setLayout( new GridLayout( ONE_COLUMN, false ) ); hdfsUsernameLabel = createLabel( usernameUIComposite, BaseMessages.getString( PKG, "NamedClusterDialog.Username" ), userNameLabelGridData ); // hdfs user input Callback hdfsUsernameCB = new Callback() { public void invoke( NamedCluster nc, TextVar textVar, String value ) { nc.setHdfsUsername( value ); } }; hdfsUsernameText = createTextVar( c, usernameUIComposite, c.getHdfsUsername(), userNameGridData, TEXT_FLAGS, hdfsUsernameCB ); hdfsPasswordLabel = createLabel( passwordUIComposite, BaseMessages.getString( PKG, "NamedClusterDialog.Password" ), passwordLabelGridData ); // hdfs password input Callback hdfsPasswordCB = new Callback() { public void invoke( NamedCluster nc, TextVar textVar, String value ) { nc.setHdfsPassword( nc.encodePassword( value ) ); } }; hdfsPasswordText = createTextVar( c, passwordUIComposite, c.decodePassword( c.getHdfsPassword() ), passwordGridData, PASSWORD_FLAGS, hdfsPasswordCB ); } private void createJobTrackerGroup( Composite parentComposite, final NamedCluster c ) { Composite pp = createGroup( parentComposite, BaseMessages.getString( PKG, "NamedClusterDialog.JobTracker" ) ); Composite jobTrackerRowComposite = createTwoColumnsContainer( pp ); Composite hostUIComposite = new Composite( jobTrackerRowComposite, SWT.NONE ); props.setLook( hostUIComposite ); hostUIComposite.setLayout( new GridLayout( ONE_COLUMN, false ) ); Composite portUIComposite = new Composite( jobTrackerRowComposite, SWT.NONE ); props.setLook( portUIComposite ); portUIComposite.setLayout( new GridLayout( ONE_COLUMN, false ) ); jtHostLabel = createLabel( hostUIComposite, BaseMessages.getString( PKG, "NamedClusterDialog.Hostname" ), labelGridData ); // hdfs host input Callback hostCB = new Callback() { public void invoke( NamedCluster nc, TextVar textVar, String value ) { nc.setJobTrackerHost( value ); } }; jtHostNameText = createTextVar( c, hostUIComposite, c.getJobTrackerHost(), gridData, TEXT_FLAGS, hostCB ); jtPortLabel = createLabel( portUIComposite, BaseMessages.getString( PKG, "NamedClusterDialog.Port" ), portLabelGridData ); // hdfs port input Callback portCB = new Callback() { public void invoke( NamedCluster nc, TextVar textVar, String value ) { nc.setJobTrackerPort( value ); } }; jtPortText = createTextVar( c, portUIComposite, c.getJobTrackerPort(), numberGridData, TEXT_FLAGS, portCB ); } private void createZooKeeperGroup( Composite parentComposite, final NamedCluster c ) { Composite pp = createGroup( parentComposite, BaseMessages.getString( PKG, "NamedClusterDialog.ZooKeeper" ) ); Composite zooKeeperRowComposite = createTwoColumnsContainer( pp ); Composite hostUIComposite = new Composite( zooKeeperRowComposite, SWT.NONE ); props.setLook( hostUIComposite ); hostUIComposite.setLayout( new GridLayout( ONE_COLUMN, false ) ); Composite portUIComposite = new Composite( zooKeeperRowComposite, SWT.NONE ); props.setLook( portUIComposite ); portUIComposite.setLayout( new GridLayout( ONE_COLUMN, false ) 
); // hdfs host label createLabel( hostUIComposite, BaseMessages.getString( PKG, "NamedClusterDialog.Hostname" ), labelGridData ); // hdfs host input Callback hostCB = new Callback() { public void invoke( NamedCluster nc, TextVar textVar, String value ) { nc.setZooKeeperHost( value ); } }; createTextVar( c, hostUIComposite, c.getZooKeeperHost(), gridData, TEXT_FLAGS, hostCB ); // hdfs port label createLabel( portUIComposite, BaseMessages.getString( PKG, "NamedClusterDialog.Port" ), portLabelGridData ); // hdfs port input Callback portCB = new Callback() { public void invoke( NamedCluster nc, TextVar textVar, String value ) { nc.setZooKeeperPort( value ); } }; createTextVar( c, portUIComposite, c.getZooKeeperPort(), numberGridData, TEXT_FLAGS, portCB ); } private void createOozieGroup( Composite parentComposite, final NamedCluster namedCluster ) { Composite pp = createGroup( parentComposite, BaseMessages.getString( PKG, "NamedClusterDialog.Oozie" ) ); Composite container = new Composite( pp, SWT.NONE ); props.setLook( container ); GridLayout gridLayout = new GridLayout( ONE_COLUMN, false ); gridLayout.marginBottom = 5; gridLayout.marginTop = 5; container.setLayout( gridLayout ); // oozie label createLabel( container, BaseMessages.getString( PKG, "NamedClusterDialog.URL" ), labelGridData ); // oozie url Callback hostCB = new Callback() { public void invoke( NamedCluster nc, TextVar textVar, String value ) { nc.setOozieUrl( value ); } }; createTextVar( namedCluster, container, namedCluster.getOozieUrl(), gridData, TEXT_FLAGS, hostCB ); } private void createGatewayGroup( Composite parentComposite, final NamedCluster c ) { Composite pp = createGroup( parentComposite, BaseMessages.getString( PKG, "NamedClusterDialog.Gateway" ) ); GridLayout gridLayout = new GridLayout( ONE_COLUMN, false ); gridLayout.marginBottom = 5; gridLayout.marginTop = 5; Composite gatewayUrlUIComposite = new Composite( pp, SWT.NONE ); props.setLook( gatewayUrlUIComposite ); gatewayUrlUIComposite.setLayout( gridLayout ); createLabel( gatewayUrlUIComposite, BaseMessages.getString( PKG, "NamedClusterDialog.GatewayUrl" ), labelGridData ); // gateway url input Callback gatewayUrlCB = new Callback() { public void invoke( NamedCluster nc, TextVar textVar, String value ) { nc.setGatewayUrl( value ); stateChanged(); } }; GridData gd = new GridData(); gd.widthHint = 365; createTextVar( c, gatewayUrlUIComposite, c.getGatewayUrl(), gd, TEXT_FLAGS, gatewayUrlCB ); Composite gatewayCredentialsRowComposite = createTwoColumnsContainer( pp ); Composite usernameUIComposite = new Composite( gatewayCredentialsRowComposite, SWT.NONE ); props.setLook( usernameUIComposite ); GridLayout userNamelayout = new GridLayout( ONE_COLUMN, false ); usernameUIComposite.setLayout( userNamelayout ); Composite passwordUIComposite = new Composite( gatewayCredentialsRowComposite, SWT.NONE ); props.setLook( passwordUIComposite ); GridLayout passwordLayout = new GridLayout( ONE_COLUMN, false ); passwordUIComposite.setLayout( passwordLayout ); createLabel( usernameUIComposite, BaseMessages.getString( PKG, "NamedClusterDialog.Username" ), userNameLabelGridData ); // gateway user input Callback gatewayUsernameCB = new Callback() { public void invoke( NamedCluster nc, TextVar textVar, String value ) { nc.setGatewayUsername( value ); stateChanged(); } }; createTextVar( c, usernameUIComposite, c.getGatewayUsername(), userNameGridData, TEXT_FLAGS, gatewayUsernameCB ); createLabel( passwordUIComposite, BaseMessages.getString( PKG, "NamedClusterDialog.Password" ), 
passwordLabelGridData ); // gateway password input Callback gatewayPasswordCB = new Callback() { public void invoke( NamedCluster nc, TextVar textVar, String value ) { nc.setGatewayPassword( nc.encodePassword( value ) ); stateChanged(); } }; createTextVar( c, passwordUIComposite, c.decodePassword( c.getGatewayPassword() ), passwordGridData, PASSWORD_FLAGS, gatewayPasswordCB ); } private void createKafkaGroup( Composite parentComposite, final NamedCluster namedCluster ) { Composite pp = createGroup( parentComposite, BaseMessages.getString( PKG, "NamedClusterDialog.Kafka.GroupTitle" ) ); Composite container = new Composite( pp, SWT.NONE ); props.setLook( container ); GridLayout gridLayout = new GridLayout( ONE_COLUMN, false ); gridLayout.marginBottom = 5; gridLayout.marginTop = 5; container.setLayout( gridLayout ); // kafka label createLabel( container, BaseMessages.getString( PKG, "NamedClusterDialog.Kafka.BootstrapServers.Label" ), labelGridData ); // kafka bootstrap servers Callback bootstrapServersCB = new Callback() { public void invoke( NamedCluster nc, TextVar textVar, String value ) { nc.setKafkaBootstrapServers( value ); } }; createTextVar( namedCluster, container, namedCluster.getKafkaBootstrapServers(), gridData, TEXT_FLAGS, bootstrapServersCB ); } private void setHdfsAndJobTrackerState( NamedCluster cluster ) { boolean state = !cluster.isMapr(); jtHostLabel.setEnabled( state ); jtHostNameText.setEnabled( state ); jtPortLabel.setEnabled( state ); jtPortText.setEnabled( state ); hdfsHostLabel.setEnabled( state ); hdfsHostText.setEnabled( state ); hdfsPortLabel.setEnabled( state ); hdfsPortText.setEnabled( state ); hdfsUsernameLabel.setEnabled( state ); hdfsUsernameText.setEnabled( state ); hdfsPasswordLabel.setEnabled( state ); hdfsPasswordText.setEnabled( state ); String storageName = cluster.getStorageScheme(); //get the human readable name if ( !Utils.isEmpty( schemeNames ) && !Utils.isEmpty( schemeValues ) ) { storageName = schemeNames.get( schemeValues.indexOf( storageName ) ); } hdfsGroup.setText( storageName ); } private boolean shouldRenderGatewayCheckbox( final NamedCluster namedCluster ) { return Boolean.valueOf( namedCluster.getVariable( KETTLE_HADOOP_CLUSTER_GATEWAY_CONNECTION ) ); } private void stateChanged() { if ( stateChangeListener != null ) { stateChangeListener.stateModified(); } } } ================================================ FILE: kettle-plugins/common/ui/src/main/java/org/pentaho/big/data/plugins/common/ui/NamedClusterDialogImpl.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. 
* * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.plugins.common.ui; import org.apache.commons.lang.StringUtils; import org.eclipse.jface.dialogs.MessageDialog; import org.eclipse.swt.SWT; import org.eclipse.swt.custom.ScrolledComposite; import org.eclipse.swt.events.ShellAdapter; import org.eclipse.swt.events.ShellEvent; import org.eclipse.swt.layout.FormAttachment; import org.eclipse.swt.layout.FormData; import org.eclipse.swt.layout.FormLayout; import org.eclipse.swt.widgets.Button; import org.eclipse.swt.widgets.Dialog; import org.eclipse.swt.widgets.Display; import org.eclipse.swt.widgets.Event; import org.eclipse.swt.widgets.Label; import org.eclipse.swt.widgets.Listener; import org.eclipse.swt.widgets.MessageBox; import org.eclipse.swt.widgets.Shell; import org.pentaho.hadoop.shim.api.cluster.NamedClusterService; import org.pentaho.di.core.Const; import org.pentaho.di.core.exception.KettleException; import org.pentaho.di.core.plugins.LifecyclePluginType; import org.pentaho.di.core.plugins.PluginInterface; import org.pentaho.di.core.plugins.PluginRegistry; import org.pentaho.di.i18n.BaseMessages; import org.pentaho.di.ui.core.PropsUI; import org.pentaho.di.ui.core.dialog.ErrorDialog; import org.pentaho.di.ui.core.gui.GUIResource; import org.pentaho.di.ui.core.gui.WindowProperty; import org.pentaho.di.ui.spoon.Spoon; import org.pentaho.di.ui.trans.step.BaseStepDialog; import org.pentaho.di.ui.util.HelpUtils; import org.pentaho.hadoop.shim.api.cluster.NamedCluster; import org.pentaho.metastore.api.exceptions.MetaStoreException; import org.pentaho.runtime.test.RuntimeTestStatus; import org.pentaho.runtime.test.RuntimeTester; import org.pentaho.runtime.test.action.RuntimeTestActionService; /** * Dialog that allows you to edit the settings of a named cluster. * * @see NamedCluster */ public class NamedClusterDialogImpl extends Dialog { private static final int RESULT_NO = 1; private static final int DIALOG_WIDTH = 459; private static Class PKG = NamedClusterDialogImpl.class; // for i18n purposes, needed by Translator2!! private final NamedClusterService namedClusterService; private final RuntimeTestActionService runtimeTestActionService; private final RuntimeTester runtimeTester; private Shell shell; private PropsUI props; private NamedCluster originalNamedCluster; private NamedCluster namedCluster; private boolean newClusterCheck = false; private String result; public NamedClusterDialogImpl( Shell parent, NamedClusterService namedClusterService, RuntimeTestActionService runtimeTestActionService, RuntimeTester runtimeTester ) { this( parent, namedClusterService, runtimeTestActionService, runtimeTester, null ); } public NamedClusterDialogImpl( Shell parent, NamedClusterService namedClusterService, RuntimeTestActionService runtimeTestActionService, RuntimeTester runtimeTester, NamedCluster namedCluster ) { super( parent ); this.namedClusterService = namedClusterService; this.runtimeTestActionService = runtimeTestActionService; this.runtimeTester = runtimeTester; props = PropsUI.getInstance(); this.namedCluster = namedCluster; this.originalNamedCluster = namedCluster == null ? 
null : namedCluster.clone(); } public NamedCluster getNamedCluster() { return namedCluster; } public void setNamedCluster( NamedCluster namedCluster ) { this.namedCluster = namedCluster; this.originalNamedCluster = namedCluster.clone(); } public boolean isNewClusterCheck() { return newClusterCheck; } public void setNewClusterCheck( boolean newClusterCheck ) { this.newClusterCheck = newClusterCheck; } public void dispose() { props.setScreen( new WindowProperty( shell ) ); shell.dispose(); } public String open() { Shell parent = getParent(); Display display = parent.getDisplay(); shell = new Shell( parent, SWT.DIALOG_TRIM | SWT.CLOSE | SWT.ICON | SWT.RESIZE ); props.setLook( shell ); shell.setImage( GUIResource.getInstance().getImageSpoon() ); shell.setMinimumSize( DIALOG_WIDTH, 458 ); shell.setText( BaseMessages.getString( PKG, "NamedClusterDialog.Shell.Title" ) ); FormLayout formLayout = new FormLayout(); formLayout.marginWidth = 15; formLayout.marginHeight = 15; shell.setLayout( formLayout ); BaseStepDialog.setSize( shell ); // Create help button String docUrl = Const.getDocUrl( BaseMessages.getString( PKG, "NamedClusterDialog.Shell.Doc" ) ); PluginInterface plugin = PluginRegistry.getInstance().findPluginWithId( LifecyclePluginType.class, /* TODO */ "HadoopSpoonPlugin" ); HelpUtils.createHelpButton( shell, HelpUtils.getHelpDialogTitle( plugin ), docUrl, BaseMessages.getString( PKG, "NamedClusterDialog.Shell.Title" ) ); // Buttons Button wCancel = new Button( shell, SWT.PUSH ); wCancel.setText( BaseMessages.getString( PKG, "System.Button.Cancel" ) ); FormData fd = new FormData(); Button wOK = new Button( shell, SWT.PUSH ); wOK.setText( BaseMessages.getString( PKG, "System.Button.OK" ) ); Button wTest = new Button( shell, SWT.PUSH ); wTest.setText( BaseMessages.getString( PKG, "System.Button.Test" ) ); Button[] buttons = new Button[] { wTest, wOK, wCancel }; BaseStepDialog.positionBottomRightButtons( shell, buttons, Const.FORM_MARGIN, null ); // Create a horizontal separator Label bottomSeparator = new Label( shell, SWT.HORIZONTAL | SWT.SEPARATOR ); fd = new FormData(); fd.bottom = new FormAttachment( wCancel, -15 ); fd.left = new FormAttachment( 0, 0 ); fd.right = new FormAttachment( 100, 0 ); bottomSeparator.setLayoutData( fd ); ScrolledComposite scrolledComposite = new ScrolledComposite( shell, SWT.V_SCROLL ); fd = new FormData(); fd.left = new FormAttachment( 0, 0 ); fd.right = new FormAttachment( 100, 0 ); fd.top = new FormAttachment( 0, 0 ); fd.bottom = new FormAttachment( bottomSeparator, -15 ); scrolledComposite.setLayoutData( fd ); props.setLook( scrolledComposite ); NamedClusterComposite namedClusterComposite = new NamedClusterComposite( scrolledComposite, namedCluster, props, namedClusterService ); scrolledComposite.setContent( namedClusterComposite ); namedClusterComposite.pack(); // Add listeners wTest.addListener( SWT.Selection, new Listener() { @Override public void handleEvent( Event event ) { try { RuntimeTestStatus testStatus = ClusterTestDialog.create( shell, getNamedCluster(), runtimeTester ).open(); if ( testStatus != null ) { // We have good results, show the dialog try { new ClusterTestResultsDialog( shell, runtimeTestActionService, testStatus ).open(); } catch ( KettleException ke ) { new ErrorDialog( shell, BaseMessages.getString( PKG, "ClusterTestResultsDialog.FailedToOpen" ), ke.getMessage(), ke ); } } } catch ( KettleException e ) { // The exception already has the message localized new ErrorDialog( shell, BaseMessages.getString( PKG, 
"NamedClusterDialog.DialogError" ), e.getMessage(), e ); } } } ); wOK.addListener( SWT.Selection, e -> ok() ); wCancel.addListener( SWT.Selection, e -> cancel() ); // Detect X or ALT-F4 or something that kills this window... shell.addShellListener( new ShellAdapter() { public void shellClosed( ShellEvent e ) { cancel(); } } ); namedClusterComposite.setStateChangeListener( () -> { boolean enabled = !namedCluster.isUseGateway() || ( StringUtils.isNotBlank( namedCluster.getName() ) && StringUtils.isNotBlank( namedCluster.getGatewayUrl() ) && StringUtils.isNotBlank( namedCluster.getGatewayUsername() ) && StringUtils.isNotBlank( namedCluster.decodePassword( namedCluster.getGatewayPassword() ) ) ); if ( wOK.isEnabled() != enabled ) { wOK.setEnabled( enabled ); } } ); shell.open(); while ( !shell.isDisposed() ) { if ( !display.readAndDispatch() ) { display.sleep(); } } return result; } private void cancel() { result = null; dispose(); } public void ok() { result = namedCluster.getName(); if ( StringUtils.isBlank( result ) ) { MessageBox mb = new MessageBox( shell, SWT.OK | SWT.ICON_ERROR ); mb.setText( BaseMessages.getString( PKG, "NamedClusterDialog.Error" ) ); mb.setMessage( BaseMessages.getString( PKG, "NamedClusterDialog.ClusterNameMissing" ) ); mb.open(); return; } else if ( StringUtils.isBlank( namedCluster.getShimIdentifier() ) ) { MessageBox mb = new MessageBox( shell, SWT.OK | SWT.ICON_ERROR ); mb.setText( BaseMessages.getString( PKG, "NamedClusterDialog.Error" ) ); mb.setMessage( BaseMessages.getString( PKG, "NamedClusterDialog.ShimIdentifierMissing" ) ); mb.open(); return; } else if ( newClusterCheck || !originalNamedCluster.getName().equals( result ) ) { // check that the getName does not already exist try { NamedCluster fetched = namedClusterService.read( result, Spoon.getInstance().getMetaStore() ); if ( fetched != null ) { String title = BaseMessages.getString( PKG, "NamedClusterDialog.ClusterNameExists.Title" ); String message = BaseMessages.getString( PKG, "NamedClusterDialog.ClusterNameExists", result ); String replaceButton = BaseMessages.getString( PKG, "NamedClusterDialog.ClusterNameExists.Replace" ); String doNotReplaceButton = BaseMessages.getString( PKG, "NamedClusterDialog.ClusterNameExists.DoNotReplace" ); MessageDialog dialog = new MessageDialog( shell, title, null, message, MessageDialog.WARNING, new String[]{ replaceButton, doNotReplaceButton }, 0 ); // there already exists a cluster with the new getName, ask the user if ( RESULT_NO == dialog.open() ) { // do not exist dialog return; } } } catch ( MetaStoreException ignored ) { // the lookup failed, the cluster does not exist, move on to dispose } } dispose(); } } ================================================ FILE: kettle-plugins/common/ui/src/main/java/org/pentaho/big/data/plugins/common/ui/NamedClusterWidgetImpl.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. 
* * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.plugins.common.ui; import org.eclipse.swt.SWT; import org.eclipse.swt.events.SelectionListener; import org.eclipse.swt.layout.RowData; import org.eclipse.swt.layout.RowLayout; import org.eclipse.swt.widgets.Button; import org.eclipse.swt.widgets.Combo; import org.eclipse.swt.widgets.Composite; import org.eclipse.swt.widgets.Event; import org.eclipse.swt.widgets.Label; import org.eclipse.swt.widgets.Listener; import org.pentaho.hadoop.shim.api.cluster.NamedClusterService; import org.pentaho.di.base.AbstractMeta; import org.pentaho.di.i18n.BaseMessages; import org.pentaho.di.ui.core.PropsUI; import org.pentaho.di.ui.spoon.Spoon; import org.pentaho.hadoop.shim.api.cluster.NamedCluster; import org.pentaho.metastore.api.exceptions.MetaStoreException; import org.pentaho.runtime.test.RuntimeTester; import org.pentaho.runtime.test.action.RuntimeTestActionService; import java.util.List; public class NamedClusterWidgetImpl extends Composite { private static Class PKG = NamedClusterWidgetImpl.class; private NamedClusterService namedClusterService; private Combo nameClusterCombo; private HadoopClusterDelegateImpl ncDelegate; public NamedClusterWidgetImpl( Composite parent, boolean showLabel, NamedClusterService namedClusterService, RuntimeTestActionService runtimeTestActionService, RuntimeTester clusterTester, boolean enableNewEditNameClusterButtons ) { super( parent, SWT.NONE ); this.namedClusterService = namedClusterService; ncDelegate = new HadoopClusterDelegateImpl( Spoon.getInstance(), this.namedClusterService, runtimeTestActionService, clusterTester ); PropsUI props = PropsUI.getInstance(); props.setLook( this ); RowLayout layout = new RowLayout( SWT.HORIZONTAL ); //layout.center = true; //TODO EC:FIX THIS setLayout( layout ); if ( showLabel ) { Label nameLabel = new Label( this, SWT.NONE ); nameLabel.setText( BaseMessages.getString( PKG, "NamedClusterDialog.Shell.Title" ) + ":" ); props.setLook( nameLabel ); } setNameClusterCombo( new Combo( this, SWT.DROP_DOWN | SWT.READ_ONLY ) ); getNameClusterCombo().setLayoutData( new RowData( 150, SWT.DEFAULT ) ); if ( enableNewEditNameClusterButtons ) { Button editButton = new Button( this, SWT.NONE ); editButton.setText( BaseMessages.getString( PKG, "NamedClusterWidget.NamedCluster.Edit" ) ); editButton.addListener( SWT.Selection, new Listener() { public void handleEvent( Event e ) { editNamedCluster(); } } ); props.setLook( editButton ); } if ( enableNewEditNameClusterButtons ) { Button newButton = new Button( this, SWT.NONE ); newButton.setText( BaseMessages.getString( PKG, "NamedClusterWidget.NamedCluster.New" ) ); newButton.addListener( SWT.Selection, new Listener() { public void handleEvent( Event e ) { newNamedCluster(); } } ); props.setLook( newButton ); initiate(); } } private void newNamedCluster() { Spoon spoon = Spoon.getInstance(); AbstractMeta meta = (AbstractMeta) spoon.getActiveMeta(); ncDelegate.newNamedCluster( meta, spoon.getMetaStore(), getShell() ); initiate(); } private void editNamedCluster() { Spoon spoon = Spoon.getInstance(); AbstractMeta meta = (AbstractMeta) spoon.getActiveMeta(); if ( meta != null ) { List namedClusters = null; try { namedClusters = namedClusterService.list( spoon.getMetaStore() ); } catch ( MetaStoreException e ) { //Ignore } int index = getNameClusterCombo().getSelectionIndex(); if ( index > -1 && namedClusters != null && namedClusters.size() > 0 ) { 
ncDelegate.editNamedCluster( spoon.getMetaStore(), namedClusters .get( index ), getShell() ); initiate(); } } } protected String[] getNamedClusterNames() { try { return namedClusterService.listNames( Spoon.getInstance().getMetaStore() ) .toArray( new String[ 0 ] ); } catch ( MetaStoreException e ) { return new String[ 0 ]; } } public void initiate() { int selectedIndex = getNameClusterCombo().getSelectionIndex(); getNameClusterCombo().removeAll(); getNameClusterCombo().setItems( getNamedClusterNames() ); getNameClusterCombo().select( selectedIndex ); } public NamedCluster getSelectedNamedCluster() { Spoon spoon = Spoon.getInstance(); int index = getNameClusterCombo().getSelectionIndex(); if ( index > -1 ) { String name = getNameClusterCombo().getItem( index ); try { return namedClusterService.read( name, spoon.getMetaStore() ); } catch ( MetaStoreException e ) { return null; } } return null; } public void setSelectedNamedCluster( String name ) { getNameClusterCombo().deselectAll(); for ( int i = 0; i < getNameClusterCombo().getItemCount(); i++ ) { if ( getNameClusterCombo().getItem( i ).equals( name ) ) { getNameClusterCombo().select( i ); return; } } } public void addSelectionListener( SelectionListener selectionListener ) { getNameClusterCombo().addSelectionListener( selectionListener ); } public Combo getNameClusterCombo() { return nameClusterCombo; } protected void setNameClusterCombo( Combo nameClusterCombo ) { this.nameClusterCombo = nameClusterCombo; } } ================================================ FILE: kettle-plugins/common/ui/src/main/java/org/pentaho/big/data/plugins/common/ui/StateChangeListener.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.plugins.common.ui; interface StateChangeListener { void stateModified(); } ================================================ FILE: kettle-plugins/common/ui/src/main/java/org/pentaho/big/data/plugins/common/ui/TestResultComposite.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.plugins.common.ui; import org.eclipse.swt.widgets.Composite; /** * Created by mburgess on 8/27/15. */ public class TestResultComposite extends Composite { public TestResultComposite( Composite parent, int style ) { super( parent, style ); } } ================================================ FILE: kettle-plugins/common/ui/src/main/java/org/pentaho/big/data/plugins/common/ui/VfsFileChooserHelper.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. 
* * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.plugins.common.ui; import com.google.common.annotations.VisibleForTesting; import org.apache.commons.vfs2.FileObject; import org.apache.commons.vfs2.FileSystemException; import org.apache.commons.vfs2.FileSystemOptions; import org.eclipse.swt.widgets.Shell; import org.pentaho.di.core.exception.KettleException; import org.pentaho.di.core.variables.VariableSpace; import org.pentaho.di.core.vfs.KettleVFS; import org.pentaho.di.ui.spoon.Spoon; import org.pentaho.hadoop.shim.api.cluster.NamedCluster; import org.pentaho.vfs.ui.CustomVfsUiPanel; import org.pentaho.vfs.ui.VfsFileChooserDialog; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Method; /** * User: RFellows Date: 6/8/12 */ public class VfsFileChooserHelper { private static final Logger logger = LogManager.getLogger( VfsFileChooserHelper.class ); private VfsFileChooserDialog fileChooserDialog = null; private Shell shell = null; private VariableSpace variableSpace = null; private FileSystemOptions fileSystemOptions = null; private String defaultScheme = "file"; private String[] schemeRestrictions = null; private boolean showFileScheme = true; public VfsFileChooserHelper( Shell shell, VfsFileChooserDialog fileChooserDialog, VariableSpace variableSpace ) { this( shell, fileChooserDialog, variableSpace, new FileSystemOptions() ); } public VfsFileChooserHelper( Shell shell, VfsFileChooserDialog fileChooserDialog, VariableSpace variableSpace, FileSystemOptions fileSystemOptions ) { this.fileChooserDialog = fileChooserDialog; this.shell = shell; this.variableSpace = variableSpace; this.fileSystemOptions = fileSystemOptions; this.schemeRestrictions = new String[0]; } public FileObject browse( String[] fileFilters, String[] fileFilterNames, String fileUri ) throws KettleException, FileSystemException { return browse( fileFilters, fileFilterNames, fileUri, VfsFileChooserDialog.VFS_DIALOG_OPEN_DIRECTORY ); } public FileObject browse( String[] fileFilters, String[] fileFilterNames, String fileUri, int fileDialogMode ) throws KettleException, FileSystemException { return browse( fileFilters, fileFilterNames, fileUri, fileSystemOptions, fileDialogMode ); } public FileObject browse( String[] fileFilters, String[] fileFilterNames, String fileUri, int fileDialogMode, boolean showLocation ) throws KettleException, FileSystemException { return browse( fileFilters, fileFilterNames, fileUri, fileSystemOptions, fileDialogMode, showLocation, true ); } public FileObject browse( String[] fileFilters, String[] fileFilterNames, String fileUri, int fileDialogMode, boolean showLocation, boolean showCustomUI ) throws KettleException, FileSystemException { return browse( fileFilters, fileFilterNames, fileUri, fileSystemOptions, fileDialogMode, showLocation, showCustomUI ); } public FileObject browse( String[] fileFilters, String[] fileFilterNames, String fileUri, FileSystemOptions opts ) throws KettleException, FileSystemException { return browse( fileFilters, fileFilterNames, fileUri, opts, VfsFileChooserDialog.VFS_DIALOG_OPEN_DIRECTORY ); } public FileObject browse( String[] fileFilters, String[] fileFilterNames, String fileUri, FileSystemOptions opts, int fileDialogMode ) throws KettleException, FileSystemException { return browse( fileFilters, fileFilterNames, fileUri, opts, fileDialogMode, true, true ); } public 
FileObject browse( String[] fileFilters, String[] fileFilterNames, String fileUri, FileSystemOptions opts, int fileDialogMode, boolean showLocation, boolean showCustomUI ) throws KettleException, FileSystemException { // Get current file FileObject rootFile = null; FileObject initialFile = null; Spoon spoon = Spoon.getInstance(); if ( fileUri != null ) { initialFile = KettleVFS.getInstance( spoon.getExecutionBowl() ).getFileObject( fileUri, variableSpace, opts ); } else { initialFile = KettleVFS.getInstance( spoon.getExecutionBowl() ) .getFileObject( Spoon.getInstance().getLastFileOpened() ); } rootFile = initialFile.getFileSystem().getRoot(); fileChooserDialog.setRootFile( rootFile ); fileChooserDialog.setInitialFile( initialFile ); fileChooserDialog.defaultInitialFile = rootFile; FileObject selectedFile = null; selectedFile = fileChooserDialog.open( shell, this.schemeRestrictions, getDefaultScheme(), showFileScheme(), initialFile.getName().getPath(), fileFilters, fileFilterNames, returnsUserAuthenticatedFileObjects(), fileDialogMode, showLocation, showCustomUI ); return selectedFile; } public VariableSpace getVariableSpace() { return variableSpace; } public void setVariableSpace( VariableSpace variableSpace ) { this.variableSpace = variableSpace; } public FileSystemOptions getFileSystemOptions() { return fileSystemOptions; } public void setFileSystemOptions( FileSystemOptions fileSystemOptions ) { this.fileSystemOptions = fileSystemOptions; } public String getDefaultScheme() { return defaultScheme; } public void setDefaultScheme( String defaultScheme ) { this.defaultScheme = defaultScheme; } public String getSchemeRestriction() { String schemaRestriction = null; if ( this.schemeRestrictions != null && this.schemeRestrictions.length > 0 ) { schemaRestriction = this.schemeRestrictions[0]; } return schemaRestriction; } public void setSchemeRestriction( String schemeRestriction ) { this.schemeRestrictions = new String[1]; this.schemeRestrictions[0] = schemeRestriction; } public void setSchemeRestrictions( String[] schemeRestrictions ) { this.schemeRestrictions = schemeRestrictions; } public boolean showFileScheme() { return this.showFileScheme; } public void setShowFileScheme( boolean showFileScheme ) { this.showFileScheme = showFileScheme; } protected boolean returnsUserAuthenticatedFileObjects() { return false; } public void setNamedCluster( NamedCluster namedCluster ) { VfsFileChooserDialog dialog = Spoon.getInstance().getVfsFileChooserDialog( null, null ); for ( CustomVfsUiPanel currentPanel : dialog.getCustomVfsUiPanels() ) { if ( currentPanel != null ) { try { Method setNamedCluster = currentPanel.getClass().getMethod( "setNamedCluster", new Class[] { String.class } ); setNamedCluster.invoke( currentPanel, namedCluster.getName() ); } catch ( NoSuchMethodException e ) { if ( logger.isDebugEnabled() ) { logger.debug( "Couldn't set named cluster " + namedCluster.getName() + " on " + currentPanel + " because it doesn't have setNamedCluster method.", e ); } } catch ( InvocationTargetException e ) { if ( logger.isDebugEnabled() ) { logger.debug( "Couldn't set named cluster " + namedCluster.getName() + " on " + currentPanel + " because of exception.", e.getCause() ); } } catch ( IllegalAccessException e ) { if ( logger.isDebugEnabled() ) { logger.debug( "Couldn't set named cluster " + namedCluster.getName() + " on " + currentPanel + " because setNamedCluster method isn't accessible.", e ); } } } } } @VisibleForTesting VfsFileChooserDialog getFileChooserDialog() { return fileChooserDialog; } 
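  /*
   * A minimal usage sketch for this helper, based only on the constructor, the scheme setters and
   * the three-argument browse overload shown above. Illustrative only: the shell, chooser dialog
   * and variable-space instances are assumed to come from the caller, and the "hdfs" scheme and
   * starting URI below are hypothetical placeholder values, not values mandated by this class.
   *
   *   VfsFileChooserHelper helper = new VfsFileChooserHelper( shell, vfsFileChooserDialog, variableSpace );
   *   helper.setDefaultScheme( "hdfs" );        // assumption: browsing an HDFS location
   *   helper.setSchemeRestriction( "hdfs" );    // limit the chooser to that single scheme
   *   FileObject selection = helper.browse(     // may throw KettleException / FileSystemException
   *     new String[] { "*.*" },                 // file filters
   *     new String[] { "All files" },           // filter display names
   *     "hdfs://namenode:8020/user/pentaho" );  // hypothetical starting URI
   */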
@VisibleForTesting Shell getShell() { return shell; } @VisibleForTesting String[] getSchemeRestrictions() { return schemeRestrictions; } } ================================================ FILE: kettle-plugins/common/ui/src/main/resources/apachesampleconfig.properties ================================================ # # HITACHI VANTARA PROPRIETARY AND CONFIDENTIAL # # Copyright 2007 - 2022 Hitachi Vantara. All rights reserved. # # NOTICE: All information including source code contained herein is, and # remains the sole property of Hitachi Vantara and its licensors. The intellectual # and technical concepts contained herein are proprietary and confidential # to, and are trade secrets of Hitachi Vantara and may be covered by U.S. and foreign # patents, or patents in process, and are protected by trade secret and # copyright laws. The receipt or possession of this source code and/or related # information does not convey or imply any rights to reproduce, disclose or # distribute its contents, or to manufacture, use, or sell anything that it # may describe, in whole or in part. Any reproduction, modification, distribution, # or public display of this information without the express written authorization # from Hitachi Vantara is strictly prohibited and in violation of applicable laws and # international treaties. Access to the source code contained herein is strictly # prohibited to anyone except those individuals and entities who have executed # confidentiality and non-disclosure agreements or other agreements with Hitachi Vantara, # explicitly covering such access. # # ADDITIONAL RESOURCES # For additional questions please visit help.pentaho.com # Search for impersonation or secure impersonation # # # # THE NAME OF YOUR CONFIGURATION name=Apache Generic # # # # GENERAL CONFIGURATIONS # These are comma-separated lists of the following: # # Directories and/or file lists available for this configuration classpath= # # Native libraries library.path= # # Classes or packages to ignore from the Hadoop configuration directory ignore.classes=org.apache.derby.iapi.services mr1.java.system.hadoop.cluster.path.separator=: # # # SECURITY CONFIGURATIONS # # Kerberos Authentication pentaho.authentication.default.kerberos.principal=exampleUser@EXAMPLE.COM # # Please define one of the following: pentaho.authentication.default.kerberos.keytabLocation= pentaho.authentication.default.kerberos.password= # # Secure Impersonation # Please choose one of the following: # # disabled - when using an unsecured cluster # simple - when using a 1 to 1 mapping from the server to your cluster pentaho.authentication.default.mapping.impersonation.type=disabled pentaho.authentication.default.mapping.server.credentials.kerberos.principal=exampleUser@EXAMPLE.COM # # Please define one of the following: pentaho.authentication.default.mapping.server.credentials.kerberos.keytabLocation= pentaho.authentication.default.mapping.server.credentials.kerberos.password= # # # # OOZIE pentaho.oozie.proxy.user=oozie # ================================================ FILE: kettle-plugins/common/ui/src/main/resources/apachevanillasampleconfig.properties ================================================ # # HITACHI VANTARA PROPRIETARY AND CONFIDENTIAL # # Copyright 2024 Hitachi Vantara. All rights reserved. # # NOTICE: All information including source code contained herein is, and # remains the sole property of Hitachi Vantara and its licensors. 
The intellectual # and technical concepts contained herein are proprietary and confidential # to, and are trade secrets of Hitachi Vantara and may be covered by U.S. and foreign # patents, or patents in process, and are protected by trade secret and # copyright laws. The receipt or possession of this source code and/or related # information does not convey or imply any rights to reproduce, disclose or # distribute its contents, or to manufacture, use, or sell anything that it # may describe, in whole or in part. Any reproduction, modification, distribution, # or public display of this information without the express written authorization # from Hitachi Vantara is strictly prohibited and in violation of applicable laws and # international treaties. Access to the source code contained herein is strictly # prohibited to anyone except those individuals and entities who have executed # confidentiality and non-disclosure agreements or other agreements with Hitachi Vantara, # explicitly covering such access. # # ADDITIONAL RESOURCES # For additional questions please visit help.pentaho.com # Search for impersonation or secure impersonation # # # # THE NAME OF YOUR CONFIGURATION name= Apache Vanilla 3.3.0 # # # # GENERAL CONFIGURATIONS # These are comma-separated lists of the following: # # Directories and/or file lists available for this configuration classpath= # # Native libraries library.path= # # Classes or packages to ignore from the Hadoop configuration directory ignore.classes=org.apache.derby.iapi.services mr1.java.system.hadoop.cluster.path.separator=: # # # SECURITY CONFIGURATIONS # # Kerberos Authentication pentaho.authentication.default.kerberos.principal=exampleUser@EXAMPLE.COM # # Please define one of the following: pentaho.authentication.default.kerberos.keytabLocation= pentaho.authentication.default.kerberos.password= # # Secure Impersonation # Please choose one of the following: # # disabled - when using an unsecured cluster # simple - when using a 1 to 1 mapping from the server to your cluster pentaho.authentication.default.mapping.impersonation.type=disabled pentaho.authentication.default.mapping.server.credentials.kerberos.principal=exampleUser@EXAMPLE.COM # # Please define one of the following: pentaho.authentication.default.mapping.server.credentials.kerberos.keytabLocation= pentaho.authentication.default.mapping.server.credentials.kerberos.password= # # # # OOZIE pentaho.oozie.proxy.user=oozie # ================================================ FILE: kettle-plugins/common/ui/src/main/resources/cdpdc71sampleconfig.properties ================================================ # # HITACHI VANTARA PROPRIETARY AND CONFIDENTIAL # # Copyright 2007 - 2022 Hitachi Vantara. All rights reserved. # # NOTICE: All information including source code contained herein is, and # remains the sole property of Hitachi Vantara and its licensors. The intellectual # and technical concepts contained herein are proprietary and confidential # to, and are trade secrets of Hitachi Vantara and may be covered by U.S. and foreign # patents, or patents in process, and are protected by trade secret and # copyright laws. The receipt or possession of this source code and/or related # information does not convey or imply any rights to reproduce, disclose or # distribute its contents, or to manufacture, use, or sell anything that it # may describe, in whole or in part. 
Any reproduction, modification, distribution, # or public display of this information without the express written authorization # from Hitachi Vantara is strictly prohibited and in violation of applicable laws and # international treaties. Access to the source code contained herein is strictly # prohibited to anyone except those individuals and entities who have executed # confidentiality and non-disclosure agreements or other agreements with Hitachi Vantara, # explicitly covering such access. # # ADDITIONAL RESOURCES # For additional questions please visit help.pentaho.com # Search for impersonation or secure impersonation # # # # THE NAME OF YOUR CONFIGURATION name= Cloudera Data Platform(CDP) 7.1 # # # # GENERAL CONFIGURATIONS # These are comma-separated lists of the following: # # Directories and/or file lists available for this configuration classpath= # # Native libraries library.path= # # Classes or packages to ignore from the Hadoop configuration directory ignore.classes=org.apache.derby.iapi.services mr1.java.system.hadoop.cluster.path.separator=: # # # SECURITY CONFIGURATIONS # # Kerberos Authentication pentaho.authentication.default.kerberos.principal=exampleUser@EXAMPLE.COM # # Please define one of the following: pentaho.authentication.default.kerberos.keytabLocation= pentaho.authentication.default.kerberos.password= # # Secure Impersonation # Please choose one of the following: # # disabled - when using an unsecured cluster # simple - when using a 1 to 1 mapping from the server to your cluster pentaho.authentication.default.mapping.impersonation.type=disabled pentaho.authentication.default.mapping.server.credentials.kerberos.principal=exampleUser@EXAMPLE.COM # # Please define one of the following: pentaho.authentication.default.mapping.server.credentials.kerberos.keytabLocation= pentaho.authentication.default.mapping.server.credentials.kerberos.password= # # # # OOZIE pentaho.oozie.proxy.user=oozie # ================================================ FILE: kettle-plugins/common/ui/src/main/resources/dataproc1421sampleconfig.properties ================================================ # # HITACHI VANTARA PROPRIETARY AND CONFIDENTIAL # # Copyright 2007 - 2022 Hitachi Vantara. All rights reserved. # # NOTICE: All information including source code contained herein is, and # remains the sole property of Hitachi Vantara and its licensors. The intellectual # and technical concepts contained herein are proprietary and confidential # to, and are trade secrets of Hitachi Vantara and may be covered by U.S. and foreign # patents, or patents in process, and are protected by trade secret and # copyright laws. The receipt or possession of this source code and/or related # information does not convey or imply any rights to reproduce, disclose or # distribute its contents, or to manufacture, use, or sell anything that it # may describe, in whole or in part. Any reproduction, modification, distribution, # or public display of this information without the express written authorization # from Hitachi Vantara is strictly prohibited and in violation of applicable laws and # international treaties. Access to the source code contained herein is strictly # prohibited to anyone except those individuals and entities who have executed # confidentiality and non-disclosure agreements or other agreements with Hitachi Vantara, # explicitly covering such access. 
# THE NAME OF YOUR CONFIGURATION
name=Google Dataproc 1.4
#
# GENERAL CONFIGURATIONS
# These are comma-separated lists of the following:
#
# Directories and/or file lists available for this configuration
classpath=
#
# Native libraries
library.path=
#
# Classes or packages to ignore from the Hadoop configuration directory
ignore.classes=java.security.Permission,org.apache.derby
mr1.java.system.hadoop.cluster.path.separator=:
#
# SECURITY CONFIGURATIONS
#
# Kerberos Authentication
pentaho.authentication.default.kerberos.principal=exampleUser@EXAMPLE.COM
#
# Please define one of the following:
pentaho.authentication.default.kerberos.keytabLocation=
pentaho.authentication.default.kerberos.password=
#
# Secure Impersonation
# Please choose one of the following:
#
# disabled - when using an unsecured cluster
# simple - when using a 1 to 1 mapping from the server to your cluster
pentaho.authentication.default.mapping.impersonation.type=disabled
pentaho.authentication.default.mapping.server.credentials.kerberos.principal=exampleUser@EXAMPLE.COM
#
# Please define one of the following:
pentaho.authentication.default.mapping.server.credentials.kerberos.keytabLocation=
pentaho.authentication.default.mapping.server.credentials.kerberos.password=
#
# OOZIE
pentaho.oozie.proxy.user=oozie
================================================ FILE: kettle-plugins/common/ui/src/main/resources/dataproc23sampleconfig.properties ================================================
#
# HITACHI VANTARA PROPRIETARY AND CONFIDENTIAL
#
# Copyright 2007 - 2022 Hitachi Vantara. All rights reserved.
#
# THE NAME OF YOUR CONFIGURATION
name=Google Dataproc 2.3
#
# GENERAL CONFIGURATIONS
# These are comma-separated lists of the following:
#
# Directories and/or file lists available for this configuration
classpath=
#
# Native libraries
library.path=
#
# Classes or packages to ignore from the Hadoop configuration directory
ignore.classes=java.security.Permission,org.apache.derby
mr1.java.system.hadoop.cluster.path.separator=:
#
# SECURITY CONFIGURATIONS
#
# Kerberos Authentication
pentaho.authentication.default.kerberos.principal=exampleUser@EXAMPLE.COM
#
# Please define one of the following:
pentaho.authentication.default.kerberos.keytabLocation=
pentaho.authentication.default.kerberos.password=
#
# Secure Impersonation
# Please choose one of the following:
#
# disabled - when using an unsecured cluster
# simple - when using a 1 to 1 mapping from the server to your cluster
pentaho.authentication.default.mapping.impersonation.type=disabled
pentaho.authentication.default.mapping.server.credentials.kerberos.principal=exampleUser@EXAMPLE.COM
#
# Please define one of the following:
pentaho.authentication.default.mapping.server.credentials.kerberos.keytabLocation=
pentaho.authentication.default.mapping.server.credentials.kerberos.password=
#
# OOZIE
pentaho.oozie.proxy.user=oozie
================================================ FILE: kettle-plugins/common/ui/src/main/resources/emr521sampleconfig.properties ================================================
#
# HITACHI VANTARA PROPRIETARY AND CONFIDENTIAL
#
# Copyright 2007 - 2022 Hitachi Vantara. All rights reserved.
#
# THE NAME OF YOUR CONFIGURATION
name=Amazon EMR 5.21
#
# GENERAL CONFIGURATIONS
# These are comma-separated lists of the following:
#
# Directories and/or file lists available for this configuration
classpath=lib/avro-1.8.0.jar
#
# Native libraries
library.path=
#
# Comma-separated list of classes or package names to explicitly ignore when
# loading classes from the resources within this Hadoop configuration directory
# or the classpath property
# e.g.: org.apache.commons.log,org.apache.log4j
# Note, the two packages above are automatically included for all configurations
ignore.classes=com.ctc.wstx.stax
#
# SECURITY CONFIGURATIONS
#
# Kerberos Authentication
pentaho.authentication.default.kerberos.principal=exampleUser@EXAMPLE.COM
#
# Please define one of the following:
pentaho.authentication.default.kerberos.keytabLocation=
pentaho.authentication.default.kerberos.password=
#
# Secure Impersonation
# Please choose one of the following:
#
# disabled - when using an unsecured cluster
# simple - when using a 1 to 1 mapping from the server to your cluster
pentaho.authentication.default.mapping.impersonation.type=disabled
pentaho.authentication.default.mapping.server.credentials.kerberos.principal=exampleUser@EXAMPLE.COM
#
# Please define one of the following:
pentaho.authentication.default.mapping.server.credentials.kerberos.keytabLocation=
pentaho.authentication.default.mapping.server.credentials.kerberos.password=
#
# OOZIE
pentaho.oozie.proxy.user=oozie
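Apart from the Avro jar, the general section of this sample is empty. As a rough sketch of how the comma-separated lists described in the comments above might be populated (the additional jar, native-library path, and package names below are hypothetical, for illustration only):

# Illustrative values only -- hypothetical jar, path, and package names
classpath=lib/avro-1.8.0.jar,lib/my-custom-serde.jar
library.path=/opt/pentaho/native-libs
ignore.classes=com.ctc.wstx.stax,org.apache.derby.iapi.services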
================================================ FILE: kettle-plugins/common/ui/src/main/resources/emr770sampleconfig.properties ================================================
#
# HITACHI VANTARA PROPRIETARY AND CONFIDENTIAL
#
# Copyright 2025 Hitachi Vantara. All rights reserved.
#
# THE NAME OF YOUR CONFIGURATION
name=Amazon EMR 7.7
#
# GENERAL CONFIGURATIONS
# These are comma-separated lists of the following:
#
# Directories and/or file lists available for this configuration
classpath=lib/avro-1.8.0.jar
#
# Native libraries
library.path=
#
# Comma-separated list of classes or package names to explicitly ignore when
# loading classes from the resources within this Hadoop configuration directory
# or the classpath property
# e.g.: org.apache.commons.log,org.apache.log4j
# Note, the two packages above are automatically included for all configurations
ignore.classes=com.ctc.wstx.stax
#
# SECURITY CONFIGURATIONS
#
# Kerberos Authentication
pentaho.authentication.default.kerberos.principal=exampleUser@EXAMPLE.COM
#
# Please define one of the following:
pentaho.authentication.default.kerberos.keytabLocation=
pentaho.authentication.default.kerberos.password=
#
# Secure Impersonation
# Please choose one of the following:
#
# disabled - when using an unsecured cluster
# simple - when using a 1 to 1 mapping from the server to your cluster
pentaho.authentication.default.mapping.impersonation.type=disabled
pentaho.authentication.default.mapping.server.credentials.kerberos.principal=exampleUser@EXAMPLE.COM
#
# Please define one of the following:
pentaho.authentication.default.mapping.server.credentials.kerberos.keytabLocation=
pentaho.authentication.default.mapping.server.credentials.kerberos.password=
#
# OOZIE
pentaho.oozie.proxy.user=oozie
================================================ FILE: kettle-plugins/common/ui/src/main/resources/hdi40sampleconfig.properties ================================================
#
# HITACHI VANTARA PROPRIETARY AND CONFIDENTIAL
#
# Copyright 2007 - 2022 Hitachi Vantara. All rights reserved.
#
# THE NAME OF YOUR CONFIGURATION
name=Azure HDInsights 4.0
#
# GENERAL CONFIGURATIONS
# These are comma-separated lists of the following:
#
# Directories and/or file lists available for this configuration
classpath=
#
# Native libraries
library.path=
#
# Classes or packages to ignore from the Hadoop configuration directory
ignore.classes=java.security.Permission,org.apache.derby
mr1.java.system.hadoop.cluster.path.separator=:
#
# SECURITY CONFIGURATIONS
#
# Kerberos Authentication
pentaho.authentication.default.kerberos.principal=exampleUser@EXAMPLE.COM
#
# Please define one of the following:
pentaho.authentication.default.kerberos.keytabLocation=
pentaho.authentication.default.kerberos.password=
#
# Secure Impersonation
# Please choose one of the following:
#
# disabled - when using an unsecured cluster
# simple - when using a 1 to 1 mapping from the server to your cluster
pentaho.authentication.default.mapping.impersonation.type=disabled
pentaho.authentication.default.mapping.server.credentials.kerberos.principal=exampleUser@EXAMPLE.COM
#
# Please define one of the following:
pentaho.authentication.default.mapping.server.credentials.kerberos.keytabLocation=
pentaho.authentication.default.mapping.server.credentials.kerberos.password=
#
# OOZIE
pentaho.oozie.proxy.user=oozie
================================================ FILE: kettle-plugins/common/ui/src/main/resources/org/pentaho/big/data/plugins/common/ui/messages/messages_en_US.properties ================================================
NamedClusterDialog.Shell.Title=Hadoop Cluster
NamedClusterDialog.NamedCluster.Configuration=Configuration
NamedClusterDialog.NamedCluster.Type=Type
NamedClusterDialog.NamedCluster.Name=Cluster name:
NamedClusterDialog.NamedCluster.DisplayName=Display Name
NamedClusterWidget.NamedCluster.New=New...
NamedClusterWidget.NamedCluster.Edit=Edit...
NamedClusterDialog.NamedCluster.GatewayCheckBoxTitle=Use a gateway to connect to the cluster
NamedClusterDialog.HDFS=HDFS
NamedClusterDialog.ZooKeeper=ZooKeeper
NamedClusterDialog.JobTracker=JobTracker
NamedClusterDialog.Oozie=Oozie
NamedClusterDialog.Gateway=Gateway
NamedClusterDialog.URL=URL:
NamedClusterDialog.Port=Port:
NamedClusterDialog.Hostname=Hostname:
NamedClusterDialog.Username=Username:
NamedClusterDialog.Password=Password:
NamedClusterDialog.Storage=Storage:
NamedClusterDialog.GatewayUrl=URL:
NamedClusterDialog.Kafka.GroupTitle=Kafka
NamedClusterDialog.Kafka.BootstrapServers.Label=Bootstrap servers:
NamedClusterDialog.Error=Error
NamedClusterDialog.Warning=Warning
NamedClusterDialog.ClusterNameMissing=You must enter a Hadoop cluster name to continue.
NamedClusterDialog.ShimIdentifierMissing=You must select a Vendor shim to continue.
NamedClusterDialog.ClusterNameExists.Title=Hadoop Cluster Exists
NamedClusterDialog.ClusterNameExists=Hadoop Cluster {0} already exists. Do you want to replace it with this one?
NamedClusterDialog.ClusterNameExists.Replace=Yes, Replace
NamedClusterDialog.ClusterNameExists.DoNotReplace=No
NamedClusterDialog.HadoopClusters=Hadoop clusters
NamedClusterDialog.NamedCluster.IsMapR=Use MapR client
NamedClusterDialog.NamedCluster.IsMapR.Title=Select if this configuration is for a MapR cluster
NamedClusterDialog.Shell.Doc=Data/Hadoop/Connect_to_Cluster
NamedClusterDialog.DialogError=Error opening dialog
ClusterTestDialog.Title=Hadoop Cluster Test
ClusterTestDialog.ClusterTest.Label=Testing Hadoop Cluster
ClusterTestDialog.ModuleTest=Cluster Test: {0}
ClusterTestDialog.TestResult=\t{0}: {1} {2}
ClusterTestDialog.TestsFinished=Tests Finished!
ClusterTestDialog.FailedToOpen=Failed to open the Cluster Test Dialog
ClusterTestResultsDialog.Title=Hadoop Cluster Test
ClusterTestResultsDialog.ClusterTestResults.Label=Results
ClusterTestResultsDialog.Shell.Doc.Title=Hadoop Cluster Test
ClusterTestResultsDialog.Shell.Doc.Header=Hadoop Cluster Test details
ClusterTestResultsDialog.FailedToOpen=Failed to open the Cluster Test Results Dialog
Spoon.Dialog.ErrorAddingNewConfigurationForCluster.Title=Error
Spoon.Dialog.ErrorAddingNewConfigurationForCluster.Message=Something went wrong trying to add the new configuration for the cluster: {0}
Spoon.Dialog.ErrorRenamingPreviousClusterConfig.Title=Error
Spoon.Dialog.ErrorRenamingPreviousClusterConfig.Message=Couldn't rename the previous shim configuration file
================================================ FILE: kettle-plugins/common/ui/src/test/java/org/pentaho/big/data/plugins/common/ui/HadoopClusterDelegateImplTest.java ================================================
/*! ******************************************************************************
 *
 * Pentaho
 *
 * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com
 *
 * Use of this software is governed by the Business Source License included
 * in the LICENSE.TXT file.
* * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.plugins.common.ui; import org.eclipse.swt.widgets.Shell; import org.junit.After; import org.junit.Before; import org.junit.Test; import org.pentaho.hadoop.shim.api.cluster.NamedClusterService; import org.pentaho.di.core.variables.VariableSpace; import org.pentaho.di.core.variables.Variables; import org.pentaho.di.i18n.BaseMessages; import org.pentaho.di.ui.spoon.Spoon; import org.pentaho.hadoop.shim.api.cluster.NamedCluster; import org.pentaho.metastore.api.IMetaStore; import org.pentaho.metastore.api.exceptions.MetaStoreException; import org.pentaho.metastore.stores.delegate.DelegatingMetaStore; import org.pentaho.metastore.stores.xml.XmlMetaStore; import org.pentaho.runtime.test.RuntimeTester; import org.pentaho.runtime.test.action.RuntimeTestActionService; import java.io.File; import java.io.IOException; import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.Paths; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertNull; import static org.mockito.Matchers.any; import static org.mockito.Mockito.doThrow; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.never; import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.verifyNoMoreInteractions; import static org.mockito.Mockito.when; import static org.pentaho.big.data.plugins.common.ui.HadoopClusterDelegateImpl.PKG; import static org.pentaho.big.data.plugins.common.ui.HadoopClusterDelegateImpl .SPOON_DIALOG_ERROR_SAVING_NAMED_CLUSTER_MESSAGE; import static org.pentaho.big.data.plugins.common.ui.HadoopClusterDelegateImpl .SPOON_DIALOG_ERROR_SAVING_NAMED_CLUSTER_TITLE; /** * Created by bryan on 10/19/15. 
*/ public class HadoopClusterDelegateImplTest { private Spoon spoon; private NamedClusterService namedClusterService; private RuntimeTestActionService runtimeTestActionService; private RuntimeTester runtimeTester; private HadoopClusterDelegateImpl hadoopClusterDelegate; private IMetaStore metaStore; private NamedCluster namedCluster; private String namedClusterName; private CommonDialogFactory commonDialogFactory; private Shell shell; private VariableSpace variables; private Path tempDirectoryName; @Before public void setup() throws IOException { spoon = mock( Spoon.class ); shell = mock( Shell.class ); when( spoon.getShell() ).thenReturn( shell ); namedClusterService = mock( NamedClusterService.class ); runtimeTestActionService = mock( RuntimeTestActionService.class ); runtimeTester = mock( RuntimeTester.class ); metaStore = mock( IMetaStore.class ); namedCluster = mock( NamedCluster.class ); variables = new Variables(); namedClusterName = "namedClusterName"; when( namedCluster.getName() ).thenReturn( namedClusterName ); commonDialogFactory = mock( CommonDialogFactory.class ); hadoopClusterDelegate = new HadoopClusterDelegateImpl( spoon, namedClusterService, runtimeTestActionService, runtimeTester, commonDialogFactory ); // avoid putting test data in the local user's metastore tempDirectoryName = Files.createTempDirectory( this.getClass().getName() ); System.setProperty( "user.home", tempDirectoryName.toString() ); String configurationDirectory = System.getProperty( "user.home" ) + File.separator + ".pentaho" + File.separator + "metastore" + File.separator + "pentaho" + File.separator + "NamedCluster" + File.separator + "Configs"; Files.createDirectories( Paths.get( configurationDirectory + "/" + namedClusterName ) ); } @After public void tearDown() { deleteDirectory( tempDirectoryName.toFile() ); } private boolean deleteDirectory( File directoryToBeDeleted) { File[] allContents = directoryToBeDeleted.listFiles(); if (allContents != null) { for (File file : allContents) { deleteDirectory(file); } } return directoryToBeDeleted.delete(); } @Test public void testSimpleConstructor() { assertNotNull( new HadoopClusterDelegateImpl( spoon, namedClusterService, runtimeTestActionService, runtimeTester ) ); } @Test public void testDupeNamedClusterNullNc() { hadoopClusterDelegate.dupeNamedCluster( metaStore, null, shell ); verifyNoMoreInteractions( metaStore, shell ); } @Test public void testDupeNamedClusterNullNewName() { NamedClusterDialogImpl namedClusterDialog = mock( NamedClusterDialogImpl.class ); NamedCluster clonedNamedCluster = mock( NamedCluster.class ); when( namedCluster.clone() ).thenReturn( clonedNamedCluster ); when( commonDialogFactory .createNamedClusterDialog( shell, namedClusterService, runtimeTestActionService, runtimeTester, clonedNamedCluster ) ).thenReturn( namedClusterDialog ); when( namedClusterDialog.open() ).thenReturn( null ); hadoopClusterDelegate.dupeNamedCluster( metaStore, namedCluster, shell ); verify( namedClusterDialog ).setNewClusterCheck( true ); verify( clonedNamedCluster ).setName( BaseMessages.getString( Spoon.class, HadoopClusterDelegateImpl.SPOON_VARIOUS_DUPE_NAME ) + namedClusterName ); verifyNoMoreInteractions( metaStore ); } @Test public void testDupeNamedClusterNullMetastore() throws MetaStoreException, IOException { String newName = "newName"; NamedClusterDialogImpl namedClusterDialog = mock( NamedClusterDialogImpl.class ); NamedCluster clonedNamedCluster = mock( NamedCluster.class ); DelegatingMetaStore spoonMetastore = mock( DelegatingMetaStore.class 
); XmlMetaStore xmlMetaStore = mock( XmlMetaStore.class ); when( spoon.getMetaStore() ).thenReturn( spoonMetastore ); when( spoonMetastore.getActiveMetaStore() ).thenReturn( xmlMetaStore ); when( namedCluster.clone() ).thenReturn( clonedNamedCluster ); when( namedCluster.getShimIdentifier() ).thenReturn( "oldShimId" ); when( clonedNamedCluster.getShimIdentifier() ).thenReturn( "shimId" ); when( commonDialogFactory .createNamedClusterDialog( shell, namedClusterService, runtimeTestActionService, runtimeTester, clonedNamedCluster ) ).thenReturn( namedClusterDialog ); when( namedClusterDialog.open() ).thenReturn( newName ); hadoopClusterDelegate.dupeNamedCluster( null, namedCluster, shell ); verify( namedClusterDialog ).setNewClusterCheck( true ); verify( clonedNamedCluster ).setName( BaseMessages.getString( Spoon.class, HadoopClusterDelegateImpl.SPOON_VARIOUS_DUPE_NAME ) + namedClusterName ); verify( namedClusterService ).create( clonedNamedCluster, spoonMetastore ); verify( spoon ).refreshTree( HadoopClusterDelegateImpl.STRING_NAMED_CLUSTERS ); } @Test public void testDelNamedCluster() throws MetaStoreException { when( namedClusterService.read( namedClusterName, metaStore ) ).thenReturn( namedCluster ); hadoopClusterDelegate.delNamedCluster( metaStore, namedCluster ); verify( namedClusterService ).delete( namedClusterName, metaStore ); verify( spoon ).refreshTree( HadoopClusterDelegateImpl.STRING_NAMED_CLUSTERS ); verify( spoon ).setShellText(); } @Test public void testDelNamedClusterNull() throws MetaStoreException { when( namedClusterService.read( namedClusterName, metaStore ) ).thenReturn( null ); hadoopClusterDelegate.delNamedCluster( metaStore, namedCluster ); verify( namedClusterService, never() ).delete( namedClusterName, metaStore ); verify( spoon ).refreshTree( HadoopClusterDelegateImpl.STRING_NAMED_CLUSTERS ); verify( spoon ).setShellText(); } @Test public void testDelNamedClusterNullMetastore() throws MetaStoreException { DelegatingMetaStore metaStore2 = mock( DelegatingMetaStore.class ); when( spoon.getMetaStore() ).thenReturn( metaStore2 ); when( namedClusterService.read( namedClusterName, metaStore2 ) ).thenReturn( namedCluster ); hadoopClusterDelegate.delNamedCluster( null, namedCluster ); verify( namedClusterService ).delete( namedClusterName, metaStore2 ); verify( spoon ).refreshTree( HadoopClusterDelegateImpl.STRING_NAMED_CLUSTERS ); verify( spoon ).setShellText(); } @Test public void testDelNamedClusterException() throws MetaStoreException { when( namedClusterService.read( namedClusterName, metaStore ) ).thenReturn( namedCluster ); MetaStoreException metaStoreException = new MetaStoreException(); doThrow( metaStoreException ).when( namedClusterService ).delete( namedClusterName, metaStore ); hadoopClusterDelegate.delNamedCluster( metaStore, namedCluster ); verify( commonDialogFactory ).createErrorDialog( shell, BaseMessages.getString( PKG, HadoopClusterDelegateImpl.SPOON_DIALOG_ERROR_DELETING_NAMED_CLUSTER_TITLE ), BaseMessages .getString( PKG, HadoopClusterDelegateImpl.SPOON_DIALOG_ERROR_DELETING_NAMED_CLUSTER_MESSAGE, namedClusterName ), metaStoreException ); verify( spoon ).refreshTree( HadoopClusterDelegateImpl.STRING_NAMED_CLUSTERS ); verify( spoon ).setShellText(); } @Test public void testEditNamedClusterNullMetastore() throws MetaStoreException { DelegatingMetaStore spoonMetastore = mock( DelegatingMetaStore.class ); when( spoon.getMetaStore() ).thenReturn( spoonMetastore ); NamedClusterDialogImpl namedClusterDialog = mock( NamedClusterDialogImpl.class ); 
NamedCluster clonedNamedCluster = mock( NamedCluster.class ); when( namedClusterService.read( namedClusterName, spoonMetastore ) ).thenReturn( namedCluster ); when( namedCluster.clone() ).thenReturn( clonedNamedCluster ); String shimId = "shimId"; when( namedCluster.getShimIdentifier() ).thenReturn( shimId ); when( commonDialogFactory .createNamedClusterDialog( shell, namedClusterService, runtimeTestActionService, runtimeTester, clonedNamedCluster ) ).thenReturn( namedClusterDialog ); String clonedName = "clonedName"; when( clonedNamedCluster.getName() ).thenReturn( clonedName ); when( clonedNamedCluster.getShimIdentifier() ).thenReturn( shimId ); when( namedClusterDialog.open() ).thenReturn( clonedName ); when( namedClusterDialog.getNamedCluster() ).thenReturn( clonedNamedCluster ); assertEquals( clonedName, hadoopClusterDelegate.editNamedCluster( null, namedCluster, shell ) ); verify( namedClusterDialog ).setNewClusterCheck( false ); verify( spoon ).refreshTree( HadoopClusterDelegateImpl.STRING_NAMED_CLUSTERS ); verify( namedClusterService ).create( clonedNamedCluster, spoonMetastore ); } @Test public void testEditNamedClusterNull() throws MetaStoreException { NamedClusterDialogImpl namedClusterDialog = mock( NamedClusterDialogImpl.class ); NamedCluster clonedNamedCluster = mock( NamedCluster.class ); when( namedClusterService.read( namedClusterName, metaStore ) ).thenReturn( namedCluster ); when( namedCluster.clone() ).thenReturn( clonedNamedCluster ); when( commonDialogFactory .createNamedClusterDialog( shell, namedClusterService, runtimeTestActionService, runtimeTester, clonedNamedCluster ) ).thenReturn( namedClusterDialog ); when( namedClusterDialog.open() ).thenReturn( null ); hadoopClusterDelegate.editNamedCluster( metaStore, namedCluster, shell ); verify( namedClusterDialog ).setNewClusterCheck( false ); verifyNoMoreInteractions( namedClusterService ); } @Test public void testNewNamedClusterNullMetastore() throws MetaStoreException { DelegatingMetaStore spoonMetastore = mock( DelegatingMetaStore.class ); when( spoon.getMetaStore() ).thenReturn( spoonMetastore ); when( namedClusterService.getClusterTemplate() ).thenReturn( namedCluster ); NamedClusterDialogImpl namedClusterDialog = mock( NamedClusterDialogImpl.class ); when( commonDialogFactory .createNamedClusterDialog( shell, namedClusterService, runtimeTestActionService, runtimeTester, namedCluster ) ).thenReturn( namedClusterDialog ); when( namedClusterDialog.open() ).thenReturn( namedClusterName ); assertEquals( namedClusterName, hadoopClusterDelegate.newNamedCluster( variables, null, shell ) ); verify( namedClusterDialog ).setNewClusterCheck( true ); verify( namedCluster ).shareVariablesWith( variables ); verify( namedClusterService ).create( namedCluster, spoonMetastore ); verify( spoon ).refreshTree( HadoopClusterDelegateImpl.STRING_NAMED_CLUSTERS ); } @Test public void testNewNamedClusterNullVariables() throws MetaStoreException { when( namedClusterService.getClusterTemplate() ).thenReturn( namedCluster ); NamedClusterDialogImpl namedClusterDialog = mock( NamedClusterDialogImpl.class ); when( commonDialogFactory .createNamedClusterDialog( shell, namedClusterService, runtimeTestActionService, runtimeTester, namedCluster ) ).thenReturn( namedClusterDialog ); when( namedClusterDialog.open() ).thenReturn( namedClusterName ); assertEquals( namedClusterName, hadoopClusterDelegate.newNamedCluster( null, metaStore, shell ) ); verify( namedClusterDialog ).setNewClusterCheck( true ); verify( namedCluster ).initializeVariablesFrom( 
null ); verify( namedClusterService ).create( namedCluster, metaStore ); verify( spoon ).refreshTree( HadoopClusterDelegateImpl.STRING_NAMED_CLUSTERS ); } @Test public void testNewNamedClusterNullResult() throws MetaStoreException { when( namedClusterService.getClusterTemplate() ).thenReturn( namedCluster ); NamedClusterDialogImpl namedClusterDialog = mock( NamedClusterDialogImpl.class ); when( commonDialogFactory .createNamedClusterDialog( shell, namedClusterService, runtimeTestActionService, runtimeTester, namedCluster ) ).thenReturn( namedClusterDialog ); when( namedClusterDialog.open() ).thenReturn( null ); assertNull( hadoopClusterDelegate.newNamedCluster( null, metaStore, shell ) ); verify( namedClusterDialog ).setNewClusterCheck( true ); verify( namedClusterService, times( 0 ) ).create( any( NamedCluster.class ), any( IMetaStore.class ) ); verify( spoon, times( 0 ) ).refreshTree( HadoopClusterDelegateImpl.STRING_NAMED_CLUSTERS ); } @Test public void testNewNamedClusterErrorSaving() throws MetaStoreException { when( namedClusterService.getClusterTemplate() ).thenReturn( namedCluster ); NamedClusterDialogImpl namedClusterDialog = mock( NamedClusterDialogImpl.class ); when( commonDialogFactory .createNamedClusterDialog( shell, namedClusterService, runtimeTestActionService, runtimeTester, namedCluster ) ).thenReturn( namedClusterDialog ); when( namedClusterDialog.open() ).thenReturn( namedClusterName ); MetaStoreException metaStoreException = new MetaStoreException(); doThrow( metaStoreException ).when( namedClusterService ).create( namedCluster, metaStore ); hadoopClusterDelegate.newNamedCluster( variables, metaStore, shell ); verify( commonDialogFactory ).createErrorDialog( shell, BaseMessages.getString( PKG, SPOON_DIALOG_ERROR_SAVING_NAMED_CLUSTER_TITLE ), BaseMessages.getString( PKG, SPOON_DIALOG_ERROR_SAVING_NAMED_CLUSTER_MESSAGE, namedCluster.getName() ), metaStoreException ); } } ================================================ FILE: kettle-plugins/common/ui/src/test/java/org/pentaho/big/data/plugins/common/ui/TestClusterTestDialog.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. 
* * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.plugins.common.ui; import org.apache.commons.lang.exception.ExceptionUtils; import org.eclipse.swt.widgets.Label; import org.eclipse.swt.widgets.ProgressBar; import org.eclipse.swt.widgets.Shell; import org.junit.AfterClass; import org.junit.Before; import org.junit.BeforeClass; import org.junit.Test; import org.mockito.Mockito; import org.pentaho.di.core.exception.KettleException; import org.pentaho.di.core.logging.KettleLogStore; import org.pentaho.di.core.logging.LogChannelInterface; import org.pentaho.di.core.logging.LogChannelInterfaceFactory; import org.pentaho.di.ui.core.PropsUI; import org.pentaho.hadoop.shim.api.cluster.NamedCluster; import org.pentaho.runtime.test.RuntimeTest; import org.pentaho.runtime.test.RuntimeTestStatus; import org.pentaho.runtime.test.RuntimeTester; import org.pentaho.runtime.test.module.RuntimeTestModuleResults; import org.pentaho.runtime.test.result.RuntimeTestEntrySeverity; import org.pentaho.runtime.test.result.RuntimeTestResult; import org.pentaho.runtime.test.result.RuntimeTestResultEntry; import java.util.ArrayList; public class TestClusterTestDialog { private static LogChannelInterfaceFactory oldLogChannelInterfaceFactory; private static LogChannelInterface logChannelInterface; private ClusterTestDialog testDialog; private Shell parent; private NamedCluster namedCluster; private RuntimeTester runtimeTester; private PropsUI props; @BeforeClass public static void beforeClass() { KettleLogStore.init(); oldLogChannelInterfaceFactory = KettleLogStore.getLogChannelInterfaceFactory(); setKettleLogFactoryWithMock(); } public static void setKettleLogFactoryWithMock() { LogChannelInterfaceFactory logChannelInterfaceFactory = Mockito.mock( LogChannelInterfaceFactory.class ); logChannelInterface = Mockito.mock( LogChannelInterface.class ); Mockito.when( logChannelInterfaceFactory.create( Mockito.any() ) ).thenReturn( logChannelInterface ); KettleLogStore.setLogChannelInterfaceFactory( logChannelInterfaceFactory ); } @Before public void setup() throws KettleException { parent = Mockito.mock( Shell.class ); namedCluster = Mockito.mock( NamedCluster.class ); runtimeTester = Mockito.mock( RuntimeTester.class ); props = Mockito.mock( PropsUI.class ); testDialog = new ClusterTestDialog( parent, namedCluster, runtimeTester ) { @Override protected PropsUI getPropsUIInstance() { return props; } @Override public void dispose() { } }; } @Test public void testExceptionIsPrintedToLog() throws KettleException { ProgressBar progressBar = Mockito.mock( ProgressBar.class ); RuntimeTestStatus clusterTestStatus = Mockito.mock( RuntimeTestStatus.class ); Label testLabel = Mockito.mock( Label.class ); RuntimeTestModuleResults runtimeTestModuleResults = Mockito.mock( RuntimeTestModuleResults.class ); RuntimeTestResult result = Mockito.mock( RuntimeTestResult.class ); RuntimeTestResultEntry entry = Mockito.mock( RuntimeTestResultEntry.class ); Exception exception = new Exception(); ArrayList results = new ArrayList<>(); results.add( runtimeTestModuleResults ); ArrayList runtimeTestResults = new ArrayList<>(); runtimeTestResults.add( result ); Mockito.when( clusterTestStatus.getModuleResults() ).thenReturn( results ); Mockito.when( clusterTestStatus.isDone() ).thenReturn( true ); Mockito.when( runtimeTestModuleResults.getRuntimeTestResults() ).thenReturn( runtimeTestResults ); Mockito.when( result.getRuntimeTest() ).thenReturn( Mockito.mock( 
RuntimeTest.class ) ); Mockito.when( result.getRuntimeTestResultEntries() ).thenReturn( new ArrayList<>() ); Mockito.when( result.getOverallStatusEntry() ).thenReturn( entry ); Mockito.when( entry.getSeverity() ).thenReturn( RuntimeTestEntrySeverity.FATAL ); Mockito.when( entry.getException() ).thenReturn( exception ); testDialog.getRunnable( progressBar, clusterTestStatus, testLabel ).run(); Mockito.verify( logChannelInterface, Mockito.times( 1 ) ).logBasic( ExceptionUtils.getStackTrace( exception ) ); } @Test public void testExceptionsArePrintedToLog() throws KettleException { ProgressBar progressBar = Mockito.mock( ProgressBar.class ); RuntimeTestStatus clusterTestStatus = Mockito.mock( RuntimeTestStatus.class ); Label testLabel = Mockito.mock( Label.class ); RuntimeTestModuleResults runtimeTestModuleResults = Mockito.mock( RuntimeTestModuleResults.class ); RuntimeTestResult result = Mockito.mock( RuntimeTestResult.class ); RuntimeTestResultEntry entry = Mockito.mock( RuntimeTestResultEntry.class ); Exception exception = new Exception(); ArrayList results = new ArrayList<>(); results.add( runtimeTestModuleResults ); ArrayList runtimeTestResults = new ArrayList<>(); runtimeTestResults.add( result ); ArrayList entries = new ArrayList<>(); entries.add( entry ); entries.add( entry ); Mockito.when( clusterTestStatus.getModuleResults() ).thenReturn( results ); Mockito.when( clusterTestStatus.isDone() ).thenReturn( true ); Mockito.when( runtimeTestModuleResults.getRuntimeTestResults() ).thenReturn( runtimeTestResults ); Mockito.when( result.getRuntimeTest() ).thenReturn( Mockito.mock( RuntimeTest.class ) ); Mockito.when( result.getRuntimeTestResultEntries() ).thenReturn( entries ); Mockito.when( entry.getSeverity() ).thenReturn( RuntimeTestEntrySeverity.FATAL ); Mockito.when( entry.getException() ).thenReturn( exception ); testDialog.getRunnable( progressBar, clusterTestStatus, testLabel ).run(); Mockito.verify( logChannelInterface, Mockito.times( 2 ) ).logBasic( ExceptionUtils.getStackTrace( exception ) ); } @AfterClass public static void tearDown() { KettleLogStore.setLogChannelInterfaceFactory( oldLogChannelInterfaceFactory ); } } ================================================ FILE: kettle-plugins/formats/assemblies/plugin/pom.xml ================================================ 4.0.0 formats-assemblies pentaho 11.1.0.0-SNAPSHOT pdi-formats-plugin pom PDI Formats Plugin Distribution ${project.basedir}/src/main/resources ${project.build.directory}/assembly pentaho pdi-formats-core ${project.version} ================================================ FILE: kettle-plugins/formats/assemblies/plugin/src/assembly/assembly.xml ================================================ zip zip ${resources.directory} . true ${assembly.dir} . . pentaho:pdi-formats-core:jar false runtime . 
false false pentaho:pdi-formats-core:jar runtime false lib pentaho:pdi-formats-core:* pentaho:pentaho-big-data-kettle-plugins-formats-meta ================================================ FILE: kettle-plugins/formats/assemblies/plugin/src/main/resources/version.xml ================================================ ${project.version} ================================================ FILE: kettle-plugins/formats/assemblies/pom.xml ================================================ 4.0.0 pentaho pentaho-big-data-kettle-plugins-formats 11.1.0.0-SNAPSHOT formats-assemblies pom PDI Formats Plugin Assemblies plugin ================================================ FILE: kettle-plugins/formats/core/pom.xml ================================================ 4.0.0 pentaho pentaho-big-data-kettle-plugins-formats 11.1.0.0-SNAPSHOT pdi-formats-core PDI Formats Core site 3.0 3.3.0-I20070606-0010 4.0.0 1.9.6 src/main/resources false src/main/resources-filtered true pentaho pentaho-big-data-kettle-plugins-formats-meta ${project.version} org.pentaho shim-api ${pentaho-hadoop-shims.version} provided pentaho-kettle kettle-core ${pdi.version} provided org.pentaho.di.plugins pentaho-metastore-locator-api ${pdi.version} provided pentaho-kettle kettle-engine ${pdi.version} provided pentaho-kettle kettle-ui-swt ${pdi.version} provided org.eclipse jface ${jface.version} provided * * org.apache.orc orc-core ${org.apache.orc.version} org.apache.hadoop hadoop-client-api junit junit ${dependency.junit.revision} test org.mockito mockito-core ${mockito.version} test org.mockito mockito-inline ${mockito.version} test pentaho-kettle kettle-core ${pdi.version} tests test pentaho-kettle kettle-engine ${pdi.version} tests test org.pentaho pentaho-hadoop-shims-common-services-api ${pdi.version} compile pentaho pentaho-big-data-impl-cluster ${pdi.version} compile pentaho pentaho-big-data-legacy-core ${project.version} compile org.apache.parquet parquet-hadoop ${parquet.version} provided ================================================ FILE: kettle-plugins/formats/core/src/main/java/org/pentaho/big/data/kettle/plugins/formats/impl/NamedClusterResolver.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. 
* * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.formats.impl; import org.pentaho.big.data.api.services.BigDataServicesHelper; import org.pentaho.big.data.impl.cluster.NamedClusterManager; import org.pentaho.di.core.logging.LogChannel; import org.pentaho.di.core.logging.LogChannelInterface; import org.pentaho.di.core.service.PluginServiceLoader; import org.pentaho.hadoop.shim.api.cluster.NamedCluster; import org.pentaho.hadoop.shim.api.cluster.NamedClusterService; import org.pentaho.hadoop.shim.api.cluster.NamedClusterServiceLocator; import org.pentaho.metastore.locator.api.MetastoreLocator; import java.net.URI; import java.net.URISyntaxException; import java.util.Collection; import java.util.Optional; public class NamedClusterResolver { private final NamedClusterServiceLocator namedClusterServiceLocator; private final NamedClusterService namedClusterService; private MetastoreLocator metaStoreService; private static NamedClusterResolver namedClusterResolver = null; private NamedClusterResolver() { this( BigDataServicesHelper.getNamedClusterServiceLocator(), NamedClusterManager.getInstance() ); } private NamedClusterResolver( NamedClusterServiceLocator namedClusterServiceLocator, NamedClusterService namedClusterService ) { this.namedClusterServiceLocator = namedClusterServiceLocator; this.namedClusterService = namedClusterService; } public static synchronized NamedClusterResolver getInstance() { if ( namedClusterResolver == null ) { namedClusterResolver = new NamedClusterResolver(); } return namedClusterResolver; } protected synchronized MetastoreLocator getMetastoreLocator() { if ( this.metaStoreService == null ) { try { Collection metastoreLocators = PluginServiceLoader.loadServices( MetastoreLocator.class ); this.metaStoreService = metastoreLocators.stream().findFirst().orElse( null ); } catch ( Exception e ) { LOG.logError( "Error getting MetastoreLocator", e ); } } return this.metaStoreService; } private static final LogChannelInterface LOG = LogChannel.GENERAL; public NamedCluster resolveNamedCluster( String fileName ) { return resolveNamedCluster( fileName, null ); } public NamedCluster resolveNamedCluster( String fileName, String embeddedMetastoreKey ) { NamedCluster namedCluster = null; Optional uri = fileUri( fileName ); if ( uri.isPresent() ) { String scheme = uri.get().getScheme(); String hostName = uri.get().getHost(); MetastoreLocator metastoreLocator = getMetastoreLocator(); if ( metastoreLocator != null ) { if ( scheme != null && scheme.equals( "hc" ) ) { namedCluster = namedClusterService.getNamedClusterByName( hostName, metastoreLocator.getMetastore() ); if ( namedCluster == null && embeddedMetastoreKey != null ) { namedCluster = namedClusterService .getNamedClusterByName( hostName, metastoreLocator.getExplicitMetastore( embeddedMetastoreKey ) ); } } else { namedCluster = namedClusterService.getNamedClusterByHost( hostName, metastoreLocator.getMetastore( embeddedMetastoreKey ) ); if ( namedCluster == null && embeddedMetastoreKey != null ) { namedCluster = namedClusterService .getNamedClusterByHost( hostName, metastoreLocator.getExplicitMetastore( embeddedMetastoreKey ) ); } } } } return namedCluster; } private Optional fileUri( String fileName ) { try { return Optional.of( new URI( fileName ) ); } catch ( URISyntaxException e ) { LOG.logDebug( String.format( "Couldn't parse %s as a URI.", fileName ) ); return Optional.empty(); } } public NamedClusterServiceLocator 
getNamedClusterServiceLocator() { return namedClusterServiceLocator; } } ================================================ FILE: kettle-plugins/formats/core/src/main/java/org/pentaho/big/data/kettle/plugins/formats/impl/NullableValuesEnum.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.formats.impl; /** * Enum with valid list of Nullable values - used for the Nullable combo box *

* Also contains convience methods to get the default value and return a list of values as string to populate combo box */ public enum NullableValuesEnum { YES( "Yes" ), NO( "No" ); private String value; NullableValuesEnum( String value ) { this.value = value; } public String getValue() { return value; } public static NullableValuesEnum getDefaultValue() { return NullableValuesEnum.YES; } public static String[] getValuesArr() { String[] valueArr = new String[ NullableValuesEnum.values().length ]; int i = 0; for ( NullableValuesEnum nullValueEnum : NullableValuesEnum.values() ) { valueArr[ i++ ] = nullValueEnum.getValue(); } return valueArr; } } ================================================ FILE: kettle-plugins/formats/core/src/main/java/org/pentaho/big/data/kettle/plugins/formats/impl/orc/BaseOrcStepDialog.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.formats.impl.orc; import org.eclipse.jface.window.DefaultToolTip; import org.eclipse.jface.window.ToolTip; import org.eclipse.swt.SWT; import org.eclipse.swt.events.ModifyListener; import org.eclipse.swt.events.MouseEvent; import org.eclipse.swt.events.MouseTrackAdapter; import org.eclipse.swt.events.ShellAdapter; import org.eclipse.swt.events.ShellEvent; import org.eclipse.swt.graphics.GC; import org.eclipse.swt.graphics.Point; import org.eclipse.swt.layout.FormAttachment; import org.eclipse.swt.layout.FormData; import org.eclipse.swt.layout.FormLayout; import org.eclipse.swt.widgets.Button; import org.eclipse.swt.widgets.Composite; import org.eclipse.swt.widgets.Control; import org.eclipse.swt.widgets.Display; import org.eclipse.swt.widgets.Label; import org.eclipse.swt.widgets.Listener; import org.eclipse.swt.widgets.Shell; import org.eclipse.swt.widgets.Table; import org.eclipse.swt.widgets.TableItem; import org.eclipse.swt.widgets.Text; import org.pentaho.di.core.util.StringUtil; import org.pentaho.di.core.util.Utils; import org.pentaho.di.i18n.BaseMessages; import org.pentaho.di.trans.TransMeta; import org.pentaho.di.trans.step.BaseStepMeta; import org.pentaho.di.trans.step.StepDialogInterface; import org.pentaho.di.trans.step.StepMetaInterface; import org.pentaho.di.ui.core.ConstUI; import org.pentaho.di.ui.core.events.dialog.SelectionAdapterFileDialogTextVar; import org.pentaho.di.ui.core.events.dialog.SelectionAdapterOptions; import org.pentaho.di.ui.core.events.dialog.SelectionOperation; import org.pentaho.di.ui.core.gui.GUIResource; import org.pentaho.di.ui.core.widget.TextVar; import org.pentaho.di.ui.trans.step.BaseStepDialog; public abstract class BaseOrcStepDialog extends BaseStepDialog implements StepDialogInterface { protected final Class PKG = getClass(); protected static final Class BPKG = BaseOrcStepDialog.class; protected T meta; protected ModifyListener lsMod; public static final int MARGIN = 15; public static final int FIELDS_SEP = 10; public static final int FIELD_LABEL_SEP = 5; public static final int FIELD_SMALL = 150; public static final int FIELD_MEDIUM = 250; public static final int FIELD_LARGE = 350; private static final String ELLIPSIS = "..."; private static final int TABLE_ITEM_MARGIN 
= 2; private static final int TOOLTIP_SHOW_DELAY = 350; private static final int TOOLTIP_HIDE_DELAY = 2000; // width of the icon in a varfield protected static final int VAR_EXTRA_WIDTH = GUIResource.getInstance().getImageVariable().getBounds().width; protected TextVar wPath; protected Button wbBrowse; public BaseOrcStepDialog( Shell parent, T in, TransMeta transMeta, String sname ) { super( parent, (BaseStepMeta) in, transMeta, sname ); meta = in; } @Override public String open() { Shell parent = getParent(); Display display = parent.getDisplay(); shell = new Shell( parent, SWT.DIALOG_TRIM | SWT.RESIZE ); props.setLook( shell ); setShellImage( shell, meta ); lsMod = e -> meta.setChanged(); changed = meta.hasChanged(); createUI(); // Detect X or ALT-F4 or something that kills this window... shell.addShellListener( new ShellAdapter() { @Override public void shellClosed( ShellEvent e ) { cancel(); } } ); int height = Math.max( getMinHeight( shell, getWidth() ), getHeight() ); shell.setMinimumSize( getWidth(), height ); shell.setSize( getWidth(), height ); getData( meta ); shell.open(); wStepname.setFocus(); while ( !shell.isDisposed() ) { if ( !display.readAndDispatch() ) { display.sleep(); } } return stepname; } protected abstract void createUI(); protected Control createFooter( Composite shell ) { wCancel = new Button( shell, SWT.PUSH ); wCancel.setText( getMsg( "System.Button.Cancel" ) ); wCancel.addListener( SWT.Selection, lsCancel ); new FD( wCancel ).right( 100, 0 ).bottom( 100, 0 ).apply(); // Some buttons wOK = new Button( shell, SWT.PUSH ); wOK.setText( getMsg( "System.Button.OK" ) ); wOK.addListener( SWT.Selection, lsOK ); new FD( wOK ).right( wCancel, -FIELD_LABEL_SEP ).bottom( 100, 0 ).apply(); lsPreview = getPreview(); if ( lsPreview != null ) { wPreview = new Button( shell, SWT.PUSH ); wPreview.setText( getBaseMsg( "BaseStepDialog.Preview" ) ); wPreview.pack(); wPreview.addListener( SWT.Selection, lsPreview ); int offset = wPreview.getBounds().width / 2; new FD( wPreview ).left( 50, -offset ).bottom( 100, 0 ).apply(); } return wCancel; } protected void cancel() { stepname = null; meta.setChanged( changed ); dispose(); } protected void ok() { if ( Utils.isEmpty( wStepname.getText() ) ) { return; } stepname = wStepname.getText(); getInfo( meta, false ); dispose(); } protected abstract String getStepTitle(); /** * Read the data from the meta object and show it in this dialog. * * @param meta The meta object to obtain the data from. */ protected abstract void getData( T meta ); /** * Fill meta object from UI options. * * @param meta meta object * @param preview flag for preview or real options should be used. Currently, only one option is differ for preview - EOL * chars. It uses as "mixed" for be able to preview any file. 
*/ protected abstract void getInfo( T meta, boolean preview ); protected abstract int getWidth(); protected abstract int getHeight(); protected abstract Listener getPreview(); protected Label createHeader() { // main form FormLayout formLayout = new FormLayout(); formLayout.marginWidth = 15; formLayout.marginHeight = 15; shell.setLayout( formLayout ); // title shell.setText( getStepTitle() ); // buttons lsOK = e -> ok(); lsCancel = e -> cancel(); // Stepname label wlStepname = new Label( shell, SWT.RIGHT ); wlStepname.setText( getBaseMsg( "BaseStepDialog.StepName" ) ); props.setLook( wlStepname ); new FD( wlStepname ).left( 0, 0 ).top( 0, 0 ).apply(); // Stepname field wStepname = new Text( shell, SWT.SINGLE | SWT.LEFT | SWT.BORDER ); wStepname.setText( stepname ); props.setLook( wStepname ); wStepname.addModifyListener( lsMod ); new FD( wStepname ).left( 0, 0 ).top( wlStepname, FIELD_LABEL_SEP ).width( FIELD_MEDIUM ).rright().apply(); // separator Label separator = new Label( shell, SWT.HORIZONTAL | SWT.SEPARATOR ); FormData fdSpacer = new FormData(); fdSpacer.height = 2; fdSpacer.left = new FormAttachment( 0, 0 ); fdSpacer.top = new FormAttachment( wStepname, 15 ); fdSpacer.right = new FormAttachment( 100, 0 ); separator.setLayoutData( fdSpacer ); addIcon(); return separator; } protected void addIcon() { Label wicon = new Label( shell, SWT.RIGHT ); String stepId = meta.getParentStepMeta().getStepID(); wicon.setImage( GUIResource.getInstance().getImagesSteps().get( stepId ).getAsBitmapForSize( shell.getDisplay(), ConstUI.LARGE_ICON_SIZE, ConstUI.LARGE_ICON_SIZE ) ); FormData fdlicon = new FormData(); fdlicon.top = new FormAttachment( 0, 0 ); fdlicon.right = new FormAttachment( 100, 0 ); wicon.setLayoutData( fdlicon ); props.setLook( wicon ); } protected Control addFileWidgets( Control prev ) { Label wlPath = new Label( shell, SWT.RIGHT ); wlPath.setText( getBaseMsg( "OrcDialog.Filename.Label" ) ); props.setLook( wlPath ); new FD( wlPath ).left( 0, 0 ).top( prev, MARGIN ).apply(); wPath = new TextVar( transMeta, shell, SWT.SINGLE | SWT.LEFT | SWT.BORDER ); wPath.addModifyListener( event -> { if ( wPreview != null ) { wPreview.setEnabled( !Utils.isEmpty( wPath.getText() ) ); } } ); props.setLook( wPath ); wPath.addModifyListener( lsMod ); new FD( wPath ).left( 0, 0 ).top( wlPath, FIELD_LABEL_SEP ).width( FIELD_LARGE + VAR_EXTRA_WIDTH ).rright().apply(); wbBrowse = new Button( shell, SWT.PUSH ); props.setLook( wbBrowse ); wbBrowse.setText( getMsg( "System.Button.Browse" ) ); wbBrowse.addSelectionListener( new SelectionAdapterFileDialogTextVar( log, wPath, transMeta, new SelectionAdapterOptions( transMeta.getBowl(), selectionOperation() ) ) ); int bOffset = ( wbBrowse.computeSize( SWT.DEFAULT, SWT.DEFAULT, false ).y - wPath.computeSize( SWT.DEFAULT, SWT.DEFAULT, false ).y ) / 2; new FD( wbBrowse ).left( wPath, FIELD_LABEL_SEP ).top( wlPath, FIELD_LABEL_SEP - bOffset ).apply(); return wPath; } protected abstract SelectionOperation selectionOperation(); protected String getBaseMsg( String key ) { return BaseMessages.getString( BPKG, key ); } protected String getMsg( String key ) { return BaseMessages.getString( PKG, key ); } /** * Class for apply layout settings to SWT controls. 
*/ protected class FD { private final Control control; private final FormData fd; public FD( Control control ) { this.control = control; props.setLook( control ); fd = new FormData(); } public FD width( int width ) { fd.width = width; return this; } public FD height( int height ) { fd.height = height; return this; } public FD top( int numerator, int offset ) { fd.top = new FormAttachment( numerator, offset ); return this; } public FD top( Control control, int offset ) { fd.top = new FormAttachment( control, offset ); return this; } public FD bottom( int numerator, int offset ) { fd.bottom = new FormAttachment( numerator, offset ); return this; } public FD bottom( Control control, int offset ) { fd.bottom = new FormAttachment( control, offset ); return this; } public FD left( int numerator, int offset ) { fd.left = new FormAttachment( numerator, offset ); return this; } public FD left( int numerator ) { return left( numerator, 0 ); } public FD left( Control control, int offset ) { fd.left = new FormAttachment( control, offset ); return this; } public FD right( int numerator, int offset ) { fd.right = new FormAttachment( numerator, offset ); return this; } public FD rright() { fd.right = new FormAttachment( 100, -getControlOffset( control, fd.width ) ); return this; } public FD right( Control control, int offset ) { fd.right = new FormAttachment( control, offset ); return this; } public void apply() { control.setLayoutData( fd ); } } protected int getMinHeight( Composite comp, int minWidth ) { comp.pack(); return comp.computeSize( minWidth, SWT.DEFAULT ).y; } protected void setTruncatedColumn( Table table, int targetColumn ) { table.addListener( SWT.EraseItem, event -> { if ( event.index == targetColumn ) { event.detail &= ~SWT.FOREGROUND; } } ); table.addListener( SWT.PaintItem, event -> { TableItem item = (TableItem) event.item; int colIdx = event.index; if ( colIdx == targetColumn ) { String contents = item.getText( colIdx ); if ( Utils.isEmpty( contents ) ) { return; } Point size = event.gc.textExtent( contents ); int targetWidth = item.getBounds( colIdx ).width; int yOffset = Math.max( 0, ( event.height - size.y ) / 2 ); if ( size.x > targetWidth ) { contents = shortenText( event.gc, contents, targetWidth ); } event.gc.drawText( contents, event.x + TABLE_ITEM_MARGIN, event.y + yOffset, true ); } } ); } protected void addColumnTooltip( Table table, int columnIndex ) { final DefaultToolTip toolTip = new DefaultToolTip( table, ToolTip.RECREATE, true ); toolTip.setRespectMonitorBounds( true ); toolTip.setRespectDisplayBounds( true ); toolTip.setPopupDelay( TOOLTIP_SHOW_DELAY ); toolTip.setHideDelay( TOOLTIP_HIDE_DELAY ); toolTip.setShift( new Point( ConstUI.TOOLTIP_OFFSET, ConstUI.TOOLTIP_OFFSET ) ); table.addMouseTrackListener( new MouseTrackAdapter() { @Override public void mouseHover( MouseEvent e ) { Point coord = new Point( e.x, e.y ); TableItem item = table.getItem( coord ); if ( item != null && item.getBounds( columnIndex ).contains( coord ) ) { String contents = item.getText( columnIndex ); if ( !Utils.isEmpty( contents ) ) { toolTip.setText( contents ); toolTip.show( coord ); return; } } toolTip.hide(); } @Override public void mouseExit( MouseEvent e ) { toolTip.hide(); } } ); } protected String shortenText( GC gc, String text, final int targetWidth ) { if ( Utils.isEmpty( text ) ) { return ""; } int textWidth = gc.textExtent( text ).x; int extra = gc.textExtent( ELLIPSIS ).x + 2 * TABLE_ITEM_MARGIN; if ( targetWidth <= extra || textWidth <= targetWidth ) { return text; } int len = 
text.length(); for ( int chomp = 1; chomp < len && textWidth + extra >= targetWidth; chomp++ ) { text = text.substring( 0, text.length() - 1 ); textWidth = gc.textExtent( text ).x; } return text + ELLIPSIS; } private int getControlOffset( Control control, int controlWidth ) { // remaining space for min size match return getWidth() - getMarginWidths( control ) - controlWidth; } private int getMarginWidths( Control control ) { // get the width added by container margins and (wm-specific) decorations int extraWidth = 0; for ( Composite parent = control.getParent(); !parent.equals( getParent() ); parent = parent.getParent() ) { extraWidth += parent.computeTrim( 0, 0, 0, 0 ).width; if ( parent.getLayout() instanceof FormLayout ) { extraWidth += 2 * ( (FormLayout) parent.getLayout() ).marginWidth; } } return extraWidth; } protected void setIntegerOnly( TextVar textVar ) { textVar.getTextWidget().addVerifyListener( e -> { if ( !StringUtil.isEmpty( e.text ) && !StringUtil.isVariable( e.text ) && !StringUtil.IsInteger( e.text ) ) { e.doit = false; } } ); } } ================================================ FILE: kettle-plugins/formats/core/src/main/java/org/pentaho/big/data/kettle/plugins/formats/impl/orc/input/OrcInput.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.formats.impl.orc.input; import org.apache.commons.vfs2.FileObject; import org.pentaho.hadoop.shim.api.cluster.NamedCluster; import org.pentaho.hadoop.shim.api.cluster.NamedClusterServiceLocator; import org.pentaho.hadoop.shim.api.cluster.ClusterInitializationException; import org.pentaho.big.data.kettle.plugins.formats.orc.input.OrcInputMetaBase; import org.pentaho.di.core.bowl.Bowl; import org.pentaho.di.core.RowMetaAndData; import org.pentaho.di.core.exception.KettleException; import org.pentaho.di.core.exception.KettleFileException; import org.pentaho.di.core.vfs.AliasedFileObject; import org.pentaho.di.core.vfs.KettleVFS; import org.pentaho.di.trans.Trans; import org.pentaho.di.trans.TransMeta; import org.pentaho.di.trans.step.StepDataInterface; import org.pentaho.di.trans.step.StepMeta; import org.pentaho.di.trans.step.StepMetaInterface; import org.pentaho.di.trans.steps.file.BaseFileInputStep; import org.pentaho.di.trans.steps.file.IBaseFileInputReader; import org.pentaho.hadoop.shim.api.format.FormatService; import org.pentaho.hadoop.shim.api.format.IOrcInputField; import org.pentaho.hadoop.shim.api.format.IPentahoOrcInputFormat; import java.util.Arrays; import java.util.List; public class OrcInput extends BaseFileInputStep { public static final long SPLIT_SIZE = 128L * 1024L * 1024L; public OrcInput( StepMeta stepMeta, StepDataInterface stepDataInterface, int copyNr, TransMeta transMeta, Trans trans ) { super( stepMeta, stepDataInterface, copyNr, transMeta, trans ); } @Override public boolean processRow( StepMetaInterface smi, StepDataInterface sdi ) throws KettleException { meta = (OrcInputMeta) smi; data = (OrcInputData) sdi; try { if ( data.input == null || data.reader == null || data.rowIterator == null ) { FormatService formatService = getFormatService(); if ( meta.inputFiles == null || 
meta.getFilename() == null || meta.getFilename().length() == 0 ) { throw new KettleException( "No input files defined" ); } data.input = formatService.createInputFormat( IPentahoOrcInputFormat.class, getNamedCluster() ); String inputFileName = getKettleVFSFileName( getTransMeta().getBowl(), meta.getParentStepMeta().getParentTransMeta().environmentSubstitute( meta.getFilename() ) ); data.input.setInputFile( inputFileName ); data.input.setSchema( createSchemaFromMeta( meta ) ); data.reader = data.input.createRecordReader( null ); data.rowIterator = data.reader.iterator(); } if ( data.rowIterator.hasNext() ) { RowMetaAndData row = data.rowIterator.next(); putRow( row.getRowMeta(), row.getData() ); return true; } else { data.reader.close(); data.reader = null; data.input = null; setOutputDone(); return false; } } catch ( KettleException ex ) { throw ex; } catch ( Exception ex ) { throw new KettleException( ex ); } } private NamedCluster getNamedCluster() { return meta.getNamedClusterResolver().resolveNamedCluster( environmentSubstitute( meta.getFilename() ) ); } private FormatService getFormatService() throws KettleException { FormatService formatService; try { formatService = meta.getNamedClusterResolver().getNamedClusterServiceLocator() .getService( getNamedCluster(), FormatService.class ); } catch ( ClusterInitializationException e ) { throw new KettleException( "can't get service format shim ", e ); } return formatService; } @Override protected boolean init() { return true; } @Override protected IBaseFileInputReader createReader( OrcInputMeta meta, OrcInputData data, FileObject file ) throws Exception { return null; } public static List retrieveSchema( Bowl bowl, NamedClusterServiceLocator namedClusterServiceLocator, NamedCluster namedCluster, String dataPath ) throws Exception { FormatService formatService = namedClusterServiceLocator.getService( namedCluster, FormatService.class ); IPentahoOrcInputFormat in = formatService.createInputFormat( IPentahoOrcInputFormat.class, namedCluster ); in.setInputFile( getKettleVFSFileName( bowl, dataPath ) ); return in.readSchema(); } public static List createSchemaFromMeta( OrcInputMetaBase meta ) { return Arrays.asList( meta.getInputFields() ); } public static String getKettleVFSFileName( Bowl bowl, String path ) throws KettleFileException { String inputFileName = path; FileObject inputFileObject = KettleVFS.getInstance( bowl ).getFileObject( path ); if ( AliasedFileObject.isAliasedFile( inputFileObject ) ) { inputFileName = ( (AliasedFileObject) inputFileObject ).getOriginalURIString(); } return inputFileName; } } ================================================ FILE: kettle-plugins/formats/core/src/main/java/org/pentaho/big/data/kettle/plugins/formats/impl/orc/input/OrcInputData.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. 
* * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.formats.impl.orc.input; import java.util.Iterator; import org.pentaho.di.core.RowMetaAndData; import org.pentaho.di.core.row.RowMetaInterface; import org.pentaho.di.trans.steps.file.BaseFileInputStepData; import org.pentaho.hadoop.shim.api.format.IPentahoInputFormat.IPentahoRecordReader; import org.pentaho.hadoop.shim.api.format.IPentahoOrcInputFormat; public class OrcInputData extends BaseFileInputStepData { IPentahoOrcInputFormat input; IPentahoRecordReader reader; Iterator rowIterator; RowMetaInterface outputRowMeta; } ================================================ FILE: kettle-plugins/formats/core/src/main/java/org/pentaho/big/data/kettle/plugins/formats/impl/orc/input/OrcInputDialog.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.formats.impl.orc.input; import org.apache.commons.lang.StringUtils; import org.eclipse.swt.SWT; import org.eclipse.swt.layout.FormAttachment; import org.eclipse.swt.layout.FormData; import org.eclipse.swt.layout.FormLayout; import org.eclipse.swt.widgets.Button; import org.eclipse.swt.widgets.Control; import org.eclipse.swt.widgets.Group; import org.eclipse.swt.widgets.Label; import org.eclipse.swt.widgets.Listener; import org.eclipse.swt.widgets.Shell; import org.eclipse.swt.widgets.TableItem; import org.pentaho.di.core.annotations.PluginDialog; import org.pentaho.big.data.kettle.plugins.formats.impl.orc.BaseOrcStepDialog; import org.pentaho.big.data.kettle.plugins.formats.orc.OrcInputField; import org.pentaho.di.core.Const; import org.pentaho.di.core.row.value.ValueMetaFactory; import org.pentaho.di.core.util.Utils; import org.pentaho.di.i18n.BaseMessages; import org.pentaho.di.trans.Trans; import org.pentaho.di.trans.TransMeta; import org.pentaho.di.trans.TransPreviewFactory; import org.pentaho.di.ui.core.dialog.EnterNumberDialog; import org.pentaho.di.ui.core.dialog.EnterTextDialog; import org.pentaho.di.ui.core.dialog.ErrorDialog; import org.pentaho.di.ui.core.dialog.PreviewRowsDialog; import org.pentaho.di.ui.core.events.dialog.SelectionOperation; import org.pentaho.di.ui.core.widget.ColumnInfo; import org.pentaho.di.ui.core.widget.ColumnsResizer; import org.pentaho.di.ui.core.widget.TableView; import org.pentaho.di.ui.trans.dialog.TransPreviewProgressDialog; import org.pentaho.hadoop.shim.api.format.IOrcInputField; import org.pentaho.hadoop.shim.api.format.OrcSpec; import java.util.List; @PluginDialog( id = "OrcInput", image = "OI.svg", pluginType = PluginDialog.PluginType.STEP, documentationUrl = "pdi-transformation-steps-reference-overview/orc-input" ) public class OrcInputDialog extends BaseOrcStepDialog { private static final int SHELL_WIDTH = 526; private static final int SHELL_HEIGHT = 506; private static final int ORC_PATH_COLUMN_INDEX = 1; private static final int FIELD_NAME_COLUMN_INDEX = 2; private static final int FIELD_TYPE_COLUMN_INDEX = 3; private static final int FORMAT_COLUMN_INDEX = 4; private static final int FIELD_SOURCE_TYPE_COLUMN_INDEX = 5; private static 
final String UNABLE_TO_LOAD_SCHEMA_FROM_CONTAINER_FILE = "OrcInput.Error.UnableToLoadSchemaFromContainerFile"; private TableView wInputFields; private Button wPassThruFields; public OrcInputDialog( Shell parent, Object in, TransMeta transMeta, String sname ) { super( parent, (OrcInputMeta) in, transMeta, sname ); } @Override protected void createUI( ) { Control prev = createHeader(); //main fields prev = addFileWidgets( prev ); createFooter( shell ); Label separator = new Label( shell, SWT.HORIZONTAL | SWT.SEPARATOR ); FormData fdSpacer = new FormData(); fdSpacer.height = 2; fdSpacer.left = new FormAttachment( 0, 0 ); fdSpacer.bottom = new FormAttachment( wCancel, -MARGIN ); fdSpacer.right = new FormAttachment( 100, 0 ); separator.setLayoutData( fdSpacer ); Group fieldsContainer = new Group( shell, SWT.SHADOW_IN ); fieldsContainer.setLayout( new FormLayout() ); fieldsContainer.setText( BaseMessages.getString( PKG, "OrcInputDialog.Fields.Label" ) ); new FD( fieldsContainer ).left( 0, 0 ).top( prev, MARGIN ).right( 100, 0 ).bottom( separator, -MARGIN ).apply(); // Accept fields from previous steps? // wPassThruFields = new Button( fieldsContainer, SWT.CHECK ); wPassThruFields.setText( BaseMessages.getString( PKG, "OrcInputDialog.PassThruFields.Label" ) ); wPassThruFields.setToolTipText( BaseMessages.getString( PKG, "OrcInputDialog.PassThruFields.Tooltip" ) ); wPassThruFields.setOrientation( SWT.LEFT_TO_RIGHT ); props.setLook( wPassThruFields ); new FD( wPassThruFields ).left( 0, MARGIN ).top( 0, MARGIN ).apply(); //get fields button lsGet = e -> populateFieldsTable(); Button wGetFields = new Button( fieldsContainer, SWT.PUSH ); wGetFields.setText( BaseMessages.getString( PKG, "OrcInputDialog.Fields.Get" ) ); props.setLook( wGetFields ); new FD( wGetFields ).bottom( 100, -FIELDS_SEP ).right( 100, -MARGIN ).apply(); wGetFields.addListener( SWT.Selection, lsGet ); // fields table ColumnInfo orcPathColumnInfo = new ColumnInfo( BaseMessages.getString( PKG, "OrcInputDialog.Fields.column.Path" ), ColumnInfo.COLUMN_TYPE_TEXT, false, true ); ColumnInfo nameColumnInfo = new ColumnInfo( BaseMessages.getString( PKG, "OrcInputDialog.Fields.column.Name" ), ColumnInfo.COLUMN_TYPE_TEXT, false, false ); ColumnInfo typeColumnInfo = new ColumnInfo( BaseMessages.getString( PKG, "OrcInputDialog.Fields.column.Type" ), ColumnInfo.COLUMN_TYPE_CCOMBO, ValueMetaFactory.getValueMetaNames() ); ColumnInfo formatColumnInfo = new ColumnInfo( BaseMessages.getString( PKG, "OrcInputDialog.Fields.column.Format" ), ColumnInfo.COLUMN_TYPE_CCOMBO, Const.getDateFormats() ); ColumnInfo sorceTypeColumnInfo = new ColumnInfo( BaseMessages.getString( PKG, "OrcInputDialog.Fields.column.SourceType" ), ColumnInfo.COLUMN_TYPE_TEXT, ValueMetaFactory.getValueMetaNames(), true ); ColumnInfo[] parameterColumns = new ColumnInfo[] { orcPathColumnInfo, nameColumnInfo, typeColumnInfo, formatColumnInfo, sorceTypeColumnInfo}; parameterColumns[0].setAutoResize( false ); parameterColumns[1].setUsingVariables( true ); parameterColumns[3].setAutoResize( false ); wInputFields = new TableView( transMeta, fieldsContainer, SWT.FULL_SELECTION | SWT.SINGLE | SWT.BORDER | SWT.NO_SCROLL | SWT.V_SCROLL, parameterColumns, 7, null, props ); ColumnsResizer resizer = new ColumnsResizer( 0, 40, 20, 20, 20, 0 ); wInputFields.getTable().addListener( SWT.Resize, resizer ); props.setLook( wInputFields ); new FD( wInputFields ).left( 0, MARGIN ).right( 100, -MARGIN ).top( wPassThruFields, FIELDS_SEP ) .bottom( wGetFields, -FIELDS_SEP ).apply(); wInputFields.setRowNums(); 
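// Size the columns to the initial content once; auto-resize is then disabled so the ColumnsResizer keeps the 0/40/20/20/20/0 proportions, the ORC path column gets custom truncated painting, and (outside Windows) a hover tooltip shows the full path.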
wInputFields.optWidth( true ); for ( ColumnInfo col : parameterColumns ) { col.setAutoResize( false ); } resizer.addColumnResizeListeners( wInputFields.getTable() ); setTruncatedColumn( wInputFields.getTable(), 1 ); if ( !Const.isWindows() ) { addColumnTooltip( wInputFields.getTable(), 1 ); } } protected void populateFieldsTable() { try { List inputFields = getInputFieldsFromOrcFile( false ); wInputFields.clearAll(); for ( IOrcInputField field : inputFields ) { TableItem item = new TableItem( wInputFields.table, SWT.NONE ); if ( field != null ) { setField( item, concatenateOrcNameAndType( field ), ORC_PATH_COLUMN_INDEX ); setField( item, field.getPentahoFieldName(), FIELD_NAME_COLUMN_INDEX ); setField( item, ValueMetaFactory.getValueMetaName( field.getPentahoType() ), FIELD_TYPE_COLUMN_INDEX ); setField( item, OrcSpec.DataType.getDataType( field.getFormatType() ).getName(), FIELD_SOURCE_TYPE_COLUMN_INDEX ); } } wInputFields.removeEmptyRows(); wInputFields.setRowNums(); wInputFields.optWidth( true ); } catch ( Exception ex ) { logError( BaseMessages.getString( PKG, UNABLE_TO_LOAD_SCHEMA_FROM_CONTAINER_FILE ), ex ); new ErrorDialog( shell, stepname, BaseMessages.getString( PKG, UNABLE_TO_LOAD_SCHEMA_FROM_CONTAINER_FILE, getProcessedFileName() ), ex ); } } private String getProcessedFileName() { return transMeta.environmentSubstitute( wPath.getText() ); } private List getInputFieldsFromOrcFile( boolean failQuietly ) { String orcFileName = getProcessedFileName(); List inputFields = null; try { inputFields = OrcInput.retrieveSchema( transMeta.getBowl(), meta.getNamedClusterResolver().getNamedClusterServiceLocator(), meta.getNamedClusterResolver().resolveNamedCluster( orcFileName ), orcFileName ); } catch ( Exception ex ) { if ( !failQuietly ) { logError( BaseMessages.getString( PKG, UNABLE_TO_LOAD_SCHEMA_FROM_CONTAINER_FILE ), ex ); new ErrorDialog( shell, stepname, BaseMessages.getString( PKG, UNABLE_TO_LOAD_SCHEMA_FROM_CONTAINER_FILE, orcFileName ), ex ); } } return inputFields; } private void setField( TableItem item, String fieldValue, int fieldIndex ) { if ( !Utils.isEmpty( fieldValue ) ) { item.setText( fieldIndex, fieldValue ); } } /** * Read the data from the meta object and show it in this dialog. 
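* Existing rows in the fields table are reused in place; any additional fields are appended as new rows.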
*/ @Override protected void getData( OrcInputMeta meta ) { if ( meta.getFilename() != null && meta.getFilename().length() > 0 ) { wPath.setText( meta.getFilename() ); } wPassThruFields.setSelection( meta.inputFiles.passingThruFields ); int itemIndex = 0; for ( IOrcInputField inputField : meta.getInputFields() ) { TableItem item = null; if ( itemIndex < wInputFields.table.getItemCount() ) { item = wInputFields.table.getItem( itemIndex ); } else { item = new TableItem( wInputFields.table, SWT.NONE ); } if ( inputField.getFormatFieldName() != null ) { item.setText( ORC_PATH_COLUMN_INDEX, concatenateOrcNameAndType( inputField ) ); } if ( inputField.getPentahoFieldName() != null ) { item.setText( FIELD_NAME_COLUMN_INDEX, inputField.getPentahoFieldName() ); } if ( getTypeDesc( inputField.getPentahoType() ) != null ) { item.setText( FIELD_TYPE_COLUMN_INDEX, getTypeDesc( inputField.getPentahoType() ) ); } if ( getSourceTypeDesc( inputField.getFormatType() ) != null ) { item.setText( FIELD_SOURCE_TYPE_COLUMN_INDEX, getSourceTypeDesc( inputField.getFormatType() ) ); } if ( inputField.getStringFormat() != null ) { item.setText( FORMAT_COLUMN_INDEX, inputField.getStringFormat() ); } else { item.setText( FORMAT_COLUMN_INDEX, "" ); } itemIndex++; } } public String getTypeDesc( int type ) { return ValueMetaFactory.getValueMetaName( type ); } public String getSourceTypeDesc( int type ) { return OrcSpec.DataType.getDataType( type ).getName(); } /** * Fill meta object from UI options. */ @Override protected void getInfo( OrcInputMeta meta, boolean preview ) { String filePath = wPath.getText(); if ( filePath != null && !filePath.isEmpty() ) { meta.allocateFiles( 1 ); meta.setFilename( wPath.getText().trim() ); } meta.inputFiles.passingThruFields = wPassThruFields.getSelection(); List actualOrcFileInputFields = getInputFieldsFromOrcFile( true ); int nrFields = wInputFields.nrNonEmpty(); meta.setInputFields( new OrcInputField[nrFields] ); for ( int i = 0; i < nrFields; i++ ) { TableItem item = wInputFields.getNonEmpty( i ); OrcInputField field = new OrcInputField(); field.setFormatFieldName( extractFieldName( item.getText( ORC_PATH_COLUMN_INDEX ) ) ); if ( actualOrcFileInputFields != null ) { IOrcInputField actualOrcField = actualOrcFileInputFields.stream() .filter( x -> field.getFormatFieldName().equals( x.getFormatFieldName() ) ) .findFirst( ).orElse( null ); if ( actualOrcField != null ) { field.setFormatType( actualOrcField.getFormatType() ); } else { field.setFormatType( extractOrcType( item.getText( FIELD_SOURCE_TYPE_COLUMN_INDEX ) ).getId() ); item.setText( concatenateOrcNameAndType( field ) ); } } field.setPentahoFieldName( item.getText( FIELD_NAME_COLUMN_INDEX ) ); field.setPentahoType( ValueMetaFactory.getIdForValueMeta( item.getText( FIELD_TYPE_COLUMN_INDEX ) ) ); field.setStringFormat( item.getText( FORMAT_COLUMN_INDEX ) ); meta.getInputFields()[ i ] = field; } } /** * When all else fails, extract he orc type from the field description. 
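* The UI shows each field as {@code name (type)}; the token between the parentheses is matched case-insensitively against the {@link OrcSpec.DataType} names, and {@code null} is returned when nothing matches.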
* * @see #concatenateOrcNameAndType(IOrcInputField) */ private OrcSpec.DataType extractOrcType( String orcNameTypeFromUI ) { if ( orcNameTypeFromUI != null ) { String uiType = StringUtils.substringBetween( orcNameTypeFromUI, "(", ")" ); if ( uiType != null ) { String uiTypeTrimmed = uiType.trim(); for ( OrcSpec.DataType temp : OrcSpec.DataType.values() ) { if ( temp.getName().equalsIgnoreCase( uiTypeTrimmed ) ) { return temp; } } } } return null; } /** * Get the field name from the UI path column. * * @see #concatenateOrcNameAndType(IOrcInputField) */ private String extractFieldName( String orcNameTypeFromUI ) { if ( orcNameTypeFromUI != null ) { return StringUtils.substringBefore( orcNameTypeFromUI, "(" ).trim(); } return orcNameTypeFromUI; } /** * This method must only be changed together with {@link #extractOrcType(String)}, * since it converts the field for display to the user and the extract methods must convert it back to the internal format. */ private String concatenateOrcNameAndType( IOrcInputField field ) { String typeName; OrcSpec.DataType orcDataType = OrcSpec.DataType.getDataType( field.getFormatType() ); if ( orcDataType == null ) { typeName = "unknown"; } else { typeName = OrcSpec.DataType.getDataType( field.getFormatType() ).getName(); } return field.getFormatFieldName() + " (" + typeName + ")"; } private void doPreview() { getInfo( meta, true ); TransMeta previewMeta = TransPreviewFactory.generatePreviewTransformation( transMeta, meta, wStepname.getText() ); transMeta.getVariable( "Internal.Transformation.Filename.Directory" ); previewMeta.getVariable( "Internal.Transformation.Filename.Directory" ); EnterNumberDialog numberDialog = new EnterNumberDialog( shell, props.getDefaultPreviewSize(), BaseMessages.getString( PKG, "OrcInputDialog.PreviewSize.DialogTitle" ), BaseMessages.getString( PKG, "OrcInputDialog.PreviewSize.DialogMessage" ) ); int previewSize = numberDialog.open(); if ( previewSize > 0 ) { TransPreviewProgressDialog progressDialog = new TransPreviewProgressDialog( shell, previewMeta, new String[] { wStepname.getText() }, new int[] { previewSize } ); progressDialog.open(); Trans trans = progressDialog.getTrans(); String loggingText = progressDialog.getLoggingText(); if ( !progressDialog.isCancelled() && trans.getResult() != null && trans.getResult().getNrErrors() > 0 ) { EnterTextDialog etd = new EnterTextDialog( shell, BaseMessages.getString( PKG, "System.Dialog.PreviewError.Title" ), BaseMessages.getString( PKG, "System.Dialog.PreviewError.Message" ), loggingText, true ); etd.setReadOnly(); etd.open(); } PreviewRowsDialog prd = new PreviewRowsDialog( shell, transMeta, SWT.NONE, wStepname.getText(), progressDialog .getPreviewRowsMeta( wStepname.getText() ), progressDialog.getPreviewRows( wStepname.getText() ), loggingText ); prd.open(); } } @Override protected int getWidth() { return SHELL_WIDTH; } @Override protected int getHeight() { return SHELL_HEIGHT; } @Override protected String getStepTitle() { return BaseMessages.getString( PKG, "OrcInputDialog.Shell.Title" ); } @Override protected Listener getPreview() { return e -> doPreview(); } @Override protected SelectionOperation selectionOperation() { return SelectionOperation.FILE_OR_FOLDER; } } ================================================ FILE: kettle-plugins/formats/core/src/main/java/org/pentaho/big/data/kettle/plugins/formats/impl/orc/input/OrcInputMeta.java ================================================ /*!
****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.formats.impl.orc.input; import org.pentaho.big.data.kettle.plugins.formats.impl.NamedClusterResolver; import org.pentaho.big.data.kettle.plugins.formats.orc.input.OrcInputMetaBase; import org.pentaho.di.core.annotations.Step; import org.pentaho.di.core.injection.InjectionSupported; import org.pentaho.di.trans.Trans; import org.pentaho.di.trans.TransMeta; import org.pentaho.di.trans.step.StepDataInterface; import org.pentaho.di.trans.step.StepInterface; import org.pentaho.di.trans.step.StepMeta; //keep ID as new because we will have old step with ID OrcInput @Step( id = "OrcInput", image = "OI.svg", name = "OrcInput.Name", description = "OrcInput.Description", categoryDescription = "i18n:org.pentaho.di.trans.step:BaseStep.Category.BigData", i18nPackageName = "org.pentaho.di.trans.steps.orc" ) @InjectionSupported( localizationPrefix = "OrcInput.Injection.", groups = { "FILENAME_LINES", "FIELDS" }, hide = { "FILEMASK", "EXCLUDE_FILEMASK", "FILE_REQUIRED", "INCLUDE_SUBFOLDERS", "FIELD_POSITION", "FIELD_LENGTH", "FIELD_IGNORE", "FIELD_FORMAT", "FIELD_PRECISION", "FIELD_CURRENCY", "FIELD_DECIMAL", "FIELD_GROUP", "FIELD_REPEAT", "FIELD_TRIM_TYPE", "FIELD_NULL_STRING", "FIELD_IF_NULL", "FIELD_NULLABLE", "ACCEPT_FILE_NAMES", "ACCEPT_FILE_STEP", "PASS_THROUGH_FIELDS", "ACCEPT_FILE_FIELD", "ADD_FILES_TO_RESULT", "IGNORE_ERRORS", "FILE_ERROR_FIELD", "FILE_ERROR_MESSAGE_FIELD", "SKIP_BAD_FILES", "WARNING_FILES_TARGET_DIR", "WARNING_FILES_EXTENTION", "ERROR_FILES_TARGET_DIR", "ERROR_FILES_EXTENTION", "LINE_NR_FILES_TARGET_DIR", "LINE_NR_FILES_EXTENTION", "FILE_SHORT_FILE_FIELDNAME", "FILE_EXTENSION_FIELDNAME", "FILE_PATH_FIELDNAME", "FILE_SIZE_FIELDNAME", "FILE_HIDDEN_FIELDNAME", "FILE_LAST_MODIFICATION_FIELDNAME", "FILE_URI_FIELDNAME", "FILE_ROOT_URI_FIELDNAME", "FIELD_SOURCE_TYPE" } ) public class OrcInputMeta extends OrcInputMetaBase { private final NamedClusterResolver namedClusterResolver; public OrcInputMeta() { this( NamedClusterResolver.getInstance() ); } public OrcInputMeta( NamedClusterResolver namedClusterResolver ) { this.namedClusterResolver = namedClusterResolver; } @Override public StepInterface getStep( StepMeta stepMeta, StepDataInterface stepDataInterface, int copyNr, TransMeta transMeta, Trans trans ) { return new OrcInput( stepMeta, stepDataInterface, copyNr, transMeta, trans ); } @Override public StepDataInterface getStepData() { return new OrcInputData(); } public NamedClusterResolver getNamedClusterResolver() { return namedClusterResolver; } } ================================================ FILE: kettle-plugins/formats/core/src/main/java/org/pentaho/big/data/kettle/plugins/formats/impl/orc/output/OrcOutput.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. 
* * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.formats.impl.orc.output; import org.apache.orc.CompressionKind; import org.pentaho.big.data.kettle.plugins.formats.impl.output.PvfsFileAliaser; import org.pentaho.hadoop.shim.api.cluster.NamedCluster; import org.pentaho.hadoop.shim.api.cluster.ClusterInitializationException; import org.pentaho.di.core.RowMetaAndData; import org.pentaho.di.core.exception.KettleException; import org.pentaho.di.core.row.RowMeta; import org.pentaho.di.core.row.RowMetaInterface; import org.pentaho.di.core.row.ValueMetaInterface; import org.pentaho.di.core.row.value.ValueMetaFactory; import org.pentaho.di.trans.Trans; import org.pentaho.di.trans.TransMeta; import org.pentaho.di.trans.step.BaseStep; import org.pentaho.di.trans.step.StepDataInterface; import org.pentaho.di.trans.step.StepInterface; import org.pentaho.di.trans.step.StepMeta; import org.pentaho.di.trans.step.StepMetaInterface; import org.pentaho.hadoop.shim.api.format.FormatService; import org.pentaho.hadoop.shim.api.format.IPentahoOrcOutputFormat; import java.io.IOException; public class OrcOutput extends BaseStep implements StepInterface { private OrcOutputMeta meta; private OrcOutputData data; private PvfsFileAliaser pvfsFileAliaser; public OrcOutput( StepMeta stepMeta, StepDataInterface stepDataInterface, int copyNr, TransMeta transMeta, Trans trans ) { super( stepMeta, stepDataInterface, copyNr, transMeta, trans ); } @Override public synchronized boolean processRow( StepMetaInterface smi, StepDataInterface sdi ) throws KettleException { try { meta = (OrcOutputMeta) smi; data = (OrcOutputData) sdi; if ( data.output == null ) { init(); } Object[] currentRow = getRow(); if ( currentRow != null ) { // create the output row meta RowMetaInterface outputRMI = new RowMeta(); // create a data array matching the output fields Object[] outputData = new Object[ meta.getOutputFields().size() ]; for ( int i = 0; i < meta.getOutputFields().size(); i++ ) { int inputRowIndex = getInputRowMeta().indexOfValue( meta.getOutputFields().get( i ).getPentahoFieldName() ); if ( inputRowIndex == -1 ) { throw new KettleException( "Field name [" + meta.getOutputFields().get( i ).getPentahoFieldName() + "] couldn't be found in the input stream!" ); } else { ValueMetaInterface vmi = ValueMetaFactory.cloneValueMeta( getInputRowMeta().getValueMeta( inputRowIndex ) ); // add the output value meta according to the output fields outputRMI.addValueMeta( i, vmi ); // add the output data according to the output fields outputData[ i ] = currentRow[ inputRowIndex ]; } } RowMetaAndData row = new RowMetaAndData( outputRMI, outputData ); data.writer.write( row ); putRow( row.getRowMeta(), row.getData() ); return true; } else { // no more input to be expected...
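// End of input: close the ORC writer, copy the temporary (aliased) file to its final PVFS destination and remove the staging folder before signalling that the output is done.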
closeWriter(); pvfsFileAliaser.copyFileToFinalDestination(); pvfsFileAliaser.deleteTempFileAndFolder(); setOutputDone(); return false; } } catch ( IllegalStateException e ) { getLogChannel().logError( e.getMessage() ); setErrors( 1 ); pvfsFileAliaser.deleteTempFileAndFolder(); setOutputDone(); return false; } catch ( KettleException ex ) { throw ex; } catch ( Exception ex ) { throw new KettleException( ex ); } } public void init() throws Exception { FormatService formatService; try { formatService = meta.getNamedClusterResolver().getNamedClusterServiceLocator() .getService( getNamedCluster(), FormatService.class ); } catch ( ClusterInitializationException e ) { throw new KettleException( "can't get service format shim ", e ); } if ( meta.getFilename() == null ) { throw new KettleException( "No output files defined" ); } data.output = formatService.createOutputFormat( IPentahoOrcOutputFormat.class, getNamedCluster() ); String outputFileName = environmentSubstitute( meta.constructOutputFilename() ); pvfsFileAliaser = new PvfsFileAliaser( getTransMeta().getBowl(), outputFileName, getTransMeta(), data.output, meta.isOverrideOutput(), getLogChannel() ); data.output.setOutputFile( pvfsFileAliaser.generateAlias(), meta.isOverrideOutput() ); data.output.setFields( meta.getOutputFields() ); CompressionKind compression; try { compression = CompressionKind.valueOf( meta.getCompressionType().toUpperCase() ); } catch ( Exception ex ) { compression = CompressionKind.NONE; } data.output.setCompression( compression ); if ( compression != CompressionKind.NONE ) { data.output.setCompressSize( meta.getCompressSize() ); } data.output.setRowIndexStride( meta.getRowsBetweenEntries() ); data.output.setStripeSize( meta.getStripeSize() ); data.writer = data.output.createRecordWriter(); } private NamedCluster getNamedCluster() { return meta.getNamedClusterResolver().resolveNamedCluster( environmentSubstitute( meta.getFilename() ) ); } public void closeWriter() throws KettleException { try { data.writer.close(); } catch ( IOException e ) { throw new KettleException( e ); } data.output = null; } public boolean init( StepMetaInterface smi, StepDataInterface sdi ) { meta = (OrcOutputMeta) smi; data = (OrcOutputData) sdi; return super.init( smi, sdi ); } } ================================================ FILE: kettle-plugins/formats/core/src/main/java/org/pentaho/big/data/kettle/plugins/formats/impl/orc/output/OrcOutputData.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. 
* * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.formats.impl.orc.output; import org.pentaho.di.trans.step.BaseStepData; import org.pentaho.di.trans.step.StepDataInterface; import org.pentaho.hadoop.shim.api.format.IPentahoOrcOutputFormat; import org.pentaho.hadoop.shim.api.format.IPentahoOutputFormat.IPentahoRecordWriter; public class OrcOutputData extends BaseStepData implements StepDataInterface { public IPentahoOrcOutputFormat output; public IPentahoRecordWriter writer; } ================================================ FILE: kettle-plugins/formats/core/src/main/java/org/pentaho/big/data/kettle/plugins/formats/impl/orc/output/OrcOutputDialog.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.formats.impl.orc.output; import org.eclipse.swt.SWT; import org.eclipse.swt.custom.CTabFolder; import org.eclipse.swt.custom.CTabItem; import org.eclipse.jface.dialogs.MessageDialog; import org.eclipse.swt.events.SelectionAdapter; import org.eclipse.swt.events.SelectionEvent; import org.eclipse.swt.layout.FormAttachment; import org.eclipse.swt.layout.FormData; import org.eclipse.swt.layout.FormLayout; import org.eclipse.swt.widgets.Button; import org.eclipse.swt.widgets.Composite; import org.eclipse.swt.widgets.Control; import org.eclipse.swt.widgets.Label; import org.eclipse.swt.widgets.Listener; import org.eclipse.swt.widgets.MessageBox; import org.eclipse.swt.widgets.Shell; import org.eclipse.swt.widgets.TableItem; import org.pentaho.di.core.annotations.PluginDialog; import org.pentaho.big.data.kettle.plugins.formats.impl.NullableValuesEnum; import org.pentaho.big.data.kettle.plugins.formats.impl.orc.BaseOrcStepDialog; import org.pentaho.big.data.kettle.plugins.formats.orc.OrcTypeConverter; import org.pentaho.big.data.kettle.plugins.formats.orc.output.OrcOutputField; import org.pentaho.di.core.Const; import org.pentaho.di.core.Props; import org.pentaho.di.core.exception.KettleException; import org.pentaho.di.core.row.RowMetaInterface; import org.pentaho.di.core.row.ValueMetaInterface; import org.pentaho.di.core.util.Utils; import org.pentaho.di.i18n.BaseMessages; import org.pentaho.di.trans.TransMeta; import org.pentaho.di.trans.step.StepDialogInterface; import org.pentaho.di.ui.core.dialog.ErrorDialog; import org.pentaho.di.ui.core.events.dialog.SelectionOperation; import org.pentaho.di.ui.core.gui.GUIResource; import org.pentaho.di.ui.core.widget.ColumnInfo; import org.pentaho.di.ui.core.widget.ColumnsResizer; import org.pentaho.di.ui.core.widget.ComboVar; import org.pentaho.di.ui.core.widget.TableView; import org.eclipse.swt.widgets.Table; import org.pentaho.di.ui.core.widget.TextVar; import org.pentaho.di.ui.trans.step.TableItemInsertListener; import org.pentaho.hadoop.shim.api.format.OrcSpec; import java.util.ArrayList; import java.util.Arrays; import java.util.List; import java.util.function.BiConsumer; @PluginDialog( id = "OrcOutput", image = "OO.svg", pluginType = PluginDialog.PluginType.STEP, documentationUrl = "pdi-transformation-steps-reference-overview/orc-output" 
) public class OrcOutputDialog extends BaseOrcStepDialog implements StepDialogInterface { private static final Class PKG = OrcOutputMeta.class; private static final int SHELL_WIDTH = 698; private static final int SHELL_HEIGHT = 554; private ComboVar wCompression; private TextVar wStripeSize; private TextVar wCompressSize; private Button wInlineIndexes; private TextVar wRowsBetweenEntries; private Button wDateInFileName; private Button wTimeInFileName; private Button wOverwriteExistingFile; private Button wSpecifyDateTimeFormat; private ComboVar wDateTimeFormat; private int startingRowsBetweenEntries = OrcOutputMeta.DEFAULT_ROWS_BETWEEN_ENTRIES; private TableView wOutputFields; public OrcOutputDialog( Shell parent, Object orcOutputMeta, TransMeta transMeta, String sname ) { this( parent, (OrcOutputMeta) orcOutputMeta, transMeta, sname ); } public OrcOutputDialog( Shell parent, OrcOutputMeta orcOutputMeta, TransMeta transMeta, String sname ) { super( parent, orcOutputMeta, transMeta, sname ); this.meta = orcOutputMeta; } @Override protected void createUI() { Control prev = createHeader(); //main fields prev = addFileWidgets( prev ); createFooter( shell ); Label separator = new Label( shell, SWT.HORIZONTAL | SWT.SEPARATOR ); FormData fdSpacer = new FormData(); fdSpacer.height = 2; fdSpacer.left = new FormAttachment( 0, 0 ); fdSpacer.bottom = new FormAttachment( wCancel, -MARGIN ); fdSpacer.right = new FormAttachment( 100, 0 ); separator.setLayoutData( fdSpacer ); Composite tabContainer; tabContainer = new Composite( shell, SWT.NONE ); tabContainer.setLayout( new FormLayout() ); new FD( tabContainer ).left( 0, 0 ).top( prev, 0 ).right( 100, 0 ).bottom( separator, -MARGIN ).apply(); wOverwriteExistingFile = new Button( tabContainer, SWT.CHECK ); wOverwriteExistingFile.setText( BaseMessages.getString( PKG, "OrcOutputDialog.OverwriteFile.Label" ) ); props.setLook( wOverwriteExistingFile ); new FD( wOverwriteExistingFile ).left( 0, 0 ).top( tabContainer, FIELDS_SEP ).apply(); wOverwriteExistingFile.addSelectionListener( new SelectionAdapter() { @Override public void widgetSelected( SelectionEvent e ) { meta.setChanged(); } } ); CTabFolder wTabFolder = new CTabFolder( tabContainer, SWT.BORDER ); props.setLook( wTabFolder, Props.WIDGET_STYLE_TAB ); wTabFolder.setSimple( false ); addFieldsTab( wTabFolder ); addOptionsTab( wTabFolder ); new FD( wTabFolder ).left( 0, 0 ).top( wOverwriteExistingFile, MARGIN ).right( 100, 0 ).bottom( 100, 0 ).apply(); wTabFolder.setSelection( 0 ); } @Override protected String getStepTitle() { return BaseMessages.getString( PKG, "OrcOutputDialog.Shell.Title" ); } private void addFieldsTab( CTabFolder wTabFolder ) { CTabItem wTab = new CTabItem( wTabFolder, SWT.NONE ); wTab.setText( BaseMessages.getString( PKG, "OrcOutputDialog.FieldsTab.TabTitle" ) ); Composite wComp = new Composite( wTabFolder, SWT.NONE ); props.setLook( wComp ); FormLayout layout = new FormLayout(); layout.marginWidth = MARGIN; layout.marginHeight = MARGIN; wComp.setLayout( layout ); lsGet = e -> getFields(); Button wGetFields = new Button( wComp, SWT.PUSH ); wGetFields.setText( BaseMessages.getString( PKG, "OrcOutputDialog.Fields.Get" ) ); props.setLook( wGetFields ); new FD( wGetFields ).bottom( 100, 0 ).right( 100, 0 ).apply(); wGetFields.addListener( SWT.Selection, lsGet ); ColumnInfo[] parameterColumns = new ColumnInfo[]{ new ColumnInfo( BaseMessages.getString( PKG, "OrcOutputDialog.Fields.column.Path" ), ColumnInfo.COLUMN_TYPE_TEXT, false, false ), new ColumnInfo( BaseMessages.getString( PKG, 
"OrcOutputDialog.Fields.column.Name" ), ColumnInfo.COLUMN_TYPE_TEXT, false, false ), new ColumnInfo( BaseMessages.getString( PKG, "OrcOutputDialog.Fields.column.Type" ), ColumnInfo.COLUMN_TYPE_CCOMBO, OrcSpec.DataType.getDisplayableTypeNames() ), new ColumnInfo( BaseMessages.getString( PKG, "OrcOutputDialog.Fields.column.Precision" ), ColumnInfo.COLUMN_TYPE_TEXT, false, false ), new ColumnInfo( BaseMessages.getString( PKG, "OrcOutputDialog.Fields.column.Scale" ), ColumnInfo.COLUMN_TYPE_TEXT, false, false ), new ColumnInfo( BaseMessages.getString( PKG, "OrcOutputDialog.Fields.column.Default" ), ColumnInfo.COLUMN_TYPE_TEXT, false, false ), new ColumnInfo( BaseMessages.getString( PKG, "OrcOutputDialog.Fields.column.Null" ), ColumnInfo.COLUMN_TYPE_CCOMBO, NullableValuesEnum.getValuesArr(), true )}; parameterColumns[0].setAutoResize( false ); parameterColumns[1].setUsingVariables( true ); wOutputFields = new TableView( transMeta, wComp, SWT.FULL_SELECTION | SWT.SINGLE | SWT.BORDER | SWT.NO_SCROLL | SWT.V_SCROLL, parameterColumns, 7, lsMod, props ); ColumnsResizer resizer = new ColumnsResizer( 0, 30, 20, 10, 10, 10, 15, 5 ); wOutputFields.getTable().addListener( SWT.Resize, resizer ); props.setLook( wOutputFields ); new FD( wOutputFields ).left( 0, 0 ).right( 100, 0 ).top( wComp, 0 ).bottom( wGetFields, -FIELDS_SEP ).apply(); wOutputFields.setRowNums(); wOutputFields.optWidth( true ); new FD( wComp ).left( 0, 0 ).top( 0, 0 ).right( 100, 0 ).bottom( 100, 0 ).apply(); wTab.setControl( wComp ); for ( ColumnInfo col : parameterColumns ) { col.setAutoResize( false ); } resizer.addColumnResizeListeners( wOutputFields.getTable() ); setTruncatedColumn( wOutputFields.getTable(), 1 ); if ( !Const.isWindows() ) { addColumnTooltip( wOutputFields.getTable(), 1 ); } } private void addOptionsTab( CTabFolder wTabFolder ) { CTabItem wTab = new CTabItem( wTabFolder, SWT.NONE ); wTab.setText( BaseMessages.getString( PKG, "OrcOutputDialog.Options.TabTitle" ) ); Composite wGrid = new Composite( wTabFolder, SWT.NONE ); wTab.setControl( wGrid ); props.setLook( wGrid ); FormLayout formLayout = new FormLayout(); formLayout.marginHeight = MARGIN; formLayout.marginWidth = MARGIN; wGrid.setLayout( formLayout ); Label wLabel = createLabel( wGrid, "OrcOutputDialog.Options.Compression" ); FormData formData = new FormData(); formData.top = new FormAttachment( 0, 0 ); wLabel.setLayoutData( formData ); wCompression = createComboVar( wGrid, meta.getCompressionTypes() ); formData = new FormData(); formData.top = new FormAttachment( wLabel, 5 ); formData.width = FIELD_SMALL + VAR_EXTRA_WIDTH; wCompression.setLayoutData( formData ); props.setLook( wCompression ); wLabel = createLabel( wGrid, "OrcOutputDialog.Options.StripeSize" ); formData = new FormData(); formData.top = new FormAttachment( wCompression, 10 ); wLabel.setLayoutData( formData ); wStripeSize = new TextVar( transMeta, wGrid, SWT.SINGLE | SWT.LEFT | SWT.BORDER ); props.setLook( wStripeSize ); formData = new FormData(); formData.top = new FormAttachment( wLabel, 5 ); formData.width = FIELD_SMALL + VAR_EXTRA_WIDTH; wStripeSize.setLayoutData( formData ); setIntegerOnly( wStripeSize ); wStripeSize.addModifyListener( lsMod ); wLabel = createLabel( wGrid, "OrcOutputDialog.Options.CompressSize" ); formData = new FormData(); formData.top = new FormAttachment( wStripeSize, 10 ); wLabel.setLayoutData( formData ); wCompressSize = new TextVar( transMeta, wGrid, SWT.SINGLE | SWT.LEFT | SWT.BORDER ); props.setLook( wCompressSize ); formData = new FormData(); formData.top = new 
FormAttachment( wLabel, 5 ); formData.width = FIELD_SMALL + VAR_EXTRA_WIDTH; wCompressSize.setLayoutData( formData ); wCompressSize.getTextWidget().addModifyListener( lsMod ); setIntegerOnly( wCompressSize ); wCompressSize.addModifyListener( lsMod ); wInlineIndexes = new Button( wGrid, SWT.CHECK ); props.setLook( wInlineIndexes ); wInlineIndexes.setText( BaseMessages.getString( PKG, "OrcOutputDialog.Options.InlineIndexes" ) ); formData = new FormData(); formData.top = new FormAttachment( 0, 0 ); formData.left = new FormAttachment( wCompressSize, 50 ); wInlineIndexes.setLayoutData( formData ); wInlineIndexes.addSelectionListener( new SelectionAdapter() { @Override public void widgetSelected( SelectionEvent e ) { meta.setChanged(); boolean isSelected = wInlineIndexes.getSelection(); if ( isSelected ) { wRowsBetweenEntries.setEnabled( true ); wRowsBetweenEntries.setText( Integer.toString( startingRowsBetweenEntries ) ); } else { wRowsBetweenEntries.setEnabled( false ); wRowsBetweenEntries.setText( "" ); } } } ); wLabel = createLabel( wGrid, "OrcOutputDialog.Options.RowsBetweenEntries" ); formData = new FormData(); formData.top = new FormAttachment( wInlineIndexes, 10 ); formData.left = new FormAttachment( wCompressSize, 70 ); wLabel.setLayoutData( formData ); wRowsBetweenEntries = new TextVar( transMeta, wGrid, SWT.SINGLE | SWT.LEFT | SWT.BORDER ); props.setLook( wRowsBetweenEntries ); formData = new FormData(); formData.top = new FormAttachment( wLabel, 5 ); formData.left = new FormAttachment( wCompressSize, 70 ); formData.width = FIELD_SMALL + VAR_EXTRA_WIDTH; wRowsBetweenEntries.setLayoutData( formData ); setIntegerOnly( wRowsBetweenEntries ); wRowsBetweenEntries.addModifyListener( lsMod ); wDateInFileName = new Button( wGrid, SWT.CHECK ); props.setLook( wDateInFileName ); wDateInFileName.setText( BaseMessages.getString( PKG, "OrcOutputDialog.Options.DateInFileName" ) ); formData = new FormData(); formData.top = new FormAttachment( wRowsBetweenEntries, 10 ); formData.left = new FormAttachment( wCompressSize, 50 ); wDateInFileName.setLayoutData( formData ); wDateInFileName.addSelectionListener( new SelectionAdapter() { @Override public void widgetSelected( SelectionEvent e ) { meta.setChanged(); boolean isSelected = wDateInFileName.getSelection(); if ( isSelected ) { wSpecifyDateTimeFormat.setSelection( false ); wDateTimeFormat.setText( "" ); wDateTimeFormat.setEnabled( false ); } } } ); wTimeInFileName = new Button( wGrid, SWT.CHECK ); props.setLook( wTimeInFileName ); wTimeInFileName.setText( BaseMessages.getString( PKG, "OrcOutputDialog.Options.TimeInFileName" ) ); formData = new FormData(); formData.top = new FormAttachment( wDateInFileName, 10 ); formData.left = new FormAttachment( wCompressSize, 50 ); wTimeInFileName.setLayoutData( formData ); wTimeInFileName.addSelectionListener( new SelectionAdapter() { @Override public void widgetSelected( SelectionEvent e ) { meta.setChanged(); boolean isSelected = wTimeInFileName.getSelection(); if ( isSelected ) { wSpecifyDateTimeFormat.setSelection( false ); wDateTimeFormat.setText( "" ); wDateTimeFormat.setEnabled( false ); } } } ); wSpecifyDateTimeFormat = new Button( wGrid, SWT.CHECK ); wSpecifyDateTimeFormat.setText( BaseMessages.getString( PKG, "OrcOutputDialog.Options.SpecifyDateTimeFormat" ) ); props.setLook( wSpecifyDateTimeFormat ); formData = new FormData(); formData.top = new FormAttachment( wTimeInFileName, 10 ); formData.left = new FormAttachment( wCompressSize, 50 ); wSpecifyDateTimeFormat.setLayoutData( formData ); 
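// 'Specify date time format' is mutually exclusive with the date/time-in-filename checkboxes: selecting it clears and disables them, deselecting it re-enables them.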
wSpecifyDateTimeFormat.addSelectionListener( new SelectionAdapter() { @Override public void widgetSelected( SelectionEvent e ) { meta.setChanged(); boolean isSelected = wSpecifyDateTimeFormat.getSelection(); wDateTimeFormat.setEnabled( isSelected ); if ( !isSelected ) { wDateTimeFormat.setText( "" ); wTimeInFileName.setEnabled( true ); wDateInFileName.setEnabled( true ); } else { wTimeInFileName.setSelection( false ); wDateInFileName.setSelection( false ); wTimeInFileName.setEnabled( false ); wDateInFileName.setEnabled( false ); } } } ); String[] dates = Const.getDateFormats(); dates = Arrays.stream( dates ).filter( d -> d.indexOf( '/' ) < 0 && d.indexOf( '\\' ) < 0 && d.indexOf( ':' ) < 0 ) .toArray( String[]::new ); // remove formats with slashes and colons wDateTimeFormat = createComboVar( wGrid, dates ); props.setLook( wDateTimeFormat ); formData = new FormData(); formData.top = new FormAttachment( wSpecifyDateTimeFormat, 5 ); formData.left = new FormAttachment( wCompressSize, 70 ); wDateTimeFormat.setLayoutData( formData ); } protected ComboVar createComboVar( Composite container, String[] options ) { ComboVar combo = new ComboVar( transMeta, container, SWT.LEFT | SWT.BORDER ); combo.setItems( options ); combo.addModifyListener( lsMod ); return combo; } protected String getComboVarValue( ComboVar combo ) { String text = combo.getText(); String data = (String) combo.getData( text ); return data != null ? data : text; } private Label createLabel( Composite container, String labelRef ) { Label label = new Label( container, SWT.NONE ); label.setText( BaseMessages.getString( PKG, labelRef ) ); props.setLook( label ); return label; } @Override protected void ok() { if ( Utils.isEmpty( wStepname.getText() ) ) { return; } stepname = wStepname.getText(); List validationErrorFields = validateOutputFields( wOutputFields ); if ( validationErrorFields != null && !validationErrorFields.isEmpty() ) { MessageBox mb = new MessageBox( shell, SWT.OK | SWT.ICON_ERROR ); mb.setText( BaseMessages.getString( PKG, "OrcOutput.MissingDefaultFields.Title" ) ); mb.setMessage( BaseMessages.getString( PKG, "OrcOutput.MissingDefaultFields.Msg" ) ); mb.open(); return; } getInfo( meta, false ); dispose(); } /** * Read the data from the meta object and show it in this dialog. */ protected void getData( OrcOutputMeta meta ) { if ( meta.getFilename() != null ) { wPath.setText( meta.getFilename() ); } wOverwriteExistingFile.setSelection( meta.isOverrideOutput() ); populateFieldsUI( meta, wOutputFields ); wCompression.setText( meta.getCompressionType() ); wCompressSize.setText( meta.getCompressSize() > 0 ? 
Integer.toString( meta.getCompressSize() ) : Integer.toString( OrcOutputMeta.DEFAULT_COMPRESS_SIZE ) ); int rowsBetweenEntries = meta.getRowsBetweenEntries(); if ( rowsBetweenEntries != 0 ) { startingRowsBetweenEntries = rowsBetweenEntries; wInlineIndexes.setSelection( true ); wRowsBetweenEntries.setText( Integer.toString( rowsBetweenEntries ) ); wRowsBetweenEntries.setEnabled( true ); } else { startingRowsBetweenEntries = OrcOutputMeta.DEFAULT_ROWS_BETWEEN_ENTRIES; wInlineIndexes.setSelection( false ); wRowsBetweenEntries.setText( "" ); wRowsBetweenEntries.setEnabled( false ); } wStripeSize.setText( Integer.toString( meta.getStripeSize() ) ); String dateTimeFormat = coalesce( meta.getDateTimeFormat() ); if ( !dateTimeFormat.isEmpty() ) { wTimeInFileName.setSelection( false ); wDateInFileName.setSelection( false ); wTimeInFileName.setEnabled( false ); wDateInFileName.setEnabled( false ); wSpecifyDateTimeFormat.setSelection( true ); wDateTimeFormat.setText( dateTimeFormat ); wDateTimeFormat.setEnabled( true ); } else { wTimeInFileName.setEnabled( true ); wDateInFileName.setEnabled( true ); wTimeInFileName.setSelection( meta.isTimeInFileName() ); wDateInFileName.setSelection( meta.isDateInFileName() ); wSpecifyDateTimeFormat.setSelection( false ); wDateTimeFormat.setEnabled( false ); wDateTimeFormat.setText( "" ); } } // ui -> meta @Override protected void getInfo( OrcOutputMeta meta, boolean preview ) { meta.setFilename( wPath.getText() ); meta.setOverrideOutput( wOverwriteExistingFile.getSelection() ); meta.setCompressionType( wCompression.getText() ); int compressSize = ( wCompressSize.getText().length() > 0 ) ? Integer.parseInt( wCompressSize.getText() ) : OrcOutputMeta.DEFAULT_COMPRESS_SIZE; meta.setCompressSize( compressSize ); int stripeSize = ( wStripeSize.getText().length() > 0 ) ? Integer.parseInt( wStripeSize.getText() ) : OrcOutputMeta.DEFAULT_STRIPE_SIZE; meta.setStripeSize( stripeSize ); int rowsBetweenEntries = ( wRowsBetweenEntries.getText().length() > 0 ) ? 
Integer.parseInt( wRowsBetweenEntries.getText() ) : 0; meta.setRowsBetweenEntries( rowsBetweenEntries ); if ( wSpecifyDateTimeFormat.getSelection() ) { meta.setTimeInFileName( false ); meta.setDateInFileName( false ); meta.setDateTimeFormat( wDateTimeFormat.getText().trim() ); } else { meta.setTimeInFileName( wTimeInFileName.getSelection() ); meta.setDateInFileName( wDateInFileName.getSelection() ); meta.setDateTimeFormat( "" ); } saveOutputFields( wOutputFields, meta ); } private void saveOutputFields( TableView wFields, OrcOutputMeta meta ) { int nrFields = wFields.nrNonEmpty(); List outputFields = new ArrayList<>(); for ( int i = 0; i < nrFields; i++ ) { TableItem item = wFields.getNonEmpty( i ); int j = 1; OrcOutputField field = new OrcOutputField(); field.setFormatFieldName( item.getText( j++ ) ); field.setPentahoFieldName( item.getText( j++ ) ); field.setFormatType( item.getText( j++ ) ); if ( field.getOrcType().equals( OrcSpec.DataType.DECIMAL ) ) { field.setPrecision( item.getText( j++ ) ); field.setScale( item.getText( j++ ) ); } else if ( field.getOrcType().equals( OrcSpec.DataType.FLOAT ) || field.getOrcType().equals( OrcSpec.DataType.DOUBLE ) ) { j++; field.setScale( item.getText( j++ ) ); } else { j += 2; } field.setDefaultValue( item.getText( j++ ) ); field.setAllowNull( getNullableValue( item.getText( j++ ) ) ); outputFields.add( field ); } meta.setOutputFields( outputFields ); } private List validateOutputFields( TableView wFields ) { int nrFields = wFields.nrNonEmpty(); List validationErrorFields = new ArrayList<>(); for ( int i = 0; i < nrFields; i++ ) { TableItem item = wFields.getNonEmpty( i ); int j = 1; String path = item.getText( j++ ); String name = item.getText( j++ ); String type = item.getText( j++ ); String precision = item.getText( j++ ); if ( precision == null || precision.trim().isEmpty() ) { item.setText( 4, Integer.toString( OrcSpec.DEFAULT_DECIMAL_PRECISION ) ); } String scale = item.getText( j++ ); if ( scale == null || scale.trim().isEmpty() ) { item.setText( 5, Integer.toString( OrcSpec.DEFAULT_DECIMAL_SCALE ) ); } String defaultValue = item.getText( j++ ); String nullString = getNullableValue( item.getText( j++ ) ); if ( nullString.equals( NullableValuesEnum.NO.getValue() ) && ( defaultValue == null || defaultValue.trim().isEmpty() ) ) { validationErrorFields.add( name ); } } return validationErrorFields; } private String getNullableValue( String nullString ) { return ( nullString != null && !nullString.isEmpty() ) ? nullString : NullableValuesEnum.getDefaultValue().getValue(); } private void populateFieldsUI( OrcOutputMeta meta, TableView wOutputFields ) { populateFieldsUI( meta.getOutputFields(), wOutputFields, ( field, item ) -> { int i = 1; item.setText( i++, coalesce( field.getFormatFieldName() ) ); item.setText( i++, coalesce( field.getPentahoFieldName() ) ); item.setText( i++, coalesce( field.getOrcType().getName() ) ); if ( field.getOrcType().equals( OrcSpec.DataType.DECIMAL ) ) { item.setText( i++, coalesce( String.valueOf( field.getPrecision() ) ) ); item.setText( i++, coalesce( String.valueOf( field.getScale() ) ) ); } else if ( field.getOrcType().equals( OrcSpec.DataType.FLOAT ) || field.getOrcType().equals( OrcSpec.DataType.DOUBLE ) ) { i++; item.setText( i++, field.getScale() > 0 ? String.valueOf( field.getScale() ) : "" ); } else { i += 2; } item.setText( i++, coalesce( field.getDefaultValue() ) ); item.setText( i++, field.getAllowNull() ? 
NullableValuesEnum.YES.getValue() : NullableValuesEnum.NO.getValue() ); } ); } private String coalesce( String value ) { return value == null ? "" : value; } private void populateFieldsUI( List fields, TableView wFields, BiConsumer converter ) { int nrFields = fields.size(); for ( int i = 0; i < nrFields; i++ ) { TableItem item = null; if ( i < wFields.table.getItemCount() ) { item = wFields.table.getItem( i ); } else { item = new TableItem( wFields.table, SWT.NONE ); } converter.accept( fields.get( i ), item ); } } protected void getFields() { try { RowMetaInterface r = transMeta.getPrevStepFields( stepname ); if ( r != null ) { TableItemInsertListener listener = ( tableItem, v ) -> true; getFieldsFromPreviousStep( r, wOutputFields, 1, new int[]{1, 2}, new int[]{3}, 4, 5, true, listener ); } } catch ( KettleException ke ) { new ErrorDialog( shell, BaseMessages.getString( PKG, "System.Dialog.GetFieldsFailed.Title" ), BaseMessages .getString( PKG, "System.Dialog.GetFieldsFailed.Message" ), ke ); } } private MessageDialog getFieldsChoiceDialog( Shell shell, int existingFields, int newFields ) { MessageDialog messageDialog = new MessageDialog( shell, BaseMessages.getString( PKG, "OrcOutputDialog.GetFieldsChoice.Title" ), // "Warning!" null, BaseMessages.getString( PKG, "OrcOutputDialog.GetFieldsChoice.Message", "" + existingFields, "" + newFields ), MessageDialog.WARNING, new String[] { BaseMessages.getString( PKG, "OrcOutputDialog.AddNew" ), BaseMessages.getString( PKG, "OrcOutputDialog.Add" ), BaseMessages.getString( PKG, "OrcOutputDialog.ClearAndAdd" ), BaseMessages.getString( PKG, "OrcOutputDialog.Cancel" ), }, 0 ); MessageDialog.setDefaultImage( GUIResource.getInstance().getImageSpoon() ); return messageDialog; } private void getFieldsFromPreviousStep( RowMetaInterface row, TableView tableView, int keyColumn, int[] nameColumn, int[] dataTypeColumn, int lengthColumn, int precisionColumn, boolean optimizeWidth, TableItemInsertListener listener ) { if ( row == null || row.size() == 0 ) { return; // nothing to do } Table table = tableView.table; // get a list of all the non-empty keys (names) // List keys = new ArrayList<>(); for ( int i = 0; i < table.getItemCount(); i++ ) { TableItem tableItem = table.getItem( i ); String key = tableItem.getText( keyColumn ); if ( !Utils.isEmpty( key ) && keys.indexOf( key ) < 0 ) { keys.add( key ); } } int choice = 0; if ( !keys.isEmpty() ) { // Ask what we should do with the existing data in the step. 
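// Button index returned by the dialog: 0 = add only new fields, 1 = add all fields, 2 = clear the table and add all, 3 (or 255 when the dialog is closed) = cancel.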
// MessageDialog getFieldsChoiceDialog = getFieldsChoiceDialog( tableView.getShell(), keys.size(), row.size() ); int idx = getFieldsChoiceDialog.open(); choice = idx & 0xFF; } if ( choice == 3 || choice == 255 ) { return; // Cancel clicked } if ( choice == 2 ) { tableView.clearAll( false ); } for ( int i = 0; i < row.size(); i++ ) { ValueMetaInterface v = row.getValueMeta( i ); boolean add = true; // hang on, see if it's not yet in the table view if ( choice == 0 && keys.indexOf( v.getName() ) >= 0 ) { add = false; } if ( add ) { TableItem tableItem = new TableItem( table, SWT.NONE ); for ( int c = 0; c < nameColumn.length; c++ ) { tableItem.setText( nameColumn[ c ], Const.NVL( v.getName(), "" ) ); } String orcTypeName = OrcTypeConverter.convertToOrcType( v.getType() ); if ( dataTypeColumn != null ) { for ( int c = 0; c < dataTypeColumn.length; c++ ) { tableItem.setText( dataTypeColumn[ c ], orcTypeName ); } } if ( orcTypeName.equals( OrcSpec.DataType.DECIMAL.getName() ) ) { if ( lengthColumn > 0 && v.getLength() > 0 ) { tableItem.setText( lengthColumn, Integer.toString( v.getLength() ) ); } else { // Set the default precision tableItem.setText( lengthColumn, Integer.toString( OrcSpec.DEFAULT_DECIMAL_PRECISION ) ); } if ( precisionColumn > 0 && v.getPrecision() >= 0 ) { tableItem.setText( precisionColumn, Integer.toString( v.getPrecision() ) ); } else { // Set the default scale tableItem.setText( precisionColumn, Integer.toString( OrcSpec.DEFAULT_DECIMAL_SCALE ) ); } } else if ( orcTypeName.equals( OrcSpec.DataType.FLOAT.getName() ) || orcTypeName.equals( OrcSpec.DataType.DOUBLE.getName() ) ) { if ( precisionColumn > 0 && v.getPrecision() > 0 ) { tableItem.setText( precisionColumn, Integer.toString( v.getPrecision() ) ); } } if ( listener != null && !listener.tableItemInserted( tableItem, v ) ) { tableItem.dispose(); // remove it again } } } tableView.removeEmptyRows(); tableView.setRowNums(); if ( optimizeWidth ) { tableView.optWidth( true ); } } @Override protected int getWidth() { return SHELL_WIDTH; } @Override protected int getHeight() { return SHELL_HEIGHT; } @Override protected Listener getPreview() { return null; } @Override protected SelectionOperation selectionOperation() { return SelectionOperation.SAVE_TO_FILE_FOLDER; } } ================================================ FILE: kettle-plugins/formats/core/src/main/java/org/pentaho/big/data/kettle/plugins/formats/impl/orc/output/OrcOutputMeta.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. 
* * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.formats.impl.orc.output; import org.pentaho.big.data.kettle.plugins.formats.impl.NamedClusterResolver; import org.pentaho.big.data.kettle.plugins.formats.orc.output.OrcOutputMetaBase; import org.pentaho.di.core.annotations.Step; import org.pentaho.di.core.injection.InjectionSupported; import org.pentaho.di.trans.Trans; import org.pentaho.di.trans.TransMeta; import org.pentaho.di.trans.step.StepDataInterface; import org.pentaho.di.trans.step.StepInterface; import org.pentaho.di.trans.step.StepMeta; @Step( id = "OrcOutput", image = "OO.svg", name = "OrcOutput.Name", description = "OrcOutput.Description", categoryDescription = "i18n:org.pentaho.di.trans.step:BaseStep.Category.BigData", i18nPackageName = "org.pentaho.di.trans.steps.orc" ) @InjectionSupported( localizationPrefix = "OrcOutput.Injection.", groups = {"FIELDS"} ) public class OrcOutputMeta extends OrcOutputMetaBase { private final NamedClusterResolver namedClusterResolver; public OrcOutputMeta() { this( NamedClusterResolver.getInstance() ); } public OrcOutputMeta( NamedClusterResolver namedClusterResolver ) { this.namedClusterResolver = namedClusterResolver; } @Override public StepInterface getStep( StepMeta stepMeta, StepDataInterface stepDataInterface, int copyNr, TransMeta transMeta, Trans trans ) { return new OrcOutput( stepMeta, stepDataInterface, copyNr, transMeta, trans ); } @Override public StepDataInterface getStepData() { return new OrcOutputData(); } public NamedClusterResolver getNamedClusterResolver() { return namedClusterResolver; } } ================================================ FILE: kettle-plugins/formats/core/src/main/java/org/pentaho/big/data/kettle/plugins/formats/impl/output/PvfsFileAliaser.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.formats.impl.output; import org.apache.commons.io.IOUtils; import org.apache.commons.vfs2.FileObject; import org.apache.commons.vfs2.FileSystemException; import org.pentaho.di.core.bowl.Bowl; import org.pentaho.di.core.exception.KettleFileException; import org.pentaho.di.core.logging.LogChannelInterface; import org.pentaho.di.core.variables.VariableSpace; import org.pentaho.di.core.vfs.AliasedFileObject; import org.pentaho.di.core.vfs.IKettleVFS; import org.pentaho.di.core.vfs.KettleVFS; import org.pentaho.hadoop.shim.api.format.IPvfsAliasGenerator; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.nio.file.FileAlreadyExistsException; /** * Logic to use a temporary file for output and then copy that file to some VFS/PVFS scheme that wasn't originally * supported for the output content.
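 * <p>
 * Typical call sequence (as used by the Parquet output step in this module): obtain the path to write to from
 * {@code generateAlias()} - either the original path or a temporary alias - write the step output to that path,
 * then call {@code copyFileToFinalDestination()} and finally {@code deleteTempFileAndFolder()}.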
*/ public class PvfsFileAliaser { private String finalFilePath; private String temporaryFilePath; private VariableSpace variableSpace; private IPvfsAliasGenerator aliasGenerator; private boolean isOverwriteOutput; private LogChannelInterface log; private IKettleVFS ikettleVFS; public PvfsFileAliaser( Bowl bowl, String finalFilePath, VariableSpace variableSpace, IPvfsAliasGenerator aliasGenerator, boolean isOverwriteOutput, LogChannelInterface log ) { this.ikettleVFS = KettleVFS.getInstance( bowl ); this.finalFilePath = finalFilePath; this.variableSpace = variableSpace; this.aliasGenerator = aliasGenerator; this.isOverwriteOutput = isOverwriteOutput; this.log = log; } public String generateAlias() throws KettleFileException, FileSystemException, FileAlreadyExistsException { FileObject pvfsFileObject = ikettleVFS.getFileObject( finalFilePath, variableSpace ); if ( AliasedFileObject.isAliasedFile( pvfsFileObject ) ) { finalFilePath = ( (AliasedFileObject) pvfsFileObject ).getOriginalURIString(); } //See if we need to use a another URI because the HadoopFileSystem is not supported for this URL. String aliasedFile = aliasGenerator.generateAlias( finalFilePath ); temporaryFilePath = finalFilePath; if ( aliasedFile != null ) { if ( pvfsFileObject.exists() ) { if ( isOverwriteOutput ) { pvfsFileObject.delete(); } else { throw new FileAlreadyExistsException( temporaryFilePath ); } } temporaryFilePath = aliasedFile; //set the outputFile to the temporary alias file } return temporaryFilePath; } public void copyFileToFinalDestination() throws KettleFileException, IOException { if ( aliasingIsActive() ) { FileObject srcFile = ikettleVFS.getFileObject( temporaryFilePath, variableSpace ); FileObject destFile = ikettleVFS.getFileObject( finalFilePath, variableSpace ); try ( InputStream in = KettleVFS.getInputStream( srcFile ); OutputStream out = ikettleVFS.getOutputStream( destFile, false ) ) { IOUtils.copy( in, out ); } } } public void deleteTempFileAndFolder() { try { if ( aliasingIsActive() ) { FileObject srcFile = ikettleVFS.getFileObject( temporaryFilePath, variableSpace ); srcFile.getParent().deleteAll(); } } catch ( FileSystemException | KettleFileException e ) { log.logError( e.getMessage(), e ); } } private boolean aliasingIsActive() { return !finalFilePath.equals( temporaryFilePath ) && temporaryFilePath != null && !s3nSwitchedTos3a(); } private boolean s3nSwitchedTos3a() { return finalFilePath != null && temporaryFilePath != null && finalFilePath.startsWith( "s3n" ) && temporaryFilePath .startsWith( "s3a" ); } } ================================================ FILE: kettle-plugins/formats/core/src/main/java/org/pentaho/big/data/kettle/plugins/formats/impl/parquet/BaseParquetStepDialog.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. 
* * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.formats.impl.parquet; import org.eclipse.jface.window.DefaultToolTip; import org.eclipse.jface.window.ToolTip; import org.eclipse.swt.SWT; import org.eclipse.swt.events.ModifyListener; import org.eclipse.swt.events.MouseEvent; import org.eclipse.swt.events.MouseTrackAdapter; import org.eclipse.swt.events.ShellAdapter; import org.eclipse.swt.events.ShellEvent; import org.eclipse.swt.graphics.GC; import org.eclipse.swt.graphics.Point; import org.eclipse.swt.layout.FormAttachment; import org.eclipse.swt.layout.FormData; import org.eclipse.swt.layout.FormLayout; import org.eclipse.swt.widgets.Button; import org.eclipse.swt.widgets.Composite; import org.eclipse.swt.widgets.Control; import org.eclipse.swt.widgets.Display; import org.eclipse.swt.widgets.Label; import org.eclipse.swt.widgets.Listener; import org.eclipse.swt.widgets.Shell; import org.eclipse.swt.widgets.Table; import org.eclipse.swt.widgets.TableItem; import org.eclipse.swt.widgets.Text; import org.pentaho.di.core.util.StringUtil; import org.pentaho.di.core.util.Utils; import org.pentaho.di.i18n.BaseMessages; import org.pentaho.di.trans.TransMeta; import org.pentaho.di.trans.step.BaseStepMeta; import org.pentaho.di.trans.step.StepDialogInterface; import org.pentaho.di.trans.step.StepMetaInterface; import org.pentaho.di.ui.core.ConstUI; import org.pentaho.di.ui.core.events.dialog.SelectionAdapterFileDialogTextVar; import org.pentaho.di.ui.core.events.dialog.SelectionAdapterOptions; import org.pentaho.di.ui.core.events.dialog.SelectionOperation; import org.pentaho.di.ui.core.gui.GUIResource; import org.pentaho.di.ui.core.widget.TextVar; import org.pentaho.di.ui.trans.step.BaseStepDialog; public abstract class BaseParquetStepDialog extends BaseStepDialog implements StepDialogInterface { public static final int MARGIN = 15; public static final int FIELDS_SEP = 10; public static final int FIELD_LABEL_SEP = 5; public static final int FIELD_TINY = 100; public static final int FIELD_SMALL = 150; public static final int FIELD_MEDIUM = 250; public static final int FIELD_LARGE = 350; public static final int TABLE_ITEM_MARGIN = 2; public static final int TOOLTIP_SHOW_DELAY = 350; public static final int TOOLTIP_HIDE_DELAY = 2000; // width of the icon in a varfield public static final int VAR_EXTRA_WIDTH = GUIResource.getInstance().getImageVariable().getBounds().width; protected static final Class BPKG = BaseParquetStepDialog.class; private static final String ELLIPSIS = "..."; protected final Class parquetStepDialogClass = getClass(); protected T meta; protected ModifyListener lsMod; protected TextVar wPath; protected Button wbBrowse; public BaseParquetStepDialog( Shell parent, T in, TransMeta transMeta, String sname ) { super( parent, (BaseStepMeta) in, transMeta, sname ); meta = in; } public static String shortenText( GC gc, String text, final int targetWidth ) { if ( Utils.isEmpty( text ) ) { return ""; } int textWidth = gc.textExtent( text ).x; int extra = gc.textExtent( ELLIPSIS ).x + 2 * TABLE_ITEM_MARGIN; if ( targetWidth <= extra || textWidth <= targetWidth ) { return text; } int len = text.length(); for ( int chomp = 1; chomp < len && textWidth + extra >= targetWidth; chomp++ ) { text = text.substring( 0, text.length() - 1 ); textWidth = gc.textExtent( text ).x; } return text + ELLIPSIS; } public static void setIntegerOnly( TextVar textVar ) { textVar.getTextWidget().addVerifyListener( 
e -> { if ( !StringUtil.isEmpty( e.text ) && !StringUtil.isVariable( e.text ) && !StringUtil.IsInteger( e.text ) ) { e.doit = false; } } ); } @Override public String open() { Shell parent = getParent(); Display display = parent.getDisplay(); shell = new Shell( parent, SWT.DIALOG_TRIM | SWT.RESIZE ); props.setLook( shell ); setShellImage( shell, meta ); lsMod = e -> meta.setChanged(); changed = meta.hasChanged(); createUI(); // Detect X or ALT-F4 or something that kills this window... shell.addShellListener( new ShellAdapter() { @Override public void shellClosed( ShellEvent e ) { cancel(); } } ); int height = Math.max( getMinHeight( shell, getWidth() ), getHeight() ); shell.setMinimumSize( getWidth(), height ); shell.setSize( getWidth(), height ); getData( meta ); shell.open(); wStepname.setFocus(); while ( !shell.isDisposed() ) { if ( !display.readAndDispatch() ) { display.sleep(); } } return stepname; } protected abstract void createUI(); protected Control createFooter( Composite shell ) { wCancel = new Button( shell, SWT.PUSH ); wCancel.setText( getMsg( "System.Button.Cancel" ) ); wCancel.addListener( SWT.Selection, lsCancel ); new FD( wCancel ).right( 100, 0 ).bottom( 100, 0 ).apply(); // Some buttons wOK = new Button( shell, SWT.PUSH ); wOK.setText( getMsg( "System.Button.OK" ) ); wOK.addListener( SWT.Selection, lsOK ); new FD( wOK ).right( wCancel, -FIELD_LABEL_SEP ).bottom( 100, 0 ).apply(); lsPreview = getPreview(); if ( lsPreview != null ) { wPreview = new Button( shell, SWT.PUSH ); wPreview.setText( getBaseMsg( "BaseStepDialog.Preview" ) ); wPreview.pack(); wPreview.addListener( SWT.Selection, lsPreview ); int offset = wPreview.getBounds().width / 2; new FD( wPreview ).left( 50, -offset ).bottom( 100, 0 ).apply(); } return wCancel; } protected void cancel() { stepname = null; meta.setChanged( changed ); dispose(); } protected void ok() { if ( Utils.isEmpty( wStepname.getText() ) ) { return; } stepname = wStepname.getText(); getInfo( meta, false ); dispose(); } protected abstract String getStepTitle(); /** * Read the data from the meta object and show it in this dialog. * * @param meta The meta object to obtain the data from. */ protected abstract void getData( T meta ); /** * Fill meta object from UI options. * * @param meta meta object * @param preview flag indicating whether preview or real options should be used. Currently, only one option differs for preview - * the EOL chars, which are treated as "mixed" so that any file can be previewed.
*/ protected abstract void getInfo( T meta, boolean preview ); protected abstract int getWidth(); protected abstract int getHeight(); protected abstract Listener getPreview(); protected Label createHeader() { // main form FormLayout formLayout = new FormLayout(); formLayout.marginWidth = 15; formLayout.marginHeight = 15; shell.setLayout( formLayout ); // title shell.setText( getStepTitle() ); // buttons lsOK = e -> ok(); lsCancel = e -> cancel(); // Stepname label wlStepname = new Label( shell, SWT.RIGHT ); wlStepname.setText( getBaseMsg( "BaseStepDialog.StepName" ) ); props.setLook( wlStepname ); new FD( wlStepname ).left( 0, 0 ).top( 0, 0 ).apply(); // Stepname field wStepname = new Text( shell, SWT.SINGLE | SWT.LEFT | SWT.BORDER ); wStepname.setText( stepname ); props.setLook( wStepname ); wStepname.addModifyListener( lsMod ); new FD( wStepname ).left( 0, 0 ).top( wlStepname, FIELD_LABEL_SEP ).width( FIELD_MEDIUM ).rright().apply(); // separator Label separator = new Label( shell, SWT.HORIZONTAL | SWT.SEPARATOR ); FormData fdSpacer = new FormData(); fdSpacer.height = 2; fdSpacer.left = new FormAttachment( 0, 0 ); fdSpacer.top = new FormAttachment( wStepname, 15 ); fdSpacer.right = new FormAttachment( 100, 0 ); separator.setLayoutData( fdSpacer ); addIcon(); return separator; } protected void addIcon() { Label wicon = new Label( shell, SWT.RIGHT ); String stepId = meta.getParentStepMeta().getStepID(); wicon.setImage( GUIResource.getInstance().getImagesSteps().get( stepId ).getAsBitmapForSize( shell.getDisplay(), ConstUI.LARGE_ICON_SIZE, ConstUI.LARGE_ICON_SIZE ) ); FormData fdlicon = new FormData(); fdlicon.top = new FormAttachment( 0, 0 ); fdlicon.right = new FormAttachment( 100, 0 ); wicon.setLayoutData( fdlicon ); props.setLook( wicon ); } protected Control addFileWidgets( Control prev ) { Label wlPath = new Label( shell, SWT.RIGHT ); wlPath.setText( getBaseMsg( "ParquetDialog.Filename.Label" ) ); props.setLook( wlPath ); new FD( wlPath ).left( 0, 0 ).top( prev, MARGIN ).apply(); wPath = new TextVar( transMeta, shell, SWT.SINGLE | SWT.LEFT | SWT.BORDER ); wPath.addModifyListener( event -> { if ( wPreview != null ) { wPreview.setEnabled( !Utils.isEmpty( wPath.getText() ) ); } } ); props.setLook( wPath ); wPath.addModifyListener( lsMod ); new FD( wPath ).left( 0, 0 ).top( wlPath, FIELD_LABEL_SEP ).width( FIELD_LARGE + VAR_EXTRA_WIDTH ).rright().apply(); wbBrowse = new Button( shell, SWT.PUSH ); props.setLook( wbBrowse ); wbBrowse.setText( getMsg( "System.Button.Browse" ) ); wbBrowse.addSelectionListener( new SelectionAdapterFileDialogTextVar( log, wPath, transMeta, new SelectionAdapterOptions( transMeta.getBowl(), selectionOperation() ) ) ); int bOffset = ( wbBrowse.computeSize( SWT.DEFAULT, SWT.DEFAULT, false ).y - wPath.computeSize( SWT.DEFAULT, SWT.DEFAULT, false ).y ) / 2; new FD( wbBrowse ).left( wPath, FIELD_LABEL_SEP ).top( wlPath, FIELD_LABEL_SEP - bOffset ).apply(); return wPath; } protected abstract SelectionOperation selectionOperation(); protected String getBaseMsg( String key ) { return BaseMessages.getString( BPKG, key ); } protected String getMsg( String key ) { return BaseMessages.getString( parquetStepDialogClass, key ); } protected int getMinHeight( Composite comp, int minWidth ) { comp.pack(); return comp.computeSize( minWidth, SWT.DEFAULT ).y; } protected void setTruncatedColumn( Table table, int targetColumn ) { table.addListener( SWT.EraseItem, event -> { if ( event.index == targetColumn ) { event.detail &= ~SWT.FOREGROUND; } } ); table.addListener( SWT.PaintItem, 
event -> { TableItem item = (TableItem) event.item; int colIdx = event.index; if ( colIdx == targetColumn ) { String contents = item.getText( colIdx ); if ( Utils.isEmpty( contents ) ) { return; } Point size = event.gc.textExtent( contents ); int targetWidth = item.getBounds( colIdx ).width; int yOffset = Math.max( 0, ( event.height - size.y ) / 2 ); if ( size.x > targetWidth ) { contents = shortenText( event.gc, contents, targetWidth ); } event.gc.drawText( contents, event.x + TABLE_ITEM_MARGIN, event.y + yOffset, true ); } } ); } protected void addColumnTooltip( Table table, int columnIndex ) { final DefaultToolTip toolTip = new DefaultToolTip( table, ToolTip.RECREATE, true ); toolTip.setRespectMonitorBounds( true ); toolTip.setRespectDisplayBounds( true ); toolTip.setPopupDelay( TOOLTIP_SHOW_DELAY ); toolTip.setHideDelay( TOOLTIP_HIDE_DELAY ); toolTip.setShift( new Point( ConstUI.TOOLTIP_OFFSET, ConstUI.TOOLTIP_OFFSET ) ); table.addMouseTrackListener( new MouseTrackAdapter() { @Override public void mouseHover( MouseEvent e ) { Point coord = new Point( e.x, e.y ); TableItem item = table.getItem( coord ); if ( item != null && item.getBounds( columnIndex ).contains( coord ) ) { String contents = item.getText( columnIndex ); if ( !Utils.isEmpty( contents ) ) { toolTip.setText( contents ); toolTip.show( coord ); return; } } toolTip.hide(); } @Override public void mouseExit( MouseEvent e ) { toolTip.hide(); } } ); } /** * Class for apply layout settings to SWT controls. */ protected class FD { private final Control control; private final FormData formData; public FD( Control control ) { this.control = control; props.setLook( control ); formData = new FormData(); } private int getControlOffset( Control control, int controlWidth ) { // remaining space for min size match return getWidth() - getMarginWidths( control ) - controlWidth; } private int getMarginWidths( Control control ) { // get the width added by container margins and (wm-specific) decorations int extraWidth = 0; for ( Composite parent = control.getParent(); !parent.equals( getParent() ); parent = parent.getParent() ) { extraWidth += parent.computeTrim( 0, 0, 0, 0 ).width; if ( parent.getLayout() instanceof FormLayout ) { extraWidth += 2 * ( (FormLayout) parent.getLayout() ).marginWidth; } } return extraWidth; } public FD width( int width ) { formData.width = width; return this; } public FD height( int height ) { formData.height = height; return this; } public FD top( int numerator, int offset ) { formData.top = new FormAttachment( numerator, offset ); return this; } public FD top( Control control, int offset ) { formData.top = new FormAttachment( control, offset ); return this; } public FD bottom( int numerator, int offset ) { formData.bottom = new FormAttachment( numerator, offset ); return this; } public FD bottom( Control control, int offset ) { formData.bottom = new FormAttachment( control, offset ); return this; } public FD left( int numerator, int offset ) { formData.left = new FormAttachment( numerator, offset ); return this; } public FD left( int numerator ) { return left( numerator, 0 ); } public FD left( Control control, int offset ) { formData.left = new FormAttachment( control, offset ); return this; } public FD right( int numerator, int offset ) { formData.right = new FormAttachment( numerator, offset ); return this; } public FD rright() { formData.right = new FormAttachment( 100, -getControlOffset( control, formData.width ) ); return this; } public FD right( Control control, int offset ) { formData.right = new 
FormAttachment( control, offset ); return this; } public void apply() { control.setLayoutData( formData ); } } } ================================================ FILE: kettle-plugins/formats/core/src/main/java/org/pentaho/big/data/kettle/plugins/formats/impl/parquet/input/ParquetInput.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.formats.impl.parquet.input; import org.apache.commons.vfs2.FileObject; import org.pentaho.big.data.kettle.plugins.formats.parquet.input.ParquetInputField; import org.pentaho.big.data.kettle.plugins.formats.parquet.input.ParquetInputMetaBase; import org.pentaho.di.core.bowl.Bowl; import org.pentaho.di.core.RowMetaAndData; import org.pentaho.di.core.exception.KettleException; import org.pentaho.di.core.util.StringUtil; import org.pentaho.di.core.vfs.AliasedFileObject; import org.pentaho.di.core.vfs.KettleVFS; import org.pentaho.di.trans.Trans; import org.pentaho.di.trans.TransMeta; import org.pentaho.di.trans.step.StepDataInterface; import org.pentaho.di.trans.step.StepMeta; import org.pentaho.di.trans.step.StepMetaInterface; import org.pentaho.di.trans.steps.file.BaseFileInputStep; import org.pentaho.di.trans.steps.file.IBaseFileInputReader; import org.pentaho.hadoop.shim.api.cluster.NamedCluster; import org.pentaho.hadoop.shim.api.cluster.NamedClusterServiceLocator; import org.pentaho.hadoop.shim.api.format.FormatService; import org.pentaho.hadoop.shim.api.format.IParquetInputField; import org.pentaho.hadoop.shim.api.format.IPentahoInputFormat.IPentahoInputSplit; import org.pentaho.hadoop.shim.api.format.IPentahoParquetInputFormat; import java.nio.file.NoSuchFileException; import java.util.ArrayList; import java.util.List; import java.util.Map; import java.util.function.Function; import java.util.stream.Collectors; public class ParquetInput extends BaseFileInputStep { public static final long SPLIT_SIZE = 128 * 1024 * 1024L; public ParquetInput( StepMeta stepMeta, StepDataInterface stepDataInterface, int copyNr, TransMeta transMeta, Trans trans ) { super( stepMeta, stepDataInterface, copyNr, transMeta, trans ); } public static List retrieveSchema( Bowl bowl, NamedClusterServiceLocator namedClusterServiceLocator, NamedCluster namedCluster, String path ) throws Exception { FormatService formatService = namedClusterServiceLocator.getService( namedCluster, FormatService.class ); IPentahoParquetInputFormat in = formatService.createInputFormat( IPentahoParquetInputFormat.class, namedCluster ); FileObject inputFileObject = KettleVFS.getInstance( bowl ).getFileObject( path ); if ( AliasedFileObject.isAliasedFile( inputFileObject ) ) { path = ( (AliasedFileObject) inputFileObject ).getOriginalURIString(); } return in.readSchema( path ); } public static List createSchemaFromMeta( ParquetInputMetaBase meta ) { List fields = new ArrayList<>(); for ( ParquetInputField f : meta.getInputFields() ) { fields.add( f ); } return fields; } @Override public boolean processRow( StepMetaInterface smi, StepDataInterface sdi ) throws KettleException { meta = (ParquetInputMeta) smi; data = (ParquetInputData) sdi; try { if ( data.splits == null ) { initSplits(); } if 
( data.currentSplit >= data.splits.size() ) { setOutputDone(); return false; } if ( data.reader == null ) { openReader( data ); } if ( data.rowIterator.hasNext() ) { RowMetaAndData row = data.rowIterator.next(); putRow( row.getRowMeta(), row.getData() ); return true; } else { data.reader.close(); data.reader = null; logDebug( "Close split {0}", data.currentSplit ); data.currentSplit++; return true; } } catch ( NoSuchFileException ex ) { throw new KettleException( "No input file" ); } catch ( KettleException ex ) { throw ex; } catch ( Exception ex ) { throw new KettleException( ex ); } } void initSplits() throws Exception { FormatService formatService = meta.getNamedClusterResolver().getNamedClusterServiceLocator() .getService( getNamedCluster(), FormatService.class ); if ( meta.inputFiles == null || meta.inputFiles.fileName == null || meta.inputFiles.fileName.length == 0 ) { throw new KettleException( "No input files defined" ); } String[] resolvedInputFileNames = new String[ meta.inputFiles.fileName.length ]; int i = 0; for ( String file : meta.inputFiles.fileName ) { resolvedInputFileNames[ i ] = StringUtil.toUri( environmentSubstitute( file ) ).toString(); FileObject inputFileObject = KettleVFS.getInstance( getTransMeta().getBowl() ) .getFileObject( resolvedInputFileNames[ i ], getTransMeta() ); if ( AliasedFileObject.isAliasedFile( inputFileObject ) ) { resolvedInputFileNames[ i ] = ( (AliasedFileObject) inputFileObject ).getOriginalURIString(); } i++; } data.input = formatService.createInputFormat( IPentahoParquetInputFormat.class, getNamedCluster() ); // Pentaho 8.0 transformations will have the formatType set to 0. Get the fields from the schema and set the // formatType to the formatType retrieved from the schema. List actualFileFields = ParquetInput.retrieveSchema( getTransMeta().getBowl(), meta.getNamedClusterResolver().getNamedClusterServiceLocator(), getNamedCluster(), resolvedInputFileNames[ 0 ] ); if ( meta.isIgnoreEmptyFolder() && ( actualFileFields.isEmpty() ) ) { data.splits = new ArrayList<>(); logBasic( "No Parquet input files found." 
); } else { Map fieldNamesToTypes = actualFileFields.stream() .collect( Collectors.toMap( IParquetInputField::getFormatFieldName, Function.identity() ) ); for ( ParquetInputField f : meta.getInputFields() ) { if ( fieldNamesToTypes.containsKey( f.getFormatFieldName() ) ) { if ( f.getFormatType() == 0 ) { f.setFormatType( fieldNamesToTypes.get( f.getFormatFieldName() ).getFormatType() ); } f.setPrecision( fieldNamesToTypes.get( f.getFormatFieldName() ).getPrecision() ); f.setScale( fieldNamesToTypes.get( f.getFormatFieldName() ).getScale() ); } } data.input.setSchema( createSchemaFromMeta( meta ) ); if ( resolvedInputFileNames != null && resolvedInputFileNames.length == 1 ) { data.input.setInputFile( resolvedInputFileNames[ 0 ] ); } else if ( resolvedInputFileNames != null && resolvedInputFileNames.length > 1 ) { data.input.setInputFiles( resolvedInputFileNames ); } data.input.setSplitSize( SPLIT_SIZE ); data.splits = data.input.getSplits(); logDebug( "Input split count: {0}", data.splits.size() ); } data.currentSplit = 0; } private NamedCluster getNamedCluster() { return meta.getNamedClusterResolver().resolveNamedCluster( environmentSubstitute( meta.getFilename() ) ); } void openReader( ParquetInputData data ) throws Exception { logDebug( "Open split {0}", data.currentSplit ); IPentahoInputSplit sp = data.splits.get( data.currentSplit ); data.reader = data.input.createRecordReader( sp ); data.rowIterator = data.reader.iterator(); } @Override protected boolean init() { return true; } @Override protected IBaseFileInputReader createReader( ParquetInputMeta meta, ParquetInputData data, FileObject file ) throws Exception { return null; } } ================================================ FILE: kettle-plugins/formats/core/src/main/java/org/pentaho/big/data/kettle/plugins/formats/impl/parquet/input/ParquetInputData.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.formats.impl.parquet.input; import java.util.Iterator; import java.util.List; import org.pentaho.di.core.RowMetaAndData; import org.pentaho.di.core.row.RowMetaInterface; import org.pentaho.di.trans.steps.file.BaseFileInputStepData; import org.pentaho.hadoop.shim.api.format.IPentahoInputFormat.IPentahoRecordReader; import org.pentaho.hadoop.shim.api.format.IPentahoInputFormat.IPentahoInputSplit; import org.pentaho.hadoop.shim.api.format.IPentahoParquetInputFormat; public class ParquetInputData extends BaseFileInputStepData { IPentahoParquetInputFormat input; List splits; int currentSplit; IPentahoRecordReader reader; Iterator rowIterator; RowMetaInterface outputRowMeta; } ================================================ FILE: kettle-plugins/formats/core/src/main/java/org/pentaho/big/data/kettle/plugins/formats/impl/parquet/input/ParquetInputDialog.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. 
* * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.formats.impl.parquet.input; import org.apache.commons.lang.StringUtils; import org.eclipse.swt.SWT; import org.eclipse.swt.layout.FormAttachment; import org.eclipse.swt.layout.FormData; import org.eclipse.swt.layout.FormLayout; import org.eclipse.swt.widgets.Button; import org.eclipse.swt.widgets.Control; import org.eclipse.swt.widgets.Group; import org.eclipse.swt.widgets.Label; import org.eclipse.swt.widgets.Listener; import org.eclipse.swt.widgets.Shell; import org.eclipse.swt.widgets.TableItem; import org.pentaho.di.core.annotations.PluginDialog; import org.pentaho.big.data.kettle.plugins.formats.impl.parquet.BaseParquetStepDialog; import org.pentaho.big.data.kettle.plugins.formats.parquet.input.ParquetInputField; import org.pentaho.di.core.Const; import org.pentaho.di.core.row.value.ValueMetaFactory; import org.pentaho.di.core.util.Utils; import org.pentaho.di.i18n.BaseMessages; import org.pentaho.di.trans.Trans; import org.pentaho.di.trans.TransMeta; import org.pentaho.di.trans.TransPreviewFactory; import org.pentaho.di.ui.core.dialog.EnterNumberDialog; import org.pentaho.di.ui.core.dialog.EnterTextDialog; import org.pentaho.di.ui.core.dialog.ErrorDialog; import org.pentaho.di.ui.core.dialog.PreviewRowsDialog; import org.pentaho.di.ui.core.events.dialog.SelectionOperation; import org.pentaho.di.ui.core.widget.ColumnInfo; import org.pentaho.di.ui.core.widget.ColumnsResizer; import org.pentaho.di.ui.core.widget.TableView; import org.pentaho.di.ui.trans.dialog.TransPreviewProgressDialog; import org.pentaho.hadoop.shim.api.format.IParquetInputField; import org.pentaho.hadoop.shim.api.format.ParquetSpec; import java.util.List; @PluginDialog( id = "ParquetInput", image = "PI.svg", pluginType = PluginDialog.PluginType.STEP, documentationUrl = "pdi-transformation-steps-reference-overview/parquet-input" ) public class ParquetInputDialog extends BaseParquetStepDialog { private static final int SHELL_WIDTH = 526; private static final int SHELL_HEIGHT = 506; private static final int PARQUET_PATH_COLUMN_INDEX = 1; private static final int FIELD_NAME_COLUMN_INDEX = 2; private static final int FIELD_TYPE_COLUMN_INDEX = 3; private static final int FORMAT_COLUMN_INDEX = 4; private static final int FIELD_SOURCE_TYPE_COLUMN_INDEX = 5; private static final String UNABLE_TO_LOAD_SCHEMA_FROM_CONTAINER_FILE = "ParquetInput.Error.UnableToLoadSchemaFromContainerFile"; private TableView wInputFields; private Button wPassThruFields; private Button wIgnoreEmptyFolder; public ParquetInputDialog( Shell parent, Object in, TransMeta transMeta, String sname ) { super( parent, (ParquetInputMeta) in, transMeta, sname ); } @Override protected void createUI( ) { Control prev = createHeader(); //main fields prev = addFileWidgets( prev ); createFooter( shell ); Label separator = new Label( shell, SWT.HORIZONTAL | SWT.SEPARATOR ); FormData fdSpacer = new FormData(); fdSpacer.height = 2; fdSpacer.left = new FormAttachment( 0, 0 ); fdSpacer.bottom = new FormAttachment( wCancel, -MARGIN ); fdSpacer.right = new FormAttachment( 100, 0 ); separator.setLayoutData( fdSpacer ); wIgnoreEmptyFolder = new Button( shell, SWT.CHECK ); wIgnoreEmptyFolder.setText( BaseMessages.getString( parquetStepDialogClass, "ParquetInputDialog.IgnoreEmptyFolder.Label" ) ); wIgnoreEmptyFolder.setToolTipText( BaseMessages.getString( parquetStepDialogClass, "ParquetInputDialog.IgnoreEmptyFolder.Tooltip" ) 
); wIgnoreEmptyFolder.setOrientation( SWT.LEFT_TO_RIGHT ); props.setLook( wIgnoreEmptyFolder ); new FD( wIgnoreEmptyFolder ).left( 0, 0 ).top( prev, MARGIN ).apply(); Group fieldsContainer = new Group( shell, SWT.SHADOW_IN ); fieldsContainer.setLayout( new FormLayout() ); fieldsContainer.setText( BaseMessages.getString( parquetStepDialogClass, "ParquetInputDialog.Fields.Label" ) ); new FD( fieldsContainer ).left( 0, 0 ).top( wIgnoreEmptyFolder, MARGIN ).right( 100, 0 ).bottom( separator, -MARGIN ).apply(); // Accept fields from previous steps? // wPassThruFields = new Button( fieldsContainer, SWT.CHECK ); wPassThruFields.setText( BaseMessages.getString( parquetStepDialogClass, "ParquetInputDialog.PassThruFields.Label" ) ); wPassThruFields.setToolTipText( BaseMessages.getString( parquetStepDialogClass, "ParquetInputDialog.PassThruFields.Tooltip" ) ); wPassThruFields.setOrientation( SWT.LEFT_TO_RIGHT ); props.setLook( wPassThruFields ); new FD( wPassThruFields ).left( 0, MARGIN ).top( 0, MARGIN ).apply(); //get fields button lsGet = e -> populateFieldsTable(); Button wGetFields = new Button( fieldsContainer, SWT.PUSH ); wGetFields.setText( BaseMessages.getString( parquetStepDialogClass, "ParquetInputDialog.Fields.Get" ) ); props.setLook( wGetFields ); new FD( wGetFields ).bottom( 100, -FIELDS_SEP ).right( 100, -MARGIN ).apply(); wGetFields.addListener( SWT.Selection, lsGet ); // fields table ColumnInfo parquetPathColumnInfo = new ColumnInfo( BaseMessages.getString( parquetStepDialogClass, "ParquetInputDialog.Fields.column.Path" ), ColumnInfo.COLUMN_TYPE_TEXT, false, true ); ColumnInfo nameColumnInfo = new ColumnInfo( BaseMessages.getString( parquetStepDialogClass, "ParquetInputDialog.Fields.column.Name" ), ColumnInfo.COLUMN_TYPE_TEXT, false, false ); ColumnInfo typeColumnInfo = new ColumnInfo( BaseMessages.getString( parquetStepDialogClass, "ParquetInputDialog.Fields.column.Type" ), ColumnInfo.COLUMN_TYPE_CCOMBO, ValueMetaFactory.getValueMetaNames() ); ColumnInfo formatColumnInfo = new ColumnInfo( BaseMessages.getString( parquetStepDialogClass, "ParquetInputDialog.Fields.column.Format" ), ColumnInfo.COLUMN_TYPE_CCOMBO, Const.getDateFormats() ); ColumnInfo sourceTypeColumnInfo = new ColumnInfo( BaseMessages.getString( parquetStepDialogClass, "ParquetInputDialog.Fields.column.SourceType" ), ColumnInfo.COLUMN_TYPE_TEXT, ValueMetaFactory.getValueMetaNames(), true ); ColumnInfo[] parameterColumns = new ColumnInfo[] {parquetPathColumnInfo, nameColumnInfo, typeColumnInfo, formatColumnInfo, sourceTypeColumnInfo}; parameterColumns[0].setAutoResize( false ); parameterColumns[1].setUsingVariables( true ); parameterColumns[3].setAutoResize( false ); wInputFields = new TableView( transMeta, fieldsContainer, SWT.FULL_SELECTION | SWT.SINGLE | SWT.BORDER | SWT.NO_SCROLL | SWT.V_SCROLL, parameterColumns, 7, null, props ); ColumnsResizer resizer = new ColumnsResizer( 0, 40, 20, 20, 20, 0 ); wInputFields.getTable().addListener( SWT.Resize, resizer ); props.setLook( wInputFields ); new FD( wInputFields ).left( 0, MARGIN ).right( 100, -MARGIN ).top( wPassThruFields, FIELDS_SEP ) .bottom( wGetFields, -FIELDS_SEP ).apply(); wInputFields.setRowNums(); wInputFields.optWidth( true ); for ( ColumnInfo col : parameterColumns ) { col.setAutoResize( false ); } resizer.addColumnResizeListeners( wInputFields.getTable() ); setTruncatedColumn( wInputFields.getTable(), 1 ); if ( !Const.isWindows() ) { addColumnTooltip( wInputFields.getTable(), 1 ); } } protected void populateFieldsTable() { try { List inputFields = 
getInputFieldsFromParquetFile( false ); wInputFields.clearAll(); for ( IParquetInputField field : inputFields ) { TableItem item = new TableItem( wInputFields.table, SWT.NONE ); if ( field != null ) { setField( item, concatenateParquetNameAndType( field ), PARQUET_PATH_COLUMN_INDEX ); setField( item, field.getPentahoFieldName(), FIELD_NAME_COLUMN_INDEX ); setField( item, ValueMetaFactory.getValueMetaName( field.getPentahoType() ), FIELD_TYPE_COLUMN_INDEX ); setField( item, field.getStringFormat(), FORMAT_COLUMN_INDEX ); setField( item, ParquetSpec.DataType.getDataType( field.getFormatType() ).getName(), FIELD_SOURCE_TYPE_COLUMN_INDEX ); } } wInputFields.removeEmptyRows(); wInputFields.setRowNums(); wInputFields.optWidth( true ); } catch ( Exception ex ) { logError( BaseMessages.getString( parquetStepDialogClass, UNABLE_TO_LOAD_SCHEMA_FROM_CONTAINER_FILE ), ex ); new ErrorDialog( shell, stepname, BaseMessages.getString( parquetStepDialogClass, UNABLE_TO_LOAD_SCHEMA_FROM_CONTAINER_FILE, getProcessedFileName() ), ex ); } } private String getProcessedFileName() { return transMeta.environmentSubstitute( wPath.getText() ); } private List getInputFieldsFromParquetFile( boolean failQuietly ) { String parquetFileName = getProcessedFileName(); List inputFields = null; try { inputFields = ParquetInput.retrieveSchema( transMeta.getBowl(), meta.getNamedClusterResolver().getNamedClusterServiceLocator(), meta.getNamedClusterResolver().resolveNamedCluster( parquetFileName ), parquetFileName ); } catch ( Exception ex ) { if ( !failQuietly ) { logError( BaseMessages.getString( parquetStepDialogClass, UNABLE_TO_LOAD_SCHEMA_FROM_CONTAINER_FILE ), ex ); new ErrorDialog( shell, stepname, BaseMessages.getString( parquetStepDialogClass, UNABLE_TO_LOAD_SCHEMA_FROM_CONTAINER_FILE, parquetFileName ), ex ); } } return inputFields; } private void setField( TableItem item, String fieldValue, int fieldIndex ) { if ( !Utils.isEmpty( fieldValue ) ) { item.setText( fieldIndex, fieldValue ); } } /** * Read the data from the meta object and show it in this dialog. 
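 * Rows already present in the fields table are updated in place; new rows are appended only when the meta
 * object contains more input fields than the table currently shows.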
*/ @Override protected void getData( ParquetInputMeta meta ) { if ( meta.getFilename() != null && meta.getFilename().length() > 0 ) { wPath.setText( meta.getFilename() ); } wPassThruFields.setSelection( meta.inputFiles.passingThruFields ); wIgnoreEmptyFolder.setSelection( meta.isIgnoreEmptyFolder() ); int itemIndex = 0; for ( IParquetInputField inputField : meta.getInputFields() ) { TableItem item = null; if ( itemIndex < wInputFields.table.getItemCount() ) { item = wInputFields.table.getItem( itemIndex ); } else { item = new TableItem( wInputFields.table, SWT.NONE ); } if ( inputField.getFormatFieldName() != null ) { item.setText( PARQUET_PATH_COLUMN_INDEX, concatenateParquetNameAndType( inputField ) ); } if ( inputField.getPentahoFieldName() != null ) { item.setText( FIELD_NAME_COLUMN_INDEX, inputField.getPentahoFieldName() ); } if ( getTypeDesc( inputField.getPentahoType() ) != null ) { item.setText( FIELD_TYPE_COLUMN_INDEX, getTypeDesc( inputField.getPentahoType() ) ); } if ( getSourceTypeDesc( inputField.getFormatType() ) != null ) { item.setText( FIELD_SOURCE_TYPE_COLUMN_INDEX, getSourceTypeDesc( inputField.getFormatType() ) ); } if ( inputField.getStringFormat() != null ) { item.setText( FORMAT_COLUMN_INDEX, inputField.getStringFormat() ); } else { item.setText( FORMAT_COLUMN_INDEX, "" ); } itemIndex++; } } public String getTypeDesc( int type ) { return ValueMetaFactory.getValueMetaName( type ); } public String getSourceTypeDesc( int type ) { return ParquetSpec.DataType.getDataType( type ).getName(); } /** * Fill meta object from UI options. */ @Override protected void getInfo( ParquetInputMeta meta, boolean preview ) { String filePath = wPath.getText(); if ( filePath != null && !filePath.isEmpty() ) { meta.allocateFiles( 1 ); meta.setFilename( wPath.getText().trim() ); } meta.inputFiles.passingThruFields = wPassThruFields.getSelection(); meta.setIgnoreEmptyFolder( wIgnoreEmptyFolder.getSelection() ); List actualParquetFileInputFields = getInputFieldsFromParquetFile( true ); int nrFields = wInputFields.nrNonEmpty(); meta.setInputFields( new ParquetInputField[ nrFields ] ); for ( int i = 0; i < nrFields; i++ ) { TableItem item = wInputFields.getNonEmpty( i ); ParquetInputField field = new ParquetInputField(); field.setFormatFieldName( extractFieldName( item.getText( PARQUET_PATH_COLUMN_INDEX ) ) ); if ( actualParquetFileInputFields != null ) { IParquetInputField actualParquetField = actualParquetFileInputFields.stream() .filter( x -> field.getFormatFieldName().equals( x.getFormatFieldName() ) ) .findFirst( ).orElse( null ); if ( actualParquetField != null ) { field.setFormatType( actualParquetField.getFormatType() ); } else { ParquetSpec.DataType sourceType = extractParquetType( item.getText( PARQUET_PATH_COLUMN_INDEX ) ); if ( ( sourceType == null ) ) { String uiTypeTrimmed = item.getText( FIELD_SOURCE_TYPE_COLUMN_INDEX ).trim(); for ( ParquetSpec.DataType temp : ParquetSpec.DataType.values() ) { if ( temp.getName().equalsIgnoreCase( uiTypeTrimmed ) ) { sourceType = temp; } } } field.setFormatType( sourceType.getId() ); item.setText( concatenateParquetNameAndType( field ) ); } } field.setPentahoFieldName( item.getText( FIELD_NAME_COLUMN_INDEX ) ); field.setPentahoType( ValueMetaFactory.getIdForValueMeta( item.getText( FIELD_TYPE_COLUMN_INDEX ) ) ); field.setStringFormat( item.getText( FORMAT_COLUMN_INDEX ) ); meta.inputFields[ i ] = field; } } /** * When all else fails, extract the parquet type from the field description.
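 * The path column holds values of the form {@code <fieldName> (<parquetTypeName>)}; the text between the
 * parentheses is matched case-insensitively against the {@link ParquetSpec.DataType} names.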
* * @see #concatenateParquetNameAndType(IParquetInputField) */ private ParquetSpec.DataType extractParquetType( String parquetNameTypeFromUI ) { if ( parquetNameTypeFromUI != null ) { String uiType = StringUtils.substringBetween( parquetNameTypeFromUI, "(", ")" ); if ( uiType != null ) { String uiTypeTrimmed = uiType.trim(); for ( ParquetSpec.DataType temp : ParquetSpec.DataType.values() ) { if ( temp.getName().equalsIgnoreCase( uiTypeTrimmed ) ) { return temp; } } } } return null; } /** * Get the field name from the UI path column * * @see #concatenateParquetNameAndType(IParquetInputField) */ private String extractFieldName( String parquetNameTypeFromUI ) { if ( parquetNameTypeFromUI != null ) { return StringUtils.substringBefore( parquetNameTypeFromUI, "(" ).trim(); } return parquetNameTypeFromUI; } /** * This method must only be changed together with {@link #extractParquetType(String)}, * since it converts the field for display to the user and the extract methods must convert it back to the internal format */ private String concatenateParquetNameAndType( IParquetInputField field ) { String typeName; ParquetSpec.DataType parquetDataType = ParquetSpec.DataType.getDataType( field.getFormatType() ); if ( parquetDataType == null ) { typeName = "unknown"; } else { typeName = ParquetSpec.DataType.getDataType( field.getFormatType() ).getName(); } return field.getFormatFieldName() + " (" + typeName + ")"; } private void doPreview() { getInfo( meta, true ); TransMeta previewMeta = TransPreviewFactory.generatePreviewTransformation( transMeta, meta, wStepname.getText() ); transMeta.getVariable( "Internal.Transformation.Filename.Directory" ); previewMeta.getVariable( "Internal.Transformation.Filename.Directory" ); EnterNumberDialog numberDialog = new EnterNumberDialog( shell, props.getDefaultPreviewSize(), BaseMessages.getString( parquetStepDialogClass, "ParquetInputDialog.PreviewSize.DialogTitle" ), BaseMessages.getString( parquetStepDialogClass, "ParquetInputDialog.PreviewSize.DialogMessage" ) ); int previewSize = numberDialog.open(); if ( previewSize > 0 ) { TransPreviewProgressDialog progressDialog = new TransPreviewProgressDialog( shell, previewMeta, new String[] { wStepname.getText() }, new int[] { previewSize } ); progressDialog.open(); Trans trans = progressDialog.getTrans(); String loggingText = progressDialog.getLoggingText(); if ( !progressDialog.isCancelled() ) { if ( trans.getResult() != null && trans.getResult().getNrErrors() > 0 ) { EnterTextDialog etd = new EnterTextDialog( shell, BaseMessages.getString( parquetStepDialogClass, "System.Dialog.PreviewError.Title" ), BaseMessages.getString( parquetStepDialogClass, "System.Dialog.PreviewError.Message" ), loggingText, true ); etd.setReadOnly(); etd.open(); } } PreviewRowsDialog prd = new PreviewRowsDialog( shell, transMeta, SWT.NONE, wStepname.getText(), progressDialog .getPreviewRowsMeta( wStepname.getText() ), progressDialog.getPreviewRows( wStepname.getText() ), loggingText ); prd.open(); } } @Override protected int getWidth() { return SHELL_WIDTH; } @Override protected int getHeight() { return SHELL_HEIGHT; } @Override protected String getStepTitle() { return BaseMessages.getString( parquetStepDialogClass, "ParquetInputDialog.Shell.Title" ); } @Override protected Listener getPreview() { return e -> doPreview(); } @Override protected SelectionOperation selectionOperation() { return SelectionOperation.FILE_OR_FOLDER; } } ================================================ FILE:
kettle-plugins/formats/core/src/main/java/org/pentaho/big/data/kettle/plugins/formats/impl/parquet/input/ParquetInputMeta.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.formats.impl.parquet.input; import org.pentaho.big.data.kettle.plugins.formats.impl.NamedClusterResolver; import org.pentaho.big.data.kettle.plugins.formats.parquet.input.ParquetInputMetaBase; import org.pentaho.di.core.annotations.Step; import org.pentaho.di.core.injection.InjectionSupported; import org.pentaho.di.trans.Trans; import org.pentaho.di.trans.TransMeta; import org.pentaho.di.trans.step.StepDataInterface; import org.pentaho.di.trans.step.StepInterface; import org.pentaho.di.trans.step.StepMeta; @Step( id = "ParquetInput", image = "PI.svg", name = "ParquetInput.Name", description = "ParquetInput.Description", categoryDescription = "i18n:org.pentaho.di.trans.step:BaseStep.Category.BigData", i18nPackageName = "org.pentaho.di.trans.steps.parquet" ) @InjectionSupported( localizationPrefix = "ParquetInput.Injection.", groups = { "FILENAME_LINES", "FIELDS" }, hide = { "FILEMASK", "EXCLUDE_FILEMASK", "FILE_REQUIRED", "INCLUDE_SUBFOLDERS", "FIELD_POSITION", "FIELD_LENGTH", "FIELD_IGNORE", "FIELD_FORMAT", "FIELD_PRECISION", "FIELD_CURRENCY", "FIELD_DECIMAL", "FIELD_GROUP", "FIELD_REPEAT", "FIELD_TRIM_TYPE", "FIELD_NULL_STRING", "FIELD_IF_NULL", "FIELD_NULLABLE", "ACCEPT_FILE_NAMES", "ACCEPT_FILE_STEP", "PASS_THROUGH_FIELDS", "ACCEPT_FILE_FIELD", "ADD_FILES_TO_RESULT", "IGNORE_ERRORS", "FILE_ERROR_FIELD", "FILE_ERROR_MESSAGE_FIELD", "SKIP_BAD_FILES", "WARNING_FILES_TARGET_DIR", "WARNING_FILES_EXTENTION", "ERROR_FILES_TARGET_DIR", "ERROR_FILES_EXTENTION", "LINE_NR_FILES_TARGET_DIR", "LINE_NR_FILES_EXTENTION", "FILE_SHORT_FILE_FIELDNAME", "FILE_EXTENSION_FIELDNAME", "FILE_PATH_FIELDNAME", "FILE_SIZE_FIELDNAME", "FILE_HIDDEN_FIELDNAME", "FILE_LAST_MODIFICATION_FIELDNAME", "FILE_URI_FIELDNAME", "FILE_ROOT_URI_FIELDNAME" } ) public class ParquetInputMeta extends ParquetInputMetaBase { private final NamedClusterResolver namedClusterResolver; public ParquetInputMeta() { this( NamedClusterResolver.getInstance() ); } public ParquetInputMeta( NamedClusterResolver namedClusterResolver ) { this.namedClusterResolver = namedClusterResolver; } @Override public StepInterface getStep( StepMeta stepMeta, StepDataInterface stepDataInterface, int copyNr, TransMeta transMeta, Trans trans ) { return new ParquetInput( stepMeta, stepDataInterface, copyNr, transMeta, trans ); } @Override public StepDataInterface getStepData() { return new ParquetInputData(); } public NamedClusterResolver getNamedClusterResolver() { return namedClusterResolver; } } ================================================ FILE: kettle-plugins/formats/core/src/main/java/org/pentaho/big/data/kettle/plugins/formats/impl/parquet/input/VFSScheme.java ================================================ /*! 
****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.formats.impl.parquet.input; public class VFSScheme { private final String scheme; private final String schemeName; public VFSScheme( String scheme, String schemeName ) { this.scheme = scheme; this.schemeName = schemeName; } public String getScheme() { return scheme; } public String getSchemeName() { return schemeName; } } ================================================ FILE: kettle-plugins/formats/core/src/main/java/org/pentaho/big/data/kettle/plugins/formats/impl/parquet/output/ParquetOutput.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.formats.impl.parquet.output; import org.apache.parquet.hadoop.metadata.CompressionCodecName; import org.pentaho.big.data.kettle.plugins.formats.impl.output.PvfsFileAliaser; import org.pentaho.hadoop.shim.api.cluster.NamedCluster; import org.pentaho.hadoop.shim.api.cluster.ClusterInitializationException; import org.pentaho.big.data.kettle.plugins.formats.parquet.output.ParquetOutputMetaBase; import org.pentaho.di.core.RowMetaAndData; import org.pentaho.di.core.exception.KettleException; import org.pentaho.di.core.row.RowMetaInterface; import org.pentaho.di.trans.Trans; import org.pentaho.di.trans.TransMeta; import org.pentaho.di.trans.step.BaseStep; import org.pentaho.di.trans.step.StepDataInterface; import org.pentaho.di.trans.step.StepInterface; import org.pentaho.di.trans.step.StepMeta; import org.pentaho.di.trans.step.StepMetaInterface; import org.pentaho.hadoop.shim.api.format.FormatService; import org.pentaho.hadoop.shim.api.format.IPentahoParquetOutputFormat; import java.io.IOException; public class ParquetOutput extends BaseStep implements StepInterface { private ParquetOutputMeta meta; private ParquetOutputData data; private PvfsFileAliaser pvfsFileAliaser; public ParquetOutput( StepMeta stepMeta, StepDataInterface stepDataInterface, int copyNr, TransMeta transMeta, Trans trans ) { super( stepMeta, stepDataInterface, copyNr, transMeta, trans ); } @Override public synchronized boolean processRow( StepMetaInterface smi, StepDataInterface sdi ) throws KettleException { try { if ( data.output == null ) { init( getInputRowMeta() ); } Object[] currentRow = getRow(); if ( currentRow != null ) { RowMetaAndData row = new RowMetaAndData( getInputRowMeta(), currentRow ); data.writer.write( row ); incrementLinesOutput(); putRow( row.getRowMeta(), row.getData() ); // in case we want it to go further or DET... return true; } else { // no more input to be expected... 
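// Close the Parquet writer, copy the (possibly aliased) temporary file to its final destination, remove the
// temporary file and folder, and signal that this step has produced all of its output.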
closeWriter(); pvfsFileAliaser.copyFileToFinalDestination(); pvfsFileAliaser.deleteTempFileAndFolder(); setOutputDone(); return false; } } catch ( KettleException ex ) { try { closeWriter(); pvfsFileAliaser.deleteTempFileAndFolder(); } catch ( Exception ex2 ) { // Do nothing } throw ex; } catch ( IllegalStateException e ) { getLogChannel().logError( e.getMessage() ); setErrors( 1 ); pvfsFileAliaser.deleteTempFileAndFolder(); setOutputDone(); return false; } catch ( Exception ex ) { try { closeWriter(); pvfsFileAliaser.deleteTempFileAndFolder(); } catch ( Exception ex2 ) { // Do nothing } throw new KettleException( ex ); } } public void init( RowMetaInterface rowMeta ) throws Exception { FormatService formatService; try { formatService = meta.getNamedClusterResolver().getNamedClusterServiceLocator() .getService( getNamedCluster(), FormatService.class ); } catch ( ClusterInitializationException e ) { throw new KettleException( "can't get service format shim ", e ); } if ( meta.getFilename() == null ) { throw new KettleException( "No output files defined" ); } data.output = formatService.createOutputFormat( IPentahoParquetOutputFormat.class, getNamedCluster() ); String outputFileName = environmentSubstitute( meta.constructOutputFilename() ); pvfsFileAliaser = new PvfsFileAliaser( getTransMeta().getBowl(), outputFileName, getTransMeta(), data.output, meta.overrideOutput, getLogChannel() ); data.output.setOutputFile( pvfsFileAliaser.generateAlias(), meta.overrideOutput ); data.output.setFields( meta.getOutputFields() ); CompressionCodecName compression; try { compression = CompressionCodecName.valueOf( meta.getCompressionType( variables ).name().toUpperCase() ); } catch ( Exception ex ) { compression = CompressionCodecName.UNCOMPRESSED; } data.output.setCompression( compression ); data.output .setVersion( ParquetOutputMetaBase.ParquetVersion.PARQUET_1.equals( meta.getParquetVersion( variables ) ) ? 
IPentahoParquetOutputFormat.VERSION.VERSION_1_0 : IPentahoParquetOutputFormat.VERSION.VERSION_2_0 ); if ( meta.getRowGroupSize( variables ) > 0 ) { data.output.setRowGroupSize( meta.getRowGroupSize( variables ) * 1024 * 1024 ); } if ( meta.getDataPageSize( variables ) > 0 ) { data.output.setDataPageSize( meta.getDataPageSize( variables ) * 1024 ); } data.output.enableDictionary( meta.enableDictionary ); if ( meta.getDictPageSize( variables ) > 0 ) { data.output.setDictionaryPageSize( meta.getDictPageSize( variables ) * 1024 ); } data.writer = data.output.createRecordWriter(); } private NamedCluster getNamedCluster() { return meta.getNamedClusterResolver().resolveNamedCluster( environmentSubstitute( meta.getFilename() ) ); } public void closeWriter() throws KettleException { try { data.writer.close(); } catch ( IOException e ) { throw new KettleException( e ); } data.output = null; } @Override public boolean init( StepMetaInterface smi, StepDataInterface sdi ) { meta = (ParquetOutputMeta) smi; data = (ParquetOutputData) sdi; if ( !super.init( smi, sdi ) ) { return false; } //Set Embedded NamedCluster MetaStore Provider Key so that it can be passed to VFS if ( getTransMeta().getNamedClusterEmbedManager() != null ) { getTransMeta().getNamedClusterEmbedManager() .passEmbeddedMetastoreKey( getTransMeta(), getTransMeta().getEmbeddedMetastoreProviderKey() ); } return true; } } ================================================ FILE: kettle-plugins/formats/core/src/main/java/org/pentaho/big/data/kettle/plugins/formats/impl/parquet/output/ParquetOutputData.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.formats.impl.parquet.output; import org.pentaho.di.trans.step.BaseStepData; import org.pentaho.di.trans.step.StepDataInterface; import org.pentaho.hadoop.shim.api.format.IPentahoOutputFormat.IPentahoRecordWriter; import org.pentaho.hadoop.shim.api.format.IPentahoParquetOutputFormat; public class ParquetOutputData extends BaseStepData implements StepDataInterface { public IPentahoParquetOutputFormat output; public IPentahoRecordWriter writer; } ================================================ FILE: kettle-plugins/formats/core/src/main/java/org/pentaho/big/data/kettle/plugins/formats/impl/parquet/output/ParquetOutputDialog.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file.
* * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.formats.impl.parquet.output; import org.apache.commons.lang.StringUtils; import org.eclipse.jface.dialogs.MessageDialog; import org.eclipse.swt.SWT; import org.eclipse.swt.custom.CTabFolder; import org.eclipse.swt.custom.CTabItem; import org.eclipse.swt.events.ModifyListener; import org.eclipse.swt.events.SelectionAdapter; import org.eclipse.swt.events.SelectionEvent; import org.eclipse.swt.layout.FormAttachment; import org.eclipse.swt.layout.FormData; import org.eclipse.swt.layout.FormLayout; import org.eclipse.swt.widgets.Button; import org.eclipse.swt.widgets.Composite; import org.eclipse.swt.widgets.Control; import org.eclipse.swt.widgets.Label; import org.eclipse.swt.widgets.Listener; import org.eclipse.swt.widgets.Shell; import org.eclipse.swt.widgets.Table; import org.eclipse.swt.widgets.TableItem; import org.pentaho.di.core.annotations.PluginDialog; import org.pentaho.big.data.kettle.plugins.formats.impl.NullableValuesEnum; import org.pentaho.big.data.kettle.plugins.formats.impl.parquet.BaseParquetStepDialog; import org.pentaho.big.data.kettle.plugins.formats.parquet.ParquetTypeConverter; import org.pentaho.big.data.kettle.plugins.formats.parquet.output.ParquetOutputField; import org.pentaho.di.core.Const; import org.pentaho.di.core.Props; import org.pentaho.di.core.exception.KettleException; import org.pentaho.di.core.row.RowMetaInterface; import org.pentaho.di.core.row.ValueMetaInterface; import org.pentaho.di.core.util.Utils; import org.pentaho.di.i18n.BaseMessages; import org.pentaho.di.trans.TransMeta; import org.pentaho.di.trans.step.StepDialogInterface; import org.pentaho.di.ui.core.PropsUI; import org.pentaho.di.ui.core.dialog.ErrorDialog; import org.pentaho.di.ui.core.events.dialog.SelectionOperation; import org.pentaho.di.ui.core.gui.GUIResource; import org.pentaho.di.ui.core.widget.ColumnInfo; import org.pentaho.di.ui.core.widget.ColumnsResizer; import org.pentaho.di.ui.core.widget.ComboVar; import org.pentaho.di.ui.core.widget.TableView; import org.pentaho.di.ui.core.widget.TextVar; import org.pentaho.di.ui.trans.step.TableItemInsertListener; import org.pentaho.hadoop.shim.api.format.ParquetSpec; import java.util.ArrayList; import java.util.Arrays; import java.util.List; import java.util.function.BiConsumer; @PluginDialog( id = "ParquetOutput", image = "PO.svg", pluginType = PluginDialog.PluginType.STEP, documentationUrl = "pdi-transformation-steps-reference-overview/parquet-output" ) public class ParquetOutputDialog extends BaseParquetStepDialog implements StepDialogInterface { public static final Class PKG = ParquetOutputMeta.class; public static final int PARQUET_OUTPUT_FIELD_TINY = Const.isLinux() ? 
FIELD_TINY + 20 : FIELD_TINY; public static final int COLUMNS_SEP = 5 * MARGIN; public static final int OFFSET = 16; public static final ParquetSpec.DataType[] SUPPORTED_PARQUET_TYPES = { ParquetSpec.DataType.BINARY, ParquetSpec.DataType.BOOLEAN, ParquetSpec.DataType.DATE, ParquetSpec.DataType.DECIMAL, ParquetSpec.DataType.DOUBLE, ParquetSpec.DataType.FLOAT, ParquetSpec.DataType.INT_32, ParquetSpec.DataType.INT_64, ParquetSpec.DataType.INT_96, ParquetSpec.DataType.TIMESTAMP_MILLIS, ParquetSpec.DataType.UTF8 }; private static final int SHELL_WIDTH = 698; private static final int SHELL_HEIGHT = 620; private TableView wOutputFields; private Button wOverwriteExistingFile; private ComboVar wCompression; private ComboVar wVersion; private TextVar wRowSize; private TextVar wPageSize; private TextVar wExtension; private TextVar wDictPageSize; private Label lDict; private Button wDictionaryEncoding; private Button wIncludeDateInFilename; private Button wIncludeTimeInFilename; private Button wSpecifyDateTimeFormat; private ComboVar wDateTimeFormat; public ParquetOutputDialog( Shell parent, Object parquetOutputMeta, TransMeta transMeta, String sname ) { this( parent, (ParquetOutputMeta) parquetOutputMeta, transMeta, sname ); } public ParquetOutputDialog( Shell parent, ParquetOutputMeta parquetOutputMeta, TransMeta transMeta, String sname ) { super( parent, parquetOutputMeta, transMeta, sname ); this.meta = parquetOutputMeta; } public static ComboVar createComboVar( TransMeta transMeta, ModifyListener lsMod, Composite container, String[] options ) { ComboVar combo = new ComboVar( transMeta, container, SWT.SINGLE | SWT.LEFT | SWT.BORDER ); combo.setItems( options ); combo.addModifyListener( lsMod ); return combo; } public static Label createLabel( Composite container, String labelRef, PropsUI props ) { Label label = new Label( container, SWT.NONE ); label.setText( BaseMessages.getString( PKG, labelRef ) ); props.setLook( label ); return label; } public static MessageDialog getFieldsChoiceDialog( Shell shell, int newFields ) { MessageDialog messageDialog = new MessageDialog( shell, BaseMessages.getString( PKG, "ParquetOutput.GetFieldsChoice.Title" ), // "Warning!" 
null, BaseMessages.getString( PKG, "ParquetOutput.GetFieldsChoice.Message", "" + newFields ), MessageDialog.WARNING, new String[] { BaseMessages.getString( PKG, "ParquetOutput.GetFieldsChoice.AddNew" ), BaseMessages.getString( PKG, "ParquetOutput.GetFieldsChoice.Add" ), BaseMessages.getString( PKG, "ParquetOutput.GetFieldsChoice.ClearAndAdd" ), BaseMessages.getString( PKG, "ParquetOutput.GetFieldsChoice.Cancel" ), }, 0 ) { @Override public void create() { super.create(); getShell().setBackground( GUIResource.getInstance().getColorWhite() ); } @Override protected Control createMessageArea( Composite composite ) { Control control = super.createMessageArea( composite ); imageLabel.setBackground( GUIResource.getInstance().getColorWhite() ); messageLabel.setBackground( GUIResource.getInstance().getColorWhite() ); return control; } @Override protected Control createDialogArea( Composite parent ) { Control control = super.createDialogArea( parent ); control.setBackground( GUIResource.getInstance().getColorWhite() ); return control; } @Override protected Control createButtonBar( Composite parent ) { Control control = super.createButtonBar( parent ); control.setBackground( GUIResource.getInstance().getColorWhite() ); return control; } }; org.eclipse.jface.window.Window.setDefaultImage( GUIResource.getInstance().getImageSpoon() ); return messageDialog; } protected void createUI() { Control prev = createHeader(); //main fields prev = addFileWidgets( prev ); createFooter( shell ); Composite afterFile = new Composite( shell, SWT.NONE ); afterFile.setLayout( new FormLayout() ); Label separator = new Label( shell, SWT.HORIZONTAL | SWT.SEPARATOR ); FormData fdSpacer = new FormData(); fdSpacer.height = 2; fdSpacer.left = new FormAttachment( 0, 0 ); fdSpacer.bottom = new FormAttachment( wCancel, -MARGIN ); fdSpacer.right = new FormAttachment( 100, 0 ); separator.setLayoutData( fdSpacer ); new FD( afterFile ).left( 0, 0 ).top( prev, 0 ).right( 100, 0 ).bottom( separator, -MARGIN ).apply(); createAfterFile( afterFile ); } protected Control createAfterFile( Composite afterFile ) { wOverwriteExistingFile = new Button( afterFile, SWT.CHECK ); wOverwriteExistingFile.setText( BaseMessages.getString( PKG, "ParquetOutputDialog.OverwriteFile.Label" ) ); props.setLook( wOverwriteExistingFile ); new FD( wOverwriteExistingFile ).left( 0, 0 ).top( afterFile, FIELDS_SEP ).apply(); wOverwriteExistingFile.addSelectionListener( new SelectionAdapter() { @Override public void widgetSelected( SelectionEvent e ) { meta.setChanged(); } } ); CTabFolder wTabFolder = new CTabFolder( afterFile, SWT.BORDER ); props.setLook( wTabFolder, Props.WIDGET_STYLE_TAB ); wTabFolder.setSimple( false ); addFieldsTab( wTabFolder ); addOptionsTab( wTabFolder ); new FD( wTabFolder ).left( 0, 0 ).top( wOverwriteExistingFile, MARGIN ).right( 100, 0 ).bottom( 100, 0 ).apply(); wTabFolder.setSelection( 0 ); return wTabFolder; } @Override protected String getStepTitle() { return BaseMessages.getString( PKG, "ParquetOutputDialog.Shell.Title" ); } private void addFieldsTab( CTabFolder wTabFolder ) { CTabItem wTab = new CTabItem( wTabFolder, SWT.NONE ); wTab.setText( BaseMessages.getString( PKG, "ParquetOutputDialog.FieldsTab.TabTitle" ) ); Composite wComp = new Composite( wTabFolder, SWT.NONE ); props.setLook( wComp ); FormLayout layout = new FormLayout(); layout.marginWidth = MARGIN; layout.marginHeight = MARGIN; wComp.setLayout( layout ); lsGet = e -> getFields(); Button wGetFields = new Button( wComp, SWT.PUSH ); wGetFields.setText( 
BaseMessages.getString( PKG, "ParquetOutputDialog.Fields.Get" ) ); props.setLook( wGetFields ); new FD( wGetFields ).bottom( 100, 0 ).right( 100, 0 ).apply(); wGetFields.addListener( SWT.Selection, lsGet ); String[] typeNames = new String[ SUPPORTED_PARQUET_TYPES.length ]; for ( int i = 0; i < typeNames.length; i++ ) { typeNames[ i ] = SUPPORTED_PARQUET_TYPES[ i ].getName(); } ColumnInfo[] parameterColumns = new ColumnInfo[] { new ColumnInfo( BaseMessages.getString( PKG, "ParquetOutputDialog.Fields.column.Path" ), ColumnInfo.COLUMN_TYPE_TEXT, false, false ), new ColumnInfo( BaseMessages.getString( PKG, "ParquetOutputDialog.Fields.column.Name" ), ColumnInfo.COLUMN_TYPE_TEXT, false, false ), new ColumnInfo( BaseMessages.getString( PKG, "ParquetOutputDialog.Fields.column.Type" ), ColumnInfo.COLUMN_TYPE_CCOMBO, typeNames, false ), new ColumnInfo( BaseMessages.getString( PKG, "ParquetOutputDialog.Fields.column.Precision" ), ColumnInfo.COLUMN_TYPE_TEXT, false, false ), new ColumnInfo( BaseMessages.getString( PKG, "ParquetOutputDialog.Fields.column.Scale" ), ColumnInfo.COLUMN_TYPE_TEXT, false, false ), new ColumnInfo( BaseMessages.getString( PKG, "ParquetOutputDialog.Fields.column.Default" ), ColumnInfo.COLUMN_TYPE_TEXT, false, false ), new ColumnInfo( BaseMessages.getString( PKG, "ParquetOutputDialog.Fields.column.Null" ), ColumnInfo.COLUMN_TYPE_CCOMBO, NullableValuesEnum.getValuesArr(), true ) }; parameterColumns[ 0 ].setAutoResize( false ); parameterColumns[ 1 ].setUsingVariables( true ); wOutputFields = new TableView( transMeta, wComp, SWT.FULL_SELECTION | SWT.SINGLE | SWT.BORDER | SWT.NO_SCROLL | SWT.V_SCROLL, parameterColumns, 7, lsMod, props ); ColumnsResizer resizer = new ColumnsResizer( 0, 30, 20, 10, 10, 10, 15, 5 ); wOutputFields.getTable().addListener( SWT.Resize, resizer ); props.setLook( wOutputFields ); new FD( wOutputFields ).left( 0, 0 ).right( 100, 0 ).top( wComp, 0 ).bottom( wGetFields, -FIELDS_SEP ).apply(); wOutputFields.setRowNums(); wOutputFields.optWidth( true ); new FD( wComp ).left( 0, 0 ).top( 0, 0 ).right( 100, 0 ).bottom( 100, 0 ).apply(); wTab.setControl( wComp ); for ( ColumnInfo col : parameterColumns ) { col.setAutoResize( false ); } resizer.addColumnResizeListeners( wOutputFields.getTable() ); setTruncatedColumn( wOutputFields.getTable(), 1 ); if ( !Const.isWindows() ) { addColumnTooltip( wOutputFields.getTable(), 1 ); } } private void addOptionsTab( CTabFolder wTabFolder ) { CTabItem wTab = new CTabItem( wTabFolder, SWT.NONE ); wTab.setText( BaseMessages.getString( PKG, "ParquetOutputDialog.Options.TabTitle" ) ); Composite wComp = new Composite( wTabFolder, SWT.NONE ); wTab.setControl( wComp ); props.setLook( wComp ); FormLayout formLayout = new FormLayout(); formLayout.marginHeight = formLayout.marginWidth = MARGIN; wComp.setLayout( formLayout ); Label lCompression = createLabel( wComp, "ParquetOutputDialog.Options.Compression", props ); new FD( lCompression ).left( 0, 0 ).top( wComp, 0 ).apply(); wCompression = createComboVar( transMeta, lsMod, wComp, meta.getCompressionTypes() ); new FD( wCompression ).left( 0, 0 ).top( lCompression, FIELD_LABEL_SEP ) .width( PARQUET_OUTPUT_FIELD_TINY + VAR_EXTRA_WIDTH ).apply(); Label lVersion = createLabel( wComp, "ParquetOutputDialog.Options.Version", props ); new FD( lVersion ).left( 0, 0 ).top( wCompression, FIELDS_SEP ).apply(); wVersion = createComboVar( transMeta, lsMod, wComp, meta.getVersionTypes() ); new FD( wVersion ).left( 0, 0 ).top( lVersion, FIELD_LABEL_SEP ) .width( PARQUET_OUTPUT_FIELD_TINY + VAR_EXTRA_WIDTH 
).apply(); Label lRowSize = createLabel( wComp, "ParquetOutputDialog.Options.RowSize", props ); new FD( lRowSize ).left( 0, 0 ).top( wVersion, FIELDS_SEP ).apply(); wRowSize = new TextVar( transMeta, wComp, SWT.SINGLE | SWT.LEFT | SWT.BORDER ); new FD( wRowSize ).left( 0, 0 ).top( lRowSize, FIELD_LABEL_SEP ) .width( PARQUET_OUTPUT_FIELD_TINY + VAR_EXTRA_WIDTH ).apply(); setIntegerOnly( wRowSize ); wRowSize.addModifyListener( lsMod ); Label lDataPageSize = createLabel( wComp, "ParquetOutputDialog.Options.PageSize", props ); new FD( lDataPageSize ).left( 0, 0 ).top( wRowSize, FIELDS_SEP ).apply(); wPageSize = new TextVar( transMeta, wComp, SWT.SINGLE | SWT.LEFT | SWT.BORDER ); new FD( wPageSize ).left( 0, 0 ).top( lDataPageSize, FIELD_LABEL_SEP ) .width( PARQUET_OUTPUT_FIELD_TINY + VAR_EXTRA_WIDTH ).apply(); setIntegerOnly( wPageSize ); wPageSize.addModifyListener( lsMod ); wDictionaryEncoding = new Button( wComp, SWT.CHECK ); wDictionaryEncoding.setText( BaseMessages.getString( PKG, "ParquetOutputDialog.Options.DictionaryEncoding" ) ); props.setLook( wDictionaryEncoding ); new FD( wDictionaryEncoding ).left( 0, 0 ).top( wPageSize, FIELDS_SEP ).apply(); wDictionaryEncoding.addSelectionListener( new SelectionAdapter() { @Override public void widgetSelected( SelectionEvent e ) { meta.setChanged(); actualizeDictionaryPageSizeControl(); } } ); lDict = new Label( wComp, SWT.NONE ); lDict.setText( BaseMessages.getString( PKG, "ParquetOutputDialog.Options.DictPageSize" ) ); new FD( lDict ).left( 0, OFFSET ).top( wDictionaryEncoding, FIELD_LABEL_SEP ).apply(); wDictPageSize = new TextVar( transMeta, wComp, SWT.SINGLE | SWT.LEFT | SWT.BORDER ); new FD( wDictPageSize ).left( 0, OFFSET ).top( lDict, FIELD_LABEL_SEP ) .width( PARQUET_OUTPUT_FIELD_TINY + VAR_EXTRA_WIDTH - OFFSET ).apply(); setIntegerOnly( wDictPageSize ); wDictPageSize.addModifyListener( lsMod ); Control leftRef = wCompression; // 2nd column Label lExtension = new Label( wComp, SWT.NONE ); lExtension.setText( BaseMessages.getString( PKG, "ParquetOutputDialog.Options.Extension" ) ); new FD( lExtension ).left( leftRef, COLUMNS_SEP ).top( wComp, 0 ).apply(); wExtension = new TextVar( transMeta, wComp, SWT.SINGLE | SWT.LEFT | SWT.BORDER ); new FD( wExtension ).left( leftRef, COLUMNS_SEP ).top( lExtension, FIELD_LABEL_SEP ) .width( PARQUET_OUTPUT_FIELD_TINY + VAR_EXTRA_WIDTH ).apply(); wExtension.addModifyListener( lsMod ); wIncludeDateInFilename = new Button( wComp, SWT.CHECK ); wIncludeDateInFilename .setText( BaseMessages.getString( PKG, "ParquetOutputDialog.Options.IncludeDateInFilename" ) ); props.setLook( wIncludeDateInFilename ); new FD( wIncludeDateInFilename ).left( leftRef, COLUMNS_SEP ).top( wExtension, MARGIN ).apply(); wIncludeDateInFilename.addSelectionListener( new SelectionAdapter() { @Override public void widgetSelected( SelectionEvent e ) { meta.setChanged(); } } ); wIncludeTimeInFilename = new Button( wComp, SWT.CHECK ); wIncludeTimeInFilename .setText( BaseMessages.getString( PKG, "ParquetOutputDialog.Options.IncludeTimeInFilename" ) ); props.setLook( wIncludeTimeInFilename ); new FD( wIncludeTimeInFilename ).left( leftRef, COLUMNS_SEP ).top( wIncludeDateInFilename, FIELDS_SEP ).apply(); wIncludeTimeInFilename.addSelectionListener( new SelectionAdapter() { @Override public void widgetSelected( SelectionEvent e ) { meta.setChanged(); } } ); wSpecifyDateTimeFormat = new Button( wComp, SWT.CHECK ); wSpecifyDateTimeFormat .setText( BaseMessages.getString( PKG, "ParquetOutputDialog.Options.SpecifyDateTimeFormat" ) ); 
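// Note on the file-name options wired up below: per the selection listener and actualizeDateTimeControls(),
// checking "Specify date time format" disables and clears the "Include date in file name" / "Include time in
// file name" checkboxes, so an explicit format and the automatic date/time suffixes stay mutually exclusive.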
props.setLook( wSpecifyDateTimeFormat ); new FD( wSpecifyDateTimeFormat ).left( leftRef, COLUMNS_SEP ).top( wIncludeTimeInFilename, FIELDS_SEP ).apply(); wSpecifyDateTimeFormat.addSelectionListener( new SelectionAdapter() { @Override public void widgetSelected( SelectionEvent e ) { meta.setChanged(); wDateTimeFormat.setEnabled( wSpecifyDateTimeFormat.getSelection() ); actualizeDateTimeControls(); } } ); String[] dates = Const.getDateFormats(); dates = Arrays.stream( dates ).filter( d -> d.indexOf( '/' ) < 0 && d.indexOf( '\\' ) < 0 && d.indexOf( ':' ) < 0 ) .toArray( String[]::new ); // remove formats with slashes and colons wDateTimeFormat = createComboVar( transMeta, lsMod, wComp, dates ); props.setLook( wDateTimeFormat ); new FD( wDateTimeFormat ).left( leftRef, COLUMNS_SEP + OFFSET ).top( wSpecifyDateTimeFormat, FIELD_LABEL_SEP ) .width( 200 ).apply(); wDateTimeFormat.addModifyListener( lsMod ); } void actualizeDictionaryPageSizeControl() { boolean dictionaryEncoding = wDictionaryEncoding.getSelection(); lDict.setEnabled( dictionaryEncoding ); wDictPageSize.setEnabled( dictionaryEncoding ); } void actualizeDateTimeControls() { boolean allowedToIncludeDateTime = !wSpecifyDateTimeFormat.getSelection(); wIncludeDateInFilename.setEnabled( allowedToIncludeDateTime ); wIncludeTimeInFilename.setEnabled( allowedToIncludeDateTime ); if ( !allowedToIncludeDateTime ) { wIncludeDateInFilename.setSelection( false ); wIncludeTimeInFilename.setSelection( false ); } } protected String getComboVarValue( ComboVar combo ) { String text = combo.getText(); String data = (String) combo.getData( text ); return data != null ? data : text; } /** * Read the data from the meta object and show it in this dialog. */ protected void getData( ParquetOutputMeta meta ) { if ( meta.getFilename() != null ) { wPath.setText( meta.getFilename() ); } wOverwriteExistingFile.setSelection( meta.isOverrideOutput() ); populateFieldsUI( meta, wOutputFields ); wCompression.setText( coalesce( meta.getCompressionType() ) ); wVersion.setText( coalesce( meta.getParquetVersion() ) ); wDictionaryEncoding.setSelection( meta.isEnableDictionary() ); wDictPageSize.setText( coalesce( meta.getDictPageSize() ) ); wRowSize.setText( coalesce( meta.getRowGroupSize() ) ); wPageSize.setText( coalesce( meta.getDataPageSize() ) ); wExtension.setText( coalesce( meta.getExtension() ) ); wIncludeDateInFilename.setSelection( meta.isDateInFilename() ); wIncludeTimeInFilename.setSelection( meta.isTimeInFilename() ); String dateTimeFormat = coalesce( meta.getDateTimeFormat() ); if ( !dateTimeFormat.isEmpty() ) { wSpecifyDateTimeFormat.setSelection( true ); wDateTimeFormat.setText( dateTimeFormat ); } else { wSpecifyDateTimeFormat.setSelection( false ); wDateTimeFormat.setEnabled( false ); } actualizeDictionaryPageSizeControl(); actualizeDateTimeControls(); } public static String coalesce( String value ) { return value == null ? 
"" : value; } // ui -> meta @Override protected void getInfo( ParquetOutputMeta meta, boolean preview ) { meta.setFilename( wPath.getText() ); meta.setOverrideOutput( wOverwriteExistingFile.getSelection() ); saveOutputFields( wOutputFields, meta ); meta.setCompressionType( wCompression.getText() ); meta.setParquetVersion( wVersion.getText() ); meta.setEnableDictionary( wDictionaryEncoding.getSelection() ); meta.setDictPageSize( wDictPageSize.getText() ); meta.setRowGroupSize( wRowSize.getText() ); meta.setDataPageSize( wPageSize.getText() ); meta.setExtension( wExtension.getText() ); if ( wSpecifyDateTimeFormat.getSelection() ) { meta.setDateTimeFormat( wDateTimeFormat.getText() ); meta.setDateInFilename( false ); meta.setTimeInFilename( false ); } else { meta.setDateTimeFormat( null ); meta.setDateInFilename( wIncludeDateInFilename.getSelection() ); meta.setTimeInFilename( wIncludeTimeInFilename.getSelection() ); } } private void saveOutputFields( TableView wFields, ParquetOutputMeta meta ) { int nrFields = wFields.nrNonEmpty(); List outputFields = new ArrayList<>(); for ( int i = 0; i < nrFields; i++ ) { TableItem item = wFields.getNonEmpty( i ); int j = 1; ParquetOutputField field = new ParquetOutputField(); field.setFormatFieldName( item.getText( j++ ) ); field.setPentahoFieldName( item.getText( j++ ) ); String typeName = item.getText( j++ ); for ( ParquetSpec.DataType parqueType : SUPPORTED_PARQUET_TYPES ) { if ( parqueType.getName().equals( typeName ) ) { field.setFormatType( parqueType.getId() ); } } if ( field.getParquetType().equals( ParquetSpec.DataType.DECIMAL ) ) { field.setPrecision( item.getText( j++ ) ); field.setScale( item.getText( j++ ) ); } else if ( field.getParquetType().equals( ParquetSpec.DataType.FLOAT ) || field.getParquetType() .equals( ParquetSpec.DataType.DOUBLE ) ) { j++; field.setScale( item.getText( j++ ) ); } else { j += 2; } field.setDefaultValue( item.getText( j++ ) ); field.setAllowNull( NullableValuesEnum.YES.getValue().equals( item.getText( j ) ) ); outputFields.add( field ); } meta.setOutputFields( outputFields ); } private void populateFieldsUI( ParquetOutputMeta meta, TableView wOutputFields ) { populateFieldsUI( meta.getOutputFields(), wOutputFields, ( field, item ) -> { int i = 1; item.setText( i++, coalesce( field.getFormatFieldName() ) ); item.setText( i++, coalesce( field.getPentahoFieldName() ) ); item.setText( i++, coalesce( field.getParquetType().getName() ) ); if ( field.getParquetType().equals( ParquetSpec.DataType.DECIMAL ) ) { item.setText( i++, coalesce( String.valueOf( field.getPrecision() ) ) ); item.setText( i++, coalesce( String.valueOf( field.getScale() ) ) ); } else if ( field.getParquetType().equals( ParquetSpec.DataType.FLOAT ) || field.getParquetType() .equals( ParquetSpec.DataType.DOUBLE ) ) { item.setText( i++, "" ); item.setText( i++, field.getScale() > 0 ? String.valueOf( field.getScale() ) : "" ); } else { item.setText( i++, "" ); item.setText( i++, "" ); } item.setText( i++, coalesce( field.getDefaultValue() ) ); item.setText( i++, field.getAllowNull() ? 
NullableValuesEnum.YES.getValue() : NullableValuesEnum.NO.getValue() ); } ); } private void populateFieldsUI( List fields, TableView wFields, BiConsumer converter ) { for ( int i = 0; i < fields.size(); i++ ) { TableItem item = null; if ( i < wFields.table.getItemCount() ) { item = wFields.table.getItem( i ); } else { item = new TableItem( wFields.table, SWT.NONE ); } converter.accept( fields.get( i ), item ); } } private void getFieldsFromPreviousStep( RowMetaInterface row, TableView tableView, int keyColumn, int[] nameColumn, int[] dataTypeColumn, int lengthColumn, int precisionColumn, boolean optimizeWidth, TableItemInsertListener listener ) { if ( row == null || row.size() == 0 ) { return; // nothing to do } Table table = tableView.table; // get a list of all the non-empty keys (names) // List keys = new ArrayList<>(); for ( int i = 0; i < table.getItemCount(); i++ ) { TableItem tableItem = table.getItem( i ); String key = tableItem.getText( keyColumn ); if ( !Utils.isEmpty( key ) && keys.indexOf( key ) < 0 ) { keys.add( key ); } } int choice = 0; if ( !keys.isEmpty() ) { // Ask what we should do with the existing data in the step. // MessageDialog getFieldsChoiceDialog = getFieldsChoiceDialog( tableView.getShell(), row.size() ); int idx = getFieldsChoiceDialog.open(); choice = idx & 0xFF; } if ( choice == 3 || choice == 255 ) { return; // Cancel clicked } if ( choice == 2 ) { tableView.clearAll( false ); } for ( int i = 0; i < row.size(); i++ ) { ValueMetaInterface v = row.getValueMeta( i ); boolean add = true; // hang on, see if it's not yet in the table view if ( choice == 0 && keys.indexOf( v.getName() ) >= 0 ) { add = false; } if ( add ) { TableItem tableItem = new TableItem( table, SWT.NONE ); for ( int c = 0; c < nameColumn.length; c++ ) { tableItem.setText( nameColumn[ c ], Const.NVL( v.getName(), "" ) ); } String parquetTypeName = ParquetTypeConverter.convertToParquetType( v.getType() ); if ( dataTypeColumn != null ) { for ( int c = 0; c < dataTypeColumn.length; c++ ) { tableItem.setText( dataTypeColumn[ c ], parquetTypeName ); } } if ( parquetTypeName.equals( ParquetSpec.DataType.DECIMAL.getName() ) ) { if ( lengthColumn > 0 && v.getLength() > 0 ) { tableItem.setText( lengthColumn, Integer.toString( v.getLength() ) ); } else { // Set the default precision tableItem.setText( lengthColumn, Integer.toString( ParquetSpec.DEFAULT_DECIMAL_PRECISION ) ); } if ( precisionColumn > 0 && v.getPrecision() >= 0 ) { tableItem.setText( precisionColumn, Integer.toString( v.getPrecision() ) ); } else { // Set the default scale tableItem.setText( precisionColumn, Integer.toString( ParquetSpec.DEFAULT_DECIMAL_SCALE ) ); } } else if ( parquetTypeName.equals( ParquetSpec.DataType.FLOAT.getName() ) || parquetTypeName.equals( ParquetSpec.DataType.DOUBLE.getName() ) && ( precisionColumn > 0 && v.getPrecision() > 0 ) ) { tableItem.setText( precisionColumn, Integer.toString( v.getPrecision() ) ); } if ( listener != null && !listener.tableItemInserted( tableItem, v ) ) { tableItem.dispose(); // remove it again } } } tableView.removeEmptyRows(); tableView.setRowNums(); if ( optimizeWidth ) { tableView.optWidth( true ); } } protected void getFields() { try { RowMetaInterface r = transMeta.getPrevStepFields( stepname ); if ( r != null ) { TableItemInsertListener listener = ( tableItem, v ) -> true; getFieldsFromPreviousStep( r, wOutputFields, 1, new int[] { 1, 2 }, new int[] { 3 }, 4, 5, true, listener ); // fix empty null fields to nullable for ( int i = 0; i < wOutputFields.table.getItemCount(); i++ ) { 
TableItem tableItem = wOutputFields.table.getItem( i ); if ( StringUtils.isEmpty( tableItem.getText( 7 ) ) ) { tableItem.setText( 7, "Yes" ); } } meta.setChanged(); } } catch ( KettleException ke ) { new ErrorDialog( shell, BaseMessages.getString( PKG, "System.Dialog.GetFieldsFailed.Title" ), BaseMessages .getString( PKG, "System.Dialog.GetFieldsFailed.Message" ), ke ); } } @Override protected int getWidth() { return SHELL_WIDTH; } @Override protected int getHeight() { return SHELL_HEIGHT; } @Override protected Listener getPreview() { // no preview return null; } @Override protected SelectionOperation selectionOperation() { return SelectionOperation.SAVE_TO_FILE_FOLDER; } } ================================================ FILE: kettle-plugins/formats/core/src/main/java/org/pentaho/big/data/kettle/plugins/formats/impl/parquet/output/ParquetOutputMeta.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.formats.impl.parquet.output; import org.pentaho.big.data.kettle.plugins.formats.impl.NamedClusterResolver; import org.pentaho.big.data.kettle.plugins.formats.parquet.output.ParquetOutputMetaBase; import org.pentaho.di.core.annotations.Step; import org.pentaho.di.core.injection.InjectionSupported; import org.pentaho.di.trans.Trans; import org.pentaho.di.trans.TransMeta; import org.pentaho.di.trans.step.StepDataInterface; import org.pentaho.di.trans.step.StepInterface; import org.pentaho.di.trans.step.StepMeta; @Step( id = "ParquetOutput", image = "PO.svg", name = "ParquetOutput.Name", description = "ParquetOutput.Description", categoryDescription = "i18n:org.pentaho.di.trans.step:BaseStep.Category.BigData", i18nPackageName = "org.pentaho.di.trans.steps.parquet" ) @InjectionSupported( localizationPrefix = "ParquetOutput.Injection.", groups = { "FILENAME_LINES", "FIELDS" }, hide = { "FIELD_POSITION", "FIELD_LENGTH", "FIELD_IGNORE", "FIELD_FORMAT", "FIELD_PRECISION", "FIELD_CURRENCY", "FIELD_DECIMAL", "FIELD_GROUP", "FIELD_REPEAT", "FIELD_TRIM_TYPE", "FIELD_NULL_STRING" } ) public class ParquetOutputMeta extends ParquetOutputMetaBase { private final NamedClusterResolver namedClusterResolver; public ParquetOutputMeta() { this( NamedClusterResolver.getInstance() ); } public ParquetOutputMeta( NamedClusterResolver namedClusterResolver ) { this.namedClusterResolver = namedClusterResolver; } @Override public StepInterface getStep( StepMeta stepMeta, StepDataInterface stepDataInterface, int copyNr, TransMeta transMeta, Trans trans ) { return new ParquetOutput( stepMeta, stepDataInterface, copyNr, transMeta, trans ); } @Override public StepDataInterface getStepData() { return new ParquetOutputData(); } public NamedClusterResolver getNamedClusterResolver() { return namedClusterResolver; } } ================================================ FILE: kettle-plugins/formats/core/src/main/resources/OSGI-INF/blueprint/blueprint.xml ================================================ ================================================ FILE: kettle-plugins/formats/core/src/main/resources/org/pentaho/big/data/kettle/plugins/formats/impl/orc/input/messages/messages_en_US.properties 
================================================ OrcInput.Name=ORC input OrcInput.Description=Reads data from ORC file OrcInputDialog.StepName.Label=Step name OrcInputDialog.Shell.Title=ORC input OrcInputDialog.Fields.Label=Fields OrcInputDialog.Fields.column.Name=Name OrcInputDialog.Fields.column.Path=ORC path (ORC type) OrcInputDialog.Fields.column.Type=Type OrcInputDialog.Fields.column.SourceType=Source Type OrcInputDialog.Fields.column.Format=Format OrcInputDialog.Fields.Get=Get fields OrcInputDialog.PassThruFields.Tooltip=Enable this if you have other fields in the previous step\nand you want those fields to appear in every record OrcInputDialog.PassThruFields.Label=Pass through fields from previous step OrcInputDialog.FileBrowser.KettleFileException=Kettle File Exception OrcInputDialog.FileBrowser.FileSystemException=File System Exception OrcInputDialog.PreviewSize.DialogTitle=Preview size OrcInputDialog.PreviewSize.DialogMessage=Enter the number of rows to preview OrcInput.Error.UnableToLoadSchemaFromContainerFile=Unable to find schema OrcInput.Injection.FILENAME=The name of the ORC file to use as input. OrcInput.Injection.FIELD_NAME=The name of the field to output to the Kettle stream. OrcInput.Injection.FIELD_PATH=The column name in the ORC file. OrcInput.Injection.FIELD_TYPE=The Kettle field type. OrcInput.Injection.ORC_TYPE=The ORC type for the field. OrcInput.Injection.FIELD_IF_NULL=Specify whether the incoming field will contain null values. If no, then the default value will be used. OrcInput.Injection.FIELD_NULL_STRING=This option will skip errors when specified paths or fields are not present in the active ORC schema. OrcInput.Injection.FIELDS=Fields. OrcInput.Injection.FILENAME_LINES=The list of file definitions. ================================================ FILE: kettle-plugins/formats/core/src/main/resources/org/pentaho/big/data/kettle/plugins/formats/impl/orc/messages/messages_en_US.properties ================================================ BaseStepDialog.StepName=Step name: BaseStepDialog.Preview=Preview OrcDialog.Location.Label=Location: OrcDialog.Filename.Label=Folder/File name: #ToDo OrcDialog.FileBrowser.KettleFileException= OrcDialog.FileBrowser.FileSystemException= OrcDialog.SchemaFileBrowser.KettleFileException= OrcDialog.SchemaFileBrowser.FileSystemException= ================================================ FILE: kettle-plugins/formats/core/src/main/resources/org/pentaho/big/data/kettle/plugins/formats/impl/orc/output/messages/messages_en_US.properties ================================================ OrcOutput.Name=ORC output OrcOutput.Description=Writes data to an Orc file according to a mapping OrcOutputDialog.Shell.Title=ORC output OrcOutputDialog.OverwriteFile.Label=Overwrite existing output file OrcOutputDialog.FieldsTab.TabTitle=Fields OrcOutputDialog.Fields.column.Name=Name OrcOutputDialog.Fields.column.Path=ORC path OrcOutputDialog.Fields.column.Type=ORC type OrcOutputDialog.Fields.column.Precision=Precision OrcOutputDialog.Fields.column.Scale=Scale OrcOutputDialog.Fields.column.Default=Default value OrcOutputDialog.Fields.column.Null=Null OrcOutputDialog.Fields.Get=Get fields OrcOutputDialog.Options.TabTitle=Options OrcOutputDialog.Options.Compression=Compression: OrcOutputDialog.Options.StripeSize=Stripe size (MB): OrcOutputDialog.Options.CompressSize=Compress size (KB): OrcOutputDialog.Options.InlineIndexes=Inline Indexes OrcOutputDialog.Options.RowsBetweenEntries=Rows between entries: OrcOutputDialog.Options.DateInFileName=Include date in file 
name OrcOutputDialog.Options.TimeInFileName=Include time in file name OrcOutputDialog.Options.SpecifyDateTimeFormat=Specify date time format OrcOutputDialog.AddNew=Add &new OrcOutputDialog.Add=Add &all OrcOutputDialog.ClearAndAdd=C&lear and add all OrcOutputDialog.Cancel=&Cancel OrcOutputDialog.GetFieldsChoice.Title=Question OrcOutputDialog.GetFieldsChoice.Message=There already is data entered, {0} lines were found.\nHow do you want to add the {1} fields that were found? OrcOutput.CompressionType.NONE=None OrcOutput.MissingDefaultFields.Title=Missing values OrcOutput.MissingDefaultFields.Msg=One or more fields are missing a value. Please set a default value for all fields that have ''Null'' set to ''No''. OrcOutput.Injection.OPTIONS_COMPRESSION=This option will let you specify the type of compression to use on the file output. OrcOutput.Injection.OPTIONS_STRIPE_SIZE=This option defines the file stripe size. OrcOutput.Injection.OPTIONS_COMPRESS_SIZE=This option defines the file compress size. OrcOutput.Injection.OPTIONS_ROWS_BETWEEN_ENTRIES=This option defines the number of rows between entries. OrcOutput.Injection.OPTIONS_DATE_IN_FILE_NAME=This defines whether to include the current date in the output file/directory name. OrcOutput.Injection.OPTIONS_TIME_IN_FILE_NAME=This defines whether to include the current time in the output file/directory name. OrcOutput.Injection.OPTIONS_DATE_FORMAT=This option defines the date format used in the output file/directory name. OrcOutput.Injection.OVERRIDE_OUTPUT=Enable this option to overwrite the existing output file(s). OrcOutput.Injection.FILENAME=The name of the folder/file to write to. OrcOutput.Injection.FIELD_PATH=The path to the field in the Orc file. OrcOutput.Injection.FIELD_NAME=The name of the output field. OrcOutput.Injection.FIELD_TYPE=The Kettle field type. OrcOutput.Injection.FIELD_IF_NULL=The default value to use in case the incoming field value is null. OrcOutput.Injection.FIELD_NULLABLE=Specify whether the incoming field will contain null values. If no, then the default value will be used. OrcOutput.Injection.FIELD_NULL_STRING=Deprecated: Replaced by FIELD_NULLABLE. OrcOutput.Injection.FIELD_DECIMAL_PRECISION=Maximum number of digits allowed in the number. (only applies to numbers stored as decimal type) OrcOutput.Injection.FIELD_DECIMAL_SCALE=Maximum number of digits after the decimal point. (only applies to numbers stored as decimal type) OrcOutput.Injection.FIELD_POSITION=Position OrcOutput.Injection.FIELD_LENGTH=Length OrcOutput.Injection.FIELD_IGNORE=Ignore? (Y/N) OrcOutput.Injection.FIELD_FORMAT=Format OrcOutput.Injection.FIELD_PRECISION=Precision OrcOutput.Injection.FIELD_CURRENCY=Currency symbol OrcOutput.Injection.FIELD_DECIMAL=Decimal symbol OrcOutput.Injection.FIELD_GROUP=Grouping symbol OrcOutput.Injection.FIELD_REPEAT=Repeat values? (Y/N) OrcOutput.Injection.FIELD_TRIM_TYPE=Trim Type OrcOutput.Injection.FIELDS= ================================================ FILE: kettle-plugins/formats/core/src/main/resources/org/pentaho/big/data/kettle/plugins/formats/impl/parquet/input/messages/messages_en_US.properties ================================================ ParquetInput.Name=Parquet input ParquetInput.Description=Reads data from a Parquet file.
ParquetInputDialog.Shell.Title=Parquet input ParquetInputDialog.StepName.Label=Step name ParquetInputDialog.FileTab.TabTitle=Source ParquetInputDialog.FieldsTab.TabTitle=Input ParquetInputDialog.PassThruFields.Tooltip=Enable this if you have other fields in the previous step\nand you want those fields to appear in every record ParquetInputDialog.PassThruFields.Label=Pass through fields from previous step ParquetInputDialog.IgnoreEmptyFolder.Tooltip=Enable this if you wish the transformation to keep running even if the target folder is empty. ParquetInputDialog.IgnoreEmptyFolder.Label=Ignore empty folder ParquetInputDialog.Fields.Label=Fields: ParquetInputDialog.Fields.Get=Get Fields ParquetInputDialog.Fields.column.Name=Name ParquetInputDialog.Fields.column.Path=Path ParquetInputDialog.Fields.column.Type=Type ParquetInputDialog.Fields.column.Format=Format ParquetInputDialog.FileBrowser.KettleFileException=Kettle File Exception ParquetInputDialog.FileBrowser.FileSystemException=File System Exception ParquetInputDialog.PreviewSize.DialogTitle=Enter preview size ParquetInputDialog.PreviewSize.DialogMessage=Enter the number of rows you would like to preview\: ParquetInput.Injection.FILENAME_LINES=The list of file definitions. ParquetInput.Injection.FILENAME=The name of the folder/file where the Parquet data comes from. ParquetInput.Injection.FIELDS=Fields. ParquetInput.Injection.FIELD_PATH=The path to the field in the Parquet file. ParquetInput.Injection.FIELD_NAME=The name of the field to output to the Kettle stream. ParquetInput.Injection.FIELD_TYPE=The Kettle field type. ParquetInput.Injection.IGNORE_EMPTY_FOLDER=Enable this if you wish the transformation to keep running even if the target folder is empty. ParquetInput.Injection.PARQUET_TYPE=The Parquet type for the field. ParquetInput.GetFieldsChoice.Title=New fields were found ParquetInput.GetFieldsChoice.Message=We found {0} new fields. What would you like to do with the new fields? ParquetInput.GetFieldsChoice.AddNew=Add &new fields ParquetInput.GetFieldsChoice.Add=Add &all fields ParquetInput.GetFieldsChoice.ClearAndAdd=C&lear and add all ParquetInput.GetFieldsChoice.Cancel=&Cancel ================================================ FILE: kettle-plugins/formats/core/src/main/resources/org/pentaho/big/data/kettle/plugins/formats/impl/parquet/messages/messages_en_US.properties ================================================ BaseStepDialog.StepName=Step name: BaseStepDialog.Preview=Preview ParquetDialog.Location.Label=Location: ParquetDialog.Filename.Label=Folder/File name: #ToDo ParquetDialog.FileBrowser.KettleFileException= ParquetDialog.FileBrowser.FileSystemException= ================================================ FILE: kettle-plugins/formats/core/src/main/resources/org/pentaho/big/data/kettle/plugins/formats/impl/parquet/output/messages/messages_en_US.properties ================================================ ParquetOutput.Name=Parquet output ParquetOutput.Description=Writes data to a Parquet file according to a mapping.
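# Note (not an i18n key; inferred from the ParquetOutput step code): the size options whose labels appear below
# are entered in the units shown (MB/KB) and converted to bytes by the step, e.g. a row group size of 128 MB
# becomes 128 * 1024 * 1024 bytes, and a data or dictionary page size of 1024 KB becomes 1024 * 1024 bytes.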
ParquetOutputDialog.OverwriteFile.Label=Overwrite existing output file ParquetOutputDialog.Shell.Title=Parquet output ParquetOutputDialog.FieldsTab.TabTitle=Fields ParquetOutputDialog.Fields.column.Name=Name ParquetOutputDialog.Fields.column.Path=Parquet Path ParquetOutputDialog.Fields.column.Type=Parquet Type ParquetOutputDialog.Fields.column.Default=Default value ParquetOutputDialog.Fields.column.Null=Null ParquetOutputDialog.Fields.column.Precision=Precision ParquetOutputDialog.Fields.column.Scale=Scale ParquetOutputDialog.Fields.Get=Get Fields ParquetOutputDialog.Options.TabTitle=Options ParquetOutputDialog.Options.Compression=Compression: ParquetOutputDialog.Options.Version=Version: ParquetOutputDialog.Options.RowSize=Row group size (MB): ParquetOutputDialog.Options.PageSize=Data page size (KB): ParquetOutputDialog.Options.Extension=Extension: ParquetOutputDialog.Options.DictionaryEncoding=Dictionary encoding ParquetOutputDialog.Options.IncludeDateInFilename=Include date in file name ParquetOutputDialog.Options.IncludeTimeInFilename=Include time in file name ParquetOutputDialog.Options.SpecifyDateTimeFormat=Specify date time format ParquetOutputDialog.Options.DictPageSize=Page size (KB): ParquetOutput.Injection.FILENAME_LINES=The list of file definitions. ParquetOutput.Injection.FILENAME=The name of the folder/file to write to. ParquetOutput.Injection.OVERRIDE_OUTPUT=Enable this option to overwrite the existing output file(s). ParquetOutput.Injection.FIELDS=Fields. ParquetOutput.Injection.FIELD_PATH=The path of the output field. ParquetOutput.Injection.FIELD_NAME=The name of the output field. ParquetOutput.Injection.FIELD_TYPE=(Deprecated: Use FIELD_PARQUET_TYPE) The Kettle field type. ParquetOutput.Injection.FIELD_PARQUET_TYPE=The Parquet output field type. ParquetOutput.Injection.FIELD_NULLABLE=Specify whether the incoming field will contain null values. If no, then the default value will be used. ParquetOutput.Injection.FIELD_IF_NULL=The default value to use in case the incoming field value is null. ParquetOutput.Injection.FIELD_DECIMAL_PRECISION=Maximum number of digits allowed in the number. (only applies to numbers stored as decimal type) ParquetOutput.Injection.FIELD_DECIMAL_SCALE=Maximum number of digits after the decimal point. (only applies to numbers stored as decimal type) ParquetOutput.Injection.COMPRESSION=This option will let you specify the type of compression to use on the file output. ParquetOutput.Injection.PARQUET_VERSION=Specify the parquet version. ParquetOutput.Injection.ROW_GROUP_SIZE=Specify the group size for the rows. ParquetOutput.Injection.DATA_PAGE_SIZE=Specify the page size for the data. ParquetOutput.Injection.ENABLE_DICTIONARY=Enable this option to indicate that the data will have dictionary encoding. ParquetOutput.Injection.DICT_PAGE_SIZE=Specify the dictionary page size. ParquetOutput.Injection.INC_DATE_IN_FILENAME=This option will include the system date in the file name. ParquetOutput.Injection.INC_TIME_IN_FILENAME=This option will include the system time in the file name. ParquetOutput.Injection.DATE_FORMAT=Specify which date & time format you want to go into each file name. ParquetOutput.Injection.EXTENSION=The extension of the output file. ParquetOutput.GetFieldsChoice.Title=New fields were found ParquetOutput.GetFieldsChoice.Message=We found {0} new fields. What would you like to do with the new fields? 
ParquetOutput.GetFieldsChoice.AddNew=Add &new fields ParquetOutput.GetFieldsChoice.Add=Add &all fields ParquetOutput.GetFieldsChoice.ClearAndAdd=C&lear and add all ParquetOutput.GetFieldsChoice.Cancel=&Cancel ================================================ FILE: kettle-plugins/formats/core/src/test/java/org/pentaho/big/data/kettle/plugins/formats/impl/NamedClusterResolverTest.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.formats.impl; import java.lang.reflect.Field; import java.util.ArrayList; import java.util.Collection; import org.junit.After; import org.junit.AfterClass; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNull; import org.junit.Before; import org.junit.BeforeClass; import org.junit.Test; import org.junit.runner.RunWith; import org.mockito.ArgumentMatchers; import org.mockito.Mock; import org.mockito.MockedStatic; import org.mockito.Mockito; import static org.mockito.Mockito.lenient; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; import static org.mockito.internal.verification.VerificationModeFactory.times; import org.mockito.junit.MockitoJUnitRunner; import org.pentaho.di.core.logging.KettleLogStore; import org.pentaho.di.core.logging.KettleLoggingEventListener; import org.pentaho.di.core.service.PluginServiceLoader; import org.pentaho.hadoop.shim.api.cluster.NamedCluster; import org.pentaho.hadoop.shim.api.cluster.NamedClusterService; import org.pentaho.hadoop.shim.api.cluster.NamedClusterServiceLocator; import org.pentaho.metastore.api.IMetaStore; import org.pentaho.metastore.locator.api.MetastoreLocator; @RunWith(MockitoJUnitRunner.class) public class NamedClusterResolverTest { @Mock private MetastoreLocator metaStoreService; @Mock private IMetaStore metaStore; @Mock private NamedClusterService namedClusterService; @Mock private KettleLoggingEventListener kettleLoggingEventListener; @Mock private NamedCluster namedCluster; @Mock private NamedClusterServiceLocator namedClusterServiceLocator; private NamedClusterResolver namedClusterResolver; private static MockedStatic pluginServiceLoaderMockedStatic; @BeforeClass public static void setupClass() { // Create a class-level static mock that will stay open for all tests pluginServiceLoaderMockedStatic = Mockito.mockStatic( PluginServiceLoader.class ); } @AfterClass public static void tearDownClass() { // Close the static mock after all tests complete if ( pluginServiceLoaderMockedStatic != null ) { pluginServiceLoaderMockedStatic.close(); } } @Before public void before() throws Exception { // Reset the singleton before each test resetSingleton(); KettleLogStore.init(); KettleLogStore.getAppender().addLoggingEventListener( kettleLoggingEventListener ); // Mock the metastore locator to return a metastore lenient().when( metaStoreService.getMetastore() ).thenReturn( metaStore ); lenient().when( metaStoreService.getMetastore( ArgumentMatchers.any() ) ).thenReturn( metaStore ); lenient().when( metaStoreService.getExplicitMetastore( ArgumentMatchers.any() ) ).thenReturn( metaStore ); // Use the specific metaStore object in the mocks to 
ensure matching lenient().when( namedClusterService.getNamedClusterByName( ArgumentMatchers.eq( "testhc" ), ArgumentMatchers.same( metaStore ) ) ) .thenReturn( namedCluster ); lenient().when( namedClusterService.getNamedClusterByHost( ArgumentMatchers.eq( "somehost" ), ArgumentMatchers.same( metaStore ) ) ) .thenReturn( namedCluster ); // Mock MetastoreLocator loading in the @Before so it's available for all tests Collection metastoreLocatorCollection = new ArrayList<>(); metastoreLocatorCollection.add( metaStoreService ); pluginServiceLoaderMockedStatic.when( () -> PluginServiceLoader.loadServices( MetastoreLocator.class ) ) .thenReturn( metastoreLocatorCollection ); Collection namedClusterServiceLocatorCollection = new ArrayList<>(); namedClusterServiceLocatorCollection.add( namedClusterServiceLocator ); pluginServiceLoaderMockedStatic.when( () -> PluginServiceLoader.loadServices( NamedClusterServiceLocator.class ) ) .thenReturn( namedClusterServiceLocatorCollection ); Collection namedClusterServiceCollection = new ArrayList<>(); namedClusterServiceCollection.add( namedClusterService ); pluginServiceLoaderMockedStatic.when( () -> PluginServiceLoader.loadServices( NamedClusterService.class ) ) .thenReturn( namedClusterServiceCollection ); // Create NamedClusterResolver with mocked dependencies using the package-private constructor namedClusterResolver = createResolverWithMocks(); // Set it as the singleton instance Field instance = NamedClusterResolver.class.getDeclaredField( "namedClusterResolver" ); instance.setAccessible( true ); instance.set( null, namedClusterResolver ); } private NamedClusterResolver createResolverWithMocks() throws Exception { // Use reflection to call the package-private constructor java.lang.reflect.Constructor constructor = NamedClusterResolver.class.getDeclaredConstructor( NamedClusterServiceLocator.class, NamedClusterService.class ); constructor.setAccessible( true ); return constructor.newInstance( namedClusterServiceLocator, namedClusterService ); } @After public void after() throws Exception { // Reset the singleton after each test to ensure test isolation resetSingleton(); } private void resetSingleton() throws Exception { // Use reflection to reset the singleton instance Field instance = NamedClusterResolver.class.getDeclaredField( "namedClusterResolver" ); instance.setAccessible( true ); instance.set( null, null ); } @Test public void windowsFilePathsAreHandled() { assertNull( namedClusterResolver.resolveNamedCluster( "C:/path/to some/file" ) ); verify( kettleLoggingEventListener, times( 0 ) ).eventAdded( ArgumentMatchers.any() ); } @Test public void testNamedClusterByName() throws Exception { // Reset the metastoreLocator cache to force reload Field metaStoreServiceField = NamedClusterResolver.class.getDeclaredField( "metaStoreService" ); metaStoreServiceField.setAccessible( true ); metaStoreServiceField.set( namedClusterResolver, null ); NamedCluster cluster = namedClusterResolver.resolveNamedCluster( "hc://testhc/path" ); assertEquals( namedCluster, cluster ); cluster = namedClusterResolver.resolveNamedCluster( "hc://nosuchhc/path" ); assertNull( cluster ); } @Test public void testNamedClusterByHost() throws Exception { // Reset the metastoreLocator cache to force reload Field metaStoreServiceField = NamedClusterResolver.class.getDeclaredField( "metaStoreService" ); metaStoreServiceField.setAccessible( true ); metaStoreServiceField.set( namedClusterResolver, null ); NamedCluster cluster = namedClusterResolver.resolveNamedCluster( 
"hdfs://somehost/path" ); assertEquals( namedCluster, cluster ); } } ================================================ FILE: kettle-plugins/formats/core/src/test/java/org/pentaho/big/data/kettle/plugins/formats/impl/orc/input/OrcInputMetaInjectionTest.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.formats.impl.orc.input; import org.junit.Before; import org.junit.ClassRule; import org.junit.Test; import org.pentaho.big.data.kettle.plugins.formats.impl.NamedClusterResolver; import org.pentaho.big.data.kettle.plugins.formats.orc.OrcInputField; import org.pentaho.di.core.injection.BaseMetadataInjectionTest; import org.pentaho.di.junit.rules.RestorePDIEngineEnvironment; import org.pentaho.hadoop.shim.api.format.OrcSpec; import static org.mockito.Mockito.mock; public class OrcInputMetaInjectionTest extends BaseMetadataInjectionTest { @ClassRule public static RestorePDIEngineEnvironment env = new RestorePDIEngineEnvironment(); @Before public void setup() { NamedClusterResolver mockNamedClusterResolver = mock( NamedClusterResolver.class ); setup( new OrcInputMeta( mockNamedClusterResolver ) ); OrcInputField orcInputField = new OrcInputField(); meta.setInputFields( new OrcInputField[] { orcInputField } ); } @Test public void test() throws Exception { check( "FILENAME", () -> meta.inputFiles.fileName[0] ); checkStringToEnum( "ORC_TYPE", () -> meta.getInputFields()[0].getOrcType(), OrcSpec.DataType.class ); check( "FIELD_PATH", () -> meta.getInputFields()[ 0 ].getFormatFieldName() ); check( "FIELD_NAME", () -> meta.getInputFields()[ 0 ].getName() ); checkPdiTypes( "FIELD_TYPE", () -> meta.getInputFields()[ 0 ].getType() ); } } ================================================ FILE: kettle-plugins/formats/core/src/test/java/org/pentaho/big/data/kettle/plugins/formats/impl/orc/input/OrcInputTest.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. 
* * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.formats.impl.orc.input; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import org.mockito.ArgumentCaptor; import org.mockito.Mock; import org.mockito.MockedStatic; import org.mockito.Mockito; import org.mockito.junit.MockitoJUnitRunner; import org.pentaho.big.data.kettle.plugins.formats.impl.NamedClusterResolver; import org.pentaho.di.core.bowl.DefaultBowl; import org.pentaho.di.core.RowMetaAndData; import org.pentaho.di.core.exception.KettleException; import org.pentaho.di.core.logging.KettleLogStore; import org.pentaho.di.core.logging.LogLevel; import org.pentaho.di.core.row.RowMeta; import org.pentaho.di.core.row.ValueMetaInterface; import org.pentaho.di.core.row.value.ValueMetaBoolean; import org.pentaho.di.core.row.value.ValueMetaInteger; import org.pentaho.di.core.row.value.ValueMetaString; import org.pentaho.di.core.service.PluginServiceLoader; import org.pentaho.di.trans.Trans; import org.pentaho.di.trans.TransMeta; import org.pentaho.di.trans.step.RowHandler; import org.pentaho.di.trans.step.StepDataInterface; import org.pentaho.di.trans.step.StepMeta; import org.pentaho.hadoop.shim.api.cluster.NamedCluster; import org.pentaho.hadoop.shim.api.cluster.NamedClusterService; import org.pentaho.hadoop.shim.api.cluster.NamedClusterServiceLocator; import org.pentaho.hadoop.shim.api.format.FormatService; import org.pentaho.hadoop.shim.api.format.IPentahoOrcInputFormat; import org.pentaho.metastore.locator.api.MetastoreLocator; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Iterator; import java.util.List; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.nullable; import static org.mockito.Mockito.doThrow; import static org.mockito.Mockito.spy; import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; @RunWith(MockitoJUnitRunner.class) public class OrcInputTest { private static final String INPUT_STEP_NAME = "Input Step Name"; private static final String INPUT_STREAM_FIELD_NAME = "inputStreamFieldName"; private static final String PASS_FIELD_NAME = "passFieldName"; private static final String FILENAME = "orcFile"; @Mock private StepMeta mockStepMeta; @Mock private StepDataInterface mockStepDataInterface; @Mock private TransMeta mockTransMeta; @Mock private Trans mockTrans; @Mock private NamedClusterServiceLocator mockNamedClusterServiceLocator; @Mock private NamedClusterService mockNamedClusterService; @Mock private MetastoreLocator mockMetaStoreLocator; @Mock private FormatService mockFormatService; @Mock private OrcInputData orcInputData; @Mock private RowHandler mockRowHandler; @Mock private IPentahoOrcInputFormat mockPentahoOrcInputFormat; @Mock private IPentahoOrcInputFormat.IPentahoRecordReader mockPentahoOrcRecordReader; private OrcInputMeta orcInputMeta; private OrcInput orcInput; private RowMeta orcRowMeta; private RowMetaAndData[] orcRows; private RowMeta inputRowMeta; private RowMetaAndData[] inputRows; private int currentOrcInputRow; @Before public void setUp() throws Exception { KettleLogStore.init(); currentOrcInputRow = 0; setInputRows(); setOrcRows(); Collection metastoreLocatorCollection = new ArrayList<>(); 
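// The mockStatic block below intercepts PluginServiceLoader.loadServices( MetastoreLocator.class ) and returns
// this collection, so the metastore locator seen by the step under test is the mock defined above rather than a
// real service registry entry.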
metastoreLocatorCollection.add( mockMetaStoreLocator ); NamedClusterResolver namedClusterResolver; try ( MockedStatic pluginServiceLoaderMockedStatic = Mockito.mockStatic( PluginServiceLoader.class ) ) { pluginServiceLoaderMockedStatic.when( () -> PluginServiceLoader.loadServices( MetastoreLocator.class ) ) .thenReturn( metastoreLocatorCollection ); // Mock the NamedClusterResolver instead of using the singleton namedClusterResolver = Mockito.mock( NamedClusterResolver.class ); when( namedClusterResolver.getNamedClusterServiceLocator() ).thenReturn( mockNamedClusterServiceLocator ); when( namedClusterResolver.resolveNamedCluster( any( String.class ) ) ).thenReturn( null ); orcInputMeta = spy( new OrcInputMeta( namedClusterResolver ) ); orcInputMeta.inputFiles.fileName = new String[1]; orcInputMeta.setFilename( INPUT_STREAM_FIELD_NAME ); orcInputMeta.setParentStepMeta( mockStepMeta ); when( mockStepMeta.getParentTransMeta() ).thenReturn( mockTransMeta ); when( mockStepMeta.getName() ).thenReturn( INPUT_STEP_NAME ); when( mockTransMeta.findStep( INPUT_STEP_NAME ) ).thenReturn( mockStepMeta ); when( mockTransMeta.getBowl() ).thenReturn( DefaultBowl.getInstance() ); orcInputData.input = mockPentahoOrcInputFormat; when( mockFormatService.createInputFormat( IPentahoOrcInputFormat.class, orcInputMeta.getNamedClusterResolver().resolveNamedCluster( orcInputMeta.getFilename() ) ) ) .thenReturn( mockPentahoOrcInputFormat ); when( mockNamedClusterServiceLocator.getService( nullable( NamedCluster.class ), any( Class.class ) ) ) .thenReturn( mockFormatService ); when( mockTransMeta.environmentSubstitute( INPUT_STREAM_FIELD_NAME ) ).thenReturn( INPUT_STREAM_FIELD_NAME ); when( mockPentahoOrcInputFormat.createRecordReader( null ) ).thenReturn( mockPentahoOrcRecordReader ); when( mockPentahoOrcRecordReader.iterator() ).thenReturn( new OrcInputTest.OrcRecordIterator() ); orcInput = spy( new OrcInput( mockStepMeta, mockStepDataInterface, 0, mockTransMeta, mockTrans ) ); orcInput.setRowHandler( mockRowHandler ); orcInput.setInputRowMeta( inputRowMeta ); orcInput.setLogLevel( LogLevel.ERROR ); orcInput.setTransMeta( mockTransMeta ); } } private Object[] returnNextInputRow() { Object[] result = null; if ( currentOrcInputRow < inputRows.length ) { result = inputRows[currentOrcInputRow].getData().clone(); currentOrcInputRow++; } return result; } @Test public void testProcessRow() throws Exception { boolean result; int rowsProcessed = 0; ArgumentCaptor rowMetaCaptor = ArgumentCaptor.forClass( RowMeta.class ); ArgumentCaptor dataCaptor = ArgumentCaptor.forClass( Object[].class ); do { result = orcInput.processRow( orcInputMeta, orcInputData ); if ( result ) { rowsProcessed++; } } while ( result ); // 1 file, 2 rows. 
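// The mocked record reader iterates over two ORC rows from a single file, so exactly two rows should be
// processed; the captors collect every RowMeta/data pair recorded for putRow so the assertions below can check
// both the row count and the field values in order.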
assertEquals( 2, rowsProcessed ); verify( mockRowHandler, times( 2 ) ).putRow( rowMetaCaptor.capture(), dataCaptor.capture() ); List rowMeta = rowMetaCaptor.getAllValues(); List dataCaptured = dataCaptor.getAllValues(); for ( int rowNum = 0; rowNum < 2; rowNum++ ) { assertEquals( 0, rowMeta.get( rowNum ).indexOfValue( "str" ) ); assertEquals( "string" + ( rowNum % 2 + 1 ), dataCaptured.get( rowNum )[0] ); } } @Test public void testInit() { assertEquals( true, orcInput.init() ); } @Test public void testProcessRowKettleFailure() { String expectedMessage = "KettleExceptionMessage"; try { doThrow( new KettleException( expectedMessage ) ) .when( mockPentahoOrcInputFormat ).createRecordReader( null ); orcInput.processRow( orcInputMeta, orcInputData ); fail( "No Kettle Exception thrown" ); } catch ( KettleException kex ) { assertTrue( kex.getMessage().contains( expectedMessage ) ); } catch ( Exception ex ) { fail( "No other type of exception should be thrown" ); } } @Test public void testProcessRowGeneralFailure() { String expectedMessage = "KettleExceptionMessage"; try { doThrow( new Exception( expectedMessage ) ) .when( mockPentahoOrcInputFormat ).createRecordReader( null ); orcInput.processRow( orcInputMeta, orcInputData ); fail( "No Kettle Exception thrown" ); } catch ( KettleException kex ) { assertTrue( kex.getMessage().contains( expectedMessage ) ); } catch ( Exception ex ) { fail( "No other type of exception should be thrown" ); } } private RowMeta setOrcRowMeta() { orcRowMeta = new RowMeta(); ValueMetaInterface valueMetaString = new ValueMetaString( "str" ); orcRowMeta.addValueMeta( valueMetaString ); ValueMetaInterface valueMetaBoolean = new ValueMetaBoolean( "bool" ); orcRowMeta.addValueMeta( valueMetaBoolean ); ValueMetaInterface valueMetaInteger = new ValueMetaInteger( "int" ); orcRowMeta.addValueMeta( valueMetaInteger ); return orcRowMeta; } private RowMeta setInputRowMeta() { inputRowMeta = new RowMeta(); ValueMetaInterface valueMetaString = new ValueMetaString( INPUT_STREAM_FIELD_NAME ); inputRowMeta.addValueMeta( valueMetaString ); ValueMetaInterface valueMetaString2 = new ValueMetaString( PASS_FIELD_NAME ); inputRowMeta.addValueMeta( valueMetaString2 ); return inputRowMeta; } private void setInputRows() { setInputRowMeta(); inputRows = new RowMetaAndData[] { new RowMetaAndData( orcRowMeta, FILENAME, "pass1" ) }; } private void setOrcRows() { setOrcRowMeta(); orcRows = new RowMetaAndData[] { new RowMetaAndData( orcRowMeta, "string1", true, new Integer( 123 ) ), new RowMetaAndData( orcRowMeta, "string2", true, new Integer( 321 ) ) }; } private class OrcRecordIterator implements Iterator { private Iterator iter; private boolean reset; OrcRecordIterator() { init(); } private void init() { iter = Arrays.asList( orcRows ).iterator(); reset = false; } @Override public boolean hasNext() { if ( reset ) { init(); } if ( !iter.hasNext() ) { reset = true; } return iter.hasNext(); } @Override public RowMetaAndData next() { if ( reset ) { init(); // Simultate a new iterator for the new file } return iter.next().clone(); } } } ================================================ FILE: kettle-plugins/formats/core/src/test/java/org/pentaho/big/data/kettle/plugins/formats/impl/orc/output/OrcOutputMetaInjectionTest.java ================================================ /*! 
****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.formats.impl.orc.output; import org.junit.Before; import org.junit.ClassRule; import org.junit.Test; import org.pentaho.big.data.kettle.plugins.formats.impl.NamedClusterResolver; import org.pentaho.big.data.kettle.plugins.formats.orc.output.OrcOutputField; import org.pentaho.di.core.exception.KettleException; import org.pentaho.di.core.injection.BaseMetadataInjectionTest; import org.pentaho.di.core.row.ValueMetaInterface; import org.pentaho.di.core.row.value.ValueMetaString; import org.pentaho.di.junit.rules.RestorePDIEngineEnvironment; import org.pentaho.hadoop.shim.api.format.OrcSpec; import java.util.Arrays; import static org.junit.Assert.assertEquals; import static org.mockito.Mockito.mock; public class OrcOutputMetaInjectionTest extends BaseMetadataInjectionTest { @ClassRule public static RestorePDIEngineEnvironment env = new RestorePDIEngineEnvironment(); @Before public void setup() { NamedClusterResolver mockNamedClusterResolver = mock( NamedClusterResolver.class ); setup( new OrcOutputMeta( mockNamedClusterResolver ) ); OrcOutputField orcOutputField = new OrcOutputField(); meta.setOutputFields( Arrays.asList( orcOutputField ) ); } @Test public void test() throws Exception { check( "FILENAME", () -> meta.getFilename() ); check( "OPTIONS_COMPRESS_SIZE", () -> meta.getCompressSize() ); check( "OPTIONS_DATE_FORMAT", () -> meta.getDateTimeFormat() ); check( "OPTIONS_DATE_IN_FILE_NAME", () -> meta.isDateInFileName() ); check( "OPTIONS_ROWS_BETWEEN_ENTRIES", () -> meta.getRowsBetweenEntries() ); check( "OPTIONS_STRIPE_SIZE", () -> meta.getStripeSize() ); check( "OPTIONS_TIME_IN_FILE_NAME", () -> meta.isTimeInFileName() ); check( "OVERRIDE_OUTPUT", () -> meta.isOverrideOutput() ); check( "FIELD_DECIMAL_PRECISION", () -> meta.getOutputFields().get( 0 ).getPrecision() ); check( "FIELD_DECIMAL_SCALE", () -> meta.getOutputFields().get( 0 ).getScale() ); check( "FIELD_IF_NULL", () -> meta.getOutputFields().get( 0 ).getDefaultValue() ); check( "FIELD_NAME", () -> meta.getOutputFields().get( 0 ).getPentahoFieldName() ); check( "FIELD_NULLABLE", () -> meta.getOutputFields().get( 0 ).getAllowNull() ); check( "FIELD_NULL_STRING", () -> meta.getOutputFields().get( 0 ).getAllowNull() ); check( "FIELD_PATH", () -> meta.getOutputFields().get( 0 ).getFormatFieldName() ); checkOrcTypes( "FIELD_TYPE", () -> meta.getOutputFields().get( 0 ).getFormatType(), OrcSpec.DataType.class ); check( "OPTIONS_COMPRESSION", () -> meta.getCompressionType().toUpperCase(), "SNAPPY" ); } protected void checkOrcTypes( String propertyName, IntGetter getter, Class enumType ) throws KettleException { OrcSpec.DataType[] values = OrcSpec.DataType.values(); ValueMetaInterface valueMeta = new ValueMetaString( "f" ); for ( OrcSpec.DataType v : values ) { injector.setProperty( meta, propertyName, setValue( valueMeta, v.toString() ), "f" ); assertEquals( v.getId(), getter.get() ); } skipPropertyTest( propertyName ); } } ================================================ FILE: kettle-plugins/formats/core/src/test/java/org/pentaho/big/data/kettle/plugins/formats/impl/orc/output/OrcOutputTest.java 
================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.formats.impl.orc.output; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import org.mockito.ArgumentCaptor; import org.mockito.Mock; import org.mockito.MockedStatic; import org.mockito.Mockito; import org.mockito.junit.MockitoJUnitRunner; import org.pentaho.big.data.kettle.plugins.formats.impl.NamedClusterResolver; import org.pentaho.big.data.kettle.plugins.formats.orc.output.OrcOutputField; import org.pentaho.di.core.bowl.DefaultBowl; import org.pentaho.di.core.RowMetaAndData; import org.pentaho.di.core.exception.KettleException; import org.pentaho.di.core.logging.KettleLogStore; import org.pentaho.di.core.logging.LogChannelInterface; import org.pentaho.di.core.logging.LogLevel; import org.pentaho.di.core.row.RowMeta; import org.pentaho.di.core.row.ValueMetaInterface; import org.pentaho.di.core.row.value.ValueMetaString; import org.pentaho.di.core.service.PluginServiceLoader; import org.pentaho.di.trans.Trans; import org.pentaho.di.trans.TransMeta; import org.pentaho.di.trans.step.RowHandler; import org.pentaho.di.trans.step.StepDataInterface; import org.pentaho.di.trans.step.StepMeta; import org.pentaho.hadoop.shim.api.cluster.NamedCluster; import org.pentaho.hadoop.shim.api.cluster.NamedClusterService; import org.pentaho.hadoop.shim.api.cluster.NamedClusterServiceLocator; import org.pentaho.hadoop.shim.api.format.FormatService; import org.pentaho.hadoop.shim.api.format.IPentahoOrcOutputFormat; import org.pentaho.hadoop.shim.api.format.OrcSpec; import org.pentaho.metastore.locator.api.MetastoreLocator; import java.io.File; import java.nio.file.Files; import java.util.ArrayList; import java.util.Collection; import java.util.List; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.anyBoolean; import static org.mockito.ArgumentMatchers.anyString; import static org.mockito.ArgumentMatchers.nullable; import static org.mockito.Mockito.doThrow; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.spy; import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; @RunWith(MockitoJUnitRunner.class) public class OrcOutputTest { private static final String OUTPUT_STEP_NAME = "Output Step Name"; private static final String OUTPUT_TRANS_NAME = "Output Trans Name"; private static final String OUTPUT_FILE_NAME = "outputFileName"; @Mock private StepMeta mockStepMeta; @Mock private StepDataInterface mockStepDataInterface; @Mock private TransMeta mockTransMeta; @Mock private Trans mockTrans; @Mock private NamedClusterServiceLocator mockNamedClusterServiceLocator; @Mock private NamedClusterService mockNamedClusterService; @Mock private MetastoreLocator mockMetaStoreLocator; @Mock private FormatService mockFormatService; @Mock private OrcOutputData orcOutputData; @Mock private RowHandler mockRowHandler; @Mock private 
IPentahoOrcOutputFormat mockPentahoOrcOutputFormat; @Mock private LogChannelInterface mockLogChannelInterface; @Mock private IPentahoOrcOutputFormat.IPentahoRecordWriter mockPentahoOrcRecordWriter; private OrcOutput orcOutput; private List orcOutputFields; private OrcOutputMeta orcOutputMeta; private RowMeta dataInputRowMeta; private RowMetaAndData[] dataInputRows; private int currentOrcRow; @Before public void setUp() throws Exception { KettleLogStore.init(); currentOrcRow = 0; setDataInputRows(); setOrcOutputRows(); Collection metastoreLocatorCollection = new ArrayList<>(); metastoreLocatorCollection.add( mockMetaStoreLocator ); NamedClusterResolver namedClusterResolver; try ( MockedStatic pluginServiceLoaderMockedStatic = Mockito.mockStatic( PluginServiceLoader.class ) ) { pluginServiceLoaderMockedStatic.when( () -> PluginServiceLoader.loadServices( MetastoreLocator.class ) ) .thenReturn( metastoreLocatorCollection ); // Mock the NamedClusterResolver instead of using the singleton namedClusterResolver = Mockito.mock( NamedClusterResolver.class ); when( namedClusterResolver.getNamedClusterServiceLocator() ).thenReturn( mockNamedClusterServiceLocator ); when( namedClusterResolver.resolveNamedCluster( any( String.class ) ) ).thenReturn( null ); orcOutputMeta = new OrcOutputMeta( namedClusterResolver ); orcOutputMeta.setFilename( OUTPUT_FILE_NAME ); orcOutputMeta.setOutputFields( orcOutputFields ); orcOutputMeta.setOverrideOutput( true ); orcOutputMeta.setParentStepMeta( mockStepMeta ); when( mockStepMeta.getName() ).thenReturn( OUTPUT_STEP_NAME ); when( mockTransMeta.findStep( OUTPUT_STEP_NAME ) ).thenReturn( mockStepMeta ); when( mockTransMeta.findStep( OUTPUT_STEP_NAME ) ).thenReturn( mockStepMeta ); when( mockTransMeta.getBowl() ).thenReturn( DefaultBowl.getInstance() ); try { when( mockRowHandler.getRow() ).thenAnswer( answer -> returnNextParquetRow() ); } catch ( KettleException ke ) { ke.printStackTrace(); } when( mockFormatService.createOutputFormat( IPentahoOrcOutputFormat.class, orcOutputMeta.getNamedClusterResolver().resolveNamedCluster( orcOutputMeta.getFilename() ) ) ) .thenReturn( mockPentahoOrcOutputFormat ); when( mockNamedClusterServiceLocator.getService( nullable( NamedCluster.class ), any( Class.class ) ) ) .thenReturn( mockFormatService ); when( mockPentahoOrcOutputFormat.createRecordWriter() ).thenReturn( mockPentahoOrcRecordWriter ); orcOutput = spy( new OrcOutput( mockStepMeta, mockStepDataInterface, 0, mockTransMeta, mockTrans ) ); orcOutput.init( orcOutputMeta, orcOutputData ); orcOutput.setInputRowMeta( dataInputRowMeta ); orcOutput.setRowHandler( mockRowHandler ); orcOutput.setLogLevel( LogLevel.ERROR ); orcOutput.setTransMeta( mockTransMeta ); } } @Test public void testProcessRow() throws Exception { boolean result; int rowsProcessed = 0; ArgumentCaptor rowMetaCaptor = ArgumentCaptor.forClass( RowMeta.class ); ArgumentCaptor dataCaptor = ArgumentCaptor.forClass( Object[].class ); do { result = orcOutput.processRow( orcOutputMeta, orcOutputData ); if ( result ) { rowsProcessed++; } } while ( result ); // 3 rows to be outputted to an Orc file assertEquals( 3, rowsProcessed ); verify( mockRowHandler, times( 3 ) ).putRow( rowMetaCaptor.capture(), dataCaptor.capture() ); List rowMetaCaptured = rowMetaCaptor.getAllValues(); List dataCaptured = dataCaptor.getAllValues(); for ( int rowNum = 0; rowNum < 3; rowNum++ ) { assertEquals( 0, rowMetaCaptured.get( rowNum ).indexOfValue( "StringName" ) ); assertEquals( "string" + ( rowNum % 3 + 1 ), dataCaptured.get( rowNum )[0] 
); } } @Test public void testProcessRowIllegalState() throws Exception { doThrow( new IllegalStateException( "IllegalStateExceptionMessage" ) ).when( mockPentahoOrcOutputFormat ) .setOutputFile( anyString(), anyBoolean() ); when( orcOutput.getLogChannel() ).thenReturn( mockLogChannelInterface ); assertFalse( orcOutput.processRow( orcOutputMeta, orcOutputData ) ); verify( mockLogChannelInterface, times( 1 ) ).logError( "IllegalStateExceptionMessage" ); } @Test public void testProcessRowKettleFailure() { String expectedMessage = "KettleExceptionMessage"; try { doThrow( new KettleException( expectedMessage ) ).when( orcOutput ).init(); orcOutput.processRow( orcOutputMeta, orcOutputData ); fail( "No Kettle Exception thrown" ); } catch ( KettleException kex ) { assertTrue( kex.getMessage().contains( expectedMessage ) ); } catch ( Exception ex ) { fail( "No other type of exception should be thrown" ); } } @Test public void testProcessRowGeneralFailure() { String expectedMessage = "GeneralExceptionMessage"; try { doThrow( new Exception( expectedMessage ) ).when( orcOutput ).init(); orcOutput.processRow( orcOutputMeta, orcOutputData ); fail( "No Kettle Exception thrown" ); } catch ( KettleException kex ) { assertTrue( kex.getMessage().contains( expectedMessage ) ); } catch ( Exception ex ) { fail( "No other type of exception should be thrown" ); } } private Object[] returnNextParquetRow() { Object[] result = null; if ( currentOrcRow < dataInputRows.length ) { result = dataInputRows[currentOrcRow].getData().clone(); currentOrcRow++; } return result; } private void setOrcOutputRows() { OrcOutputField orcOutputField = mock( OrcOutputField.class ); when( orcOutputField.getPentahoFieldName() ).thenReturn( "StringName" ); orcOutputFields = new ArrayList<>(); orcOutputFields.add( orcOutputField ); } private void setDataInputRowMeta() { dataInputRowMeta = new RowMeta(); ValueMetaInterface valueMetaString = new ValueMetaString( "StringName" ); dataInputRowMeta.addValueMeta( valueMetaString ); } private void setDataInputRows() { setDataInputRowMeta(); dataInputRows = new RowMetaAndData[] { new RowMetaAndData( dataInputRowMeta, "string1" ), new RowMetaAndData( dataInputRowMeta, "string2" ), new RowMetaAndData( dataInputRowMeta, "string3" ) }; } @Test public void testAliasFile() throws Exception { String aliasPath = Files.createTempDirectory( "testAliasFile" ) + File.separator + "dummyFile"; new File( aliasPath ).createNewFile(); //create the alias file so it and its parent can be successfully deleted when( mockPentahoOrcOutputFormat.generateAlias( anyString() ) ).thenReturn( aliasPath ); boolean result; int rowsProcessed = 0; ArgumentCaptor rowMetaCaptor = ArgumentCaptor.forClass( RowMeta.class ); ArgumentCaptor dataCaptor = ArgumentCaptor.forClass( Object[].class ); do { result = orcOutput.processRow( orcOutputMeta, orcOutputData ); if ( result ) { rowsProcessed++; } } while ( result ); // 3 rows to be outputted to an Orc file assertEquals( 3, rowsProcessed ); verify( mockRowHandler, times( 3 ) ).putRow( rowMetaCaptor.capture(), dataCaptor.capture() ); List rowMetaCaptured = rowMetaCaptor.getAllValues(); List dataCaptured = dataCaptor.getAllValues(); for ( int rowNum = 0; rowNum < 3; rowNum++ ) { assertEquals( 0, rowMetaCaptured.get( rowNum ).indexOfValue( "StringName" ) ); assertEquals( "string" + ( rowNum % 3 + 1 ), dataCaptured.get( rowNum )[0] ); } assertFalse( new File( aliasPath ).exists() ); File outputFile = new File( OUTPUT_FILE_NAME ); assertTrue( outputFile.exists() ); outputFile.delete(); } }
================================================ FILE: kettle-plugins/formats/core/src/test/java/org/pentaho/big/data/kettle/plugins/formats/impl/output/PvfsFileAliaserTest.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.formats.impl.output; import org.junit.Before; import org.junit.BeforeClass; import org.junit.Test; import org.junit.runner.RunWith; import org.mockito.Mock; import org.mockito.junit.MockitoJUnitRunner; import org.pentaho.di.core.bowl.DefaultBowl; import org.pentaho.di.core.logging.LogChannelInterface; import org.pentaho.di.core.variables.VariableSpace; import org.pentaho.hadoop.shim.api.format.IPvfsAliasGenerator; import java.io.File; import java.nio.file.Files; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; import static org.mockito.ArgumentMatchers.anyString; import static org.mockito.Mockito.when; @RunWith( MockitoJUnitRunner.class ) public class PvfsFileAliaserTest { PvfsFileAliaser pvfsFileAliaser; @Mock VariableSpace variableSpace; @Mock IPvfsAliasGenerator aliasGenerator; @Mock LogChannelInterface log; private static final String TEMP_DIR_PREFIX = "PvfsFileAliaserTest"; private static String finalPath; private static File finalFile; private String temporaryPath; @BeforeClass public static void setup() throws Exception { finalPath = Files.createTempDirectory( TEMP_DIR_PREFIX ) + File.separator + "finalFile"; finalFile = new File( finalPath ); } @Before public void setUp() throws Exception { finalFile.delete(); temporaryPath = Files.createTempDirectory( TEMP_DIR_PREFIX ) + File.separator + "temporaryFile"; new File( temporaryPath ) .createNewFile(); //create the alias file so it and its parent can be successfully deleted when( aliasGenerator.generateAlias( anyString() ) ).thenReturn( temporaryPath ); pvfsFileAliaser = new PvfsFileAliaser( DefaultBowl.getInstance(), finalPath, variableSpace, aliasGenerator, true, log ); } @Test public void testGenerateWithActiveAlias() throws Exception { String aliasPath = pvfsFileAliaser.generateAlias(); assertEquals( temporaryPath, aliasPath ); assertFalse( finalFile.exists() ); pvfsFileAliaser.copyFileToFinalDestination(); assertTrue( finalFile.exists() ); pvfsFileAliaser.deleteTempFileAndFolder(); assertFalse( new File( new File( temporaryPath ).getParent() ).exists() ); } @Test public void testGenerateWithInactiveAlias() throws Exception { when( aliasGenerator.generateAlias( anyString() ) ).thenReturn( null ); String aliasPath = pvfsFileAliaser.generateAlias(); assertEquals( finalPath, aliasPath ); assertFalse( finalFile.exists() ); pvfsFileAliaser.copyFileToFinalDestination(); assertFalse( finalFile.exists() ); } @Test public void testCopyFileToFinalDestinationWithoutGenerate() throws Exception { pvfsFileAliaser.copyFileToFinalDestination(); assertFalse( finalFile.exists() ); assertTempFileExistsAndDelete(); } @Test public void testDeleteTempFileAndFolderWithoutGenerate() { pvfsFileAliaser.deleteTempFileAndFolder(); assertFalse( finalFile.exists() ); assertTempFileExistsAndDelete(); } private void
assertTempFileExistsAndDelete() { File tempFile = new File( temporaryPath ); assertTrue( tempFile.exists() ); deleteTempFile(); } private void deleteTempFile() { File tempFile = new File( temporaryPath ); tempFile.delete(); new File( tempFile.getParent() ).delete(); } } ================================================ FILE: kettle-plugins/formats/core/src/test/java/org/pentaho/big/data/kettle/plugins/formats/impl/parquet/input/ParquetInputMetaInjectionTest.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.formats.impl.parquet.input; import org.junit.Before; import org.junit.Test; import org.pentaho.big.data.kettle.plugins.formats.impl.NamedClusterResolver; import org.pentaho.big.data.kettle.plugins.formats.parquet.input.ParquetInputField; import org.pentaho.di.core.injection.BaseMetadataInjectionTest; import org.pentaho.di.core.row.value.ValueMetaBase; import org.pentaho.hadoop.shim.api.format.ParquetSpec; import static org.mockito.Mockito.mock; public class ParquetInputMetaInjectionTest extends BaseMetadataInjectionTest { @Before public void setup() { NamedClusterResolver namedClusterResolver = mock( NamedClusterResolver.class ); setup( new ParquetInputMeta( namedClusterResolver ) ); } @Test public void test() throws Exception { check( "FILENAME", new StringGetter() { public String get() { return meta.inputFiles.fileName[ 0 ]; } } ); check( "FIELD_NAME", new StringGetter() { public String get() { return meta.inputFields[ 0 ].getPentahoFieldName(); } } ); check( "IGNORE_EMPTY_FOLDER", new BooleanGetter() { public boolean get() { return meta.isIgnoreEmptyFolder(); } } ); String[] typeNames = ValueMetaBase.getAllTypes(); checkStringToInt( "FIELD_TYPE", new IntGetter() { public int get() { return meta.inputFields[ 0 ].getPentahoType(); } }, typeNames, getTypeCodes( typeNames ) ); check( "FIELD_PATH", new StringGetter() { public String get() { return meta.inputFields[ 0 ].getFormatFieldName(); } } ); String[] parquetTypeNames = ParquetSpec.DataType.getDisplayableTypeNames(); checkStringToInt( "PARQUET_TYPE", new IntGetter() { public int get() { return meta.inputFields[ 0 ].getParquetType().getId(); } }, parquetTypeNames, getParquetTypeCodes( parquetTypeNames ) ); } public static int[] getParquetTypeCodes( String[] parquetTypeNames ) { int[] parquetTypeCodes = new int[ parquetTypeNames.length ]; for ( int i = 0; i < parquetTypeNames.length; ++i ) { ParquetInputField field = new ParquetInputField(); field.setParquetType( parquetTypeNames[ i ] ); parquetTypeCodes[ i ] = field.getParquetType().getId(); } return parquetTypeCodes; } } ================================================ FILE: kettle-plugins/formats/core/src/test/java/org/pentaho/big/data/kettle/plugins/formats/impl/parquet/input/ParquetInputTest.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. 
* * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.formats.impl.parquet.input; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import org.mockito.ArgumentCaptor; import org.mockito.Mock; import org.mockito.MockedStatic; import org.mockito.Mockito; import org.mockito.junit.MockitoJUnitRunner; import org.pentaho.big.data.kettle.plugins.formats.impl.NamedClusterResolver; import org.pentaho.di.core.bowl.DefaultBowl; import org.pentaho.di.core.RowMetaAndData; import org.pentaho.di.core.exception.KettleException; import org.pentaho.di.core.logging.KettleLogStore; import org.pentaho.di.core.logging.LogLevel; import org.pentaho.di.core.row.RowMeta; import org.pentaho.di.core.row.ValueMetaInterface; import org.pentaho.di.core.row.value.ValueMetaBoolean; import org.pentaho.di.core.row.value.ValueMetaInteger; import org.pentaho.di.core.row.value.ValueMetaString; import org.pentaho.di.core.service.PluginServiceLoader; import org.pentaho.di.trans.Trans; import org.pentaho.di.trans.TransMeta; import org.pentaho.di.trans.step.RowHandler; import org.pentaho.di.trans.step.StepDataInterface; import org.pentaho.di.trans.step.StepMeta; import org.pentaho.hadoop.shim.api.cluster.NamedCluster; import org.pentaho.hadoop.shim.api.cluster.NamedClusterService; import org.pentaho.hadoop.shim.api.cluster.NamedClusterServiceLocator; import org.pentaho.hadoop.shim.api.format.FormatService; import org.pentaho.hadoop.shim.api.format.IPentahoInputFormat; import org.pentaho.hadoop.shim.api.format.IPentahoParquetInputFormat; import org.pentaho.metastore.locator.api.MetastoreLocator; import java.nio.file.NoSuchFileException; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Iterator; import java.util.List; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.nullable; import static org.mockito.Mockito.doThrow; import static org.mockito.Mockito.spy; import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; @RunWith(MockitoJUnitRunner.class) public class ParquetInputTest { private static final String INPUT_STEP_NAME = "Input Step Name"; private static final String INPUT_STREAM_FIELD_NAME = "inputStreamFieldName"; private static final String PASS_FIELD_NAME = "passFieldName"; @Mock private StepMeta mockStepMeta; @Mock private StepDataInterface mockStepDataInterface; @Mock private TransMeta mockTransMeta; @Mock private Trans mockTrans; @Mock private NamedClusterServiceLocator mockNamedClusterServiceLocator; @Mock private NamedClusterService mockNamedClusterService; @Mock private MetastoreLocator mockMetaStoreLocator; @Mock private FormatService mockFormatService; @Mock private ParquetInputData parquetInputData; @Mock private RowHandler mockRowHandler; @Mock private IPentahoParquetInputFormat mockPentahoParquetInputFormat; @Mock private IPentahoParquetInputFormat.IPentahoRecordReader mockPentahoParquetRecordReader; @Mock private IPentahoParquetInputFormat.IPentahoInputSplit mockPentahoInputSplit; private ParquetInputMeta parquetInputMeta; private ParquetInput parquetInput; private RowMeta parquetRowMeta; private RowMetaAndData[] parquetRows; private RowMeta inputRowMeta; private RowMetaAndData[] inputRows; private int 
currentParquetInputRow; @Before public void setUp() throws Exception { KettleLogStore.init(); currentParquetInputRow = 0; setInputRows(); setParquetRows(); Collection metastoreLocatorCollection = new ArrayList<>(); metastoreLocatorCollection.add( mockMetaStoreLocator ); NamedClusterResolver namedClusterResolver; try ( MockedStatic pluginServiceLoaderMockedStatic = Mockito.mockStatic( PluginServiceLoader.class ) ) { pluginServiceLoaderMockedStatic.when( () -> PluginServiceLoader.loadServices( MetastoreLocator.class ) ) .thenReturn( metastoreLocatorCollection ); namedClusterResolver = Mockito.mock( NamedClusterResolver.class ); when( namedClusterResolver.getNamedClusterServiceLocator() ).thenReturn( mockNamedClusterServiceLocator ); when( namedClusterResolver.resolveNamedCluster( any( String.class ) ) ).thenReturn( null ); parquetInputMeta = new ParquetInputMeta( namedClusterResolver ); parquetInputMeta.inputFiles.fileName = new String[1]; parquetInputMeta.setFilename( INPUT_STREAM_FIELD_NAME ); parquetInputMeta.setParentStepMeta( mockStepMeta ); when( mockStepMeta.getName() ).thenReturn( INPUT_STEP_NAME ); when( mockTransMeta.findStep( INPUT_STEP_NAME ) ).thenReturn( mockStepMeta ); when( mockTransMeta.getBowl() ).thenReturn( DefaultBowl.getInstance() ); parquetInputData.input = mockPentahoParquetInputFormat; when( mockFormatService.createInputFormat( IPentahoParquetInputFormat.class, parquetInputMeta.getNamedClusterResolver().resolveNamedCluster( parquetInputMeta.getFilename() ) ) ) .thenReturn( mockPentahoParquetInputFormat ); when( mockNamedClusterServiceLocator.getService( nullable( NamedCluster.class ), any( Class.class ) ) ) .thenReturn( mockFormatService ); when( mockPentahoParquetInputFormat.createRecordReader( mockPentahoInputSplit ) ).thenReturn( mockPentahoParquetRecordReader ); when( mockPentahoParquetRecordReader.iterator() ).thenReturn( new ParquetInputTest.ParquetRecordIterator() ); List splits = new ArrayList<>(); splits.add( mockPentahoInputSplit ); when( parquetInputData.input.getSplits() ).thenReturn( splits ); parquetInput = spy( new ParquetInput( mockStepMeta, mockStepDataInterface, 0, mockTransMeta, mockTrans ) ); parquetInput.setRowHandler( mockRowHandler ); parquetInput.setInputRowMeta( inputRowMeta ); parquetInput.setLogLevel( LogLevel.ERROR ); parquetInput.setTransMeta( mockTransMeta ); } } private Object[] returnNextInputRow() { Object[] result = null; if ( currentParquetInputRow < inputRows.length ) { result = inputRows[currentParquetInputRow].getData().clone(); currentParquetInputRow++; } return result; } @Test public void testProcessRow() throws Exception { boolean result; int rowsProcessed = 0; ArgumentCaptor rowMetaCaptor = ArgumentCaptor.forClass( RowMeta.class ); ArgumentCaptor dataCaptor = ArgumentCaptor.forClass( Object[].class ); do { result = parquetInput.processRow( parquetInputMeta, parquetInputData ); if ( result ) { rowsProcessed++; } } while ( result ); // 1 file, 2 rows. 
The third run is to increase the split count, which will return false on the next processRow call assertEquals( 3, rowsProcessed ); verify( mockRowHandler, times( 2 ) ).putRow( rowMetaCaptor.capture(), dataCaptor.capture() ); List rowMeta = rowMetaCaptor.getAllValues(); List dataCaptured = dataCaptor.getAllValues(); for ( int rowNum = 0; rowNum < 2; rowNum++ ) { assertEquals( 0, rowMeta.get( rowNum ).indexOfValue( "str" ) ); assertEquals( "string" + ( rowNum % 2 + 1 ), dataCaptured.get( rowNum )[0] ); } } @Test public void testInit() { assertEquals( true, parquetInput.init() ); } @Test public void testProcessNoSuchFile() throws Exception { String expectedMessage = "No input file"; try { doThrow( new NoSuchFileException( "NoSuchFileExceptionMessage" ) ).when( parquetInput ).initSplits(); parquetInput.processRow( parquetInputMeta, parquetInputData ); fail( "No Kettle Exception thrown" ); } catch ( KettleException kex ) { assertTrue( kex.getMessage().contains( expectedMessage ) ); } } @Test public void testProcessRowKettleFailure() { String expectedMessage = "KettleExceptionMessage"; try { doThrow( new KettleException( expectedMessage ) ).when( parquetInput ).initSplits(); parquetInput.processRow( parquetInputMeta, parquetInputData ); fail( "No Kettle Exception thrown" ); } catch ( KettleException kex ) { assertTrue( kex.getMessage().contains( expectedMessage ) ); } catch ( Exception ex ) { fail( "No other type of exception should be thrown" ); } } @Test public void testProcessRowGeneralFailure() { String expectedMessage = "GeneralExceptionMessage"; try { doThrow( new Exception( expectedMessage ) ).when( parquetInput ).initSplits(); parquetInput.processRow( parquetInputMeta, parquetInputData ); fail( "No Kettle Exception thrown" ); } catch ( KettleException kex ) { assertTrue( kex.getMessage().contains( expectedMessage ) ); } catch ( Exception ex ) { fail( "No other type of exception should be thrown" ); } } private RowMeta setParquetRowMeta() { parquetRowMeta = new RowMeta(); ValueMetaInterface valueMetaString = new ValueMetaString( "str" ); parquetRowMeta.addValueMeta( valueMetaString ); ValueMetaInterface valueMetaBoolean = new ValueMetaBoolean( "bool" ); parquetRowMeta.addValueMeta( valueMetaBoolean ); ValueMetaInterface valueMetaInteger = new ValueMetaInteger( "int" ); parquetRowMeta.addValueMeta( valueMetaInteger ); return parquetRowMeta; } private RowMeta setInputRowMeta() { inputRowMeta = new RowMeta(); ValueMetaInterface valueMetaString = new ValueMetaString( INPUT_STREAM_FIELD_NAME ); inputRowMeta.addValueMeta( valueMetaString ); ValueMetaInterface valueMetaString2 = new ValueMetaString( PASS_FIELD_NAME ); inputRowMeta.addValueMeta( valueMetaString2 ); return inputRowMeta; } private void setInputRows() { setInputRowMeta(); inputRows = new RowMetaAndData[] { new RowMetaAndData( parquetRowMeta, "parquetFile", "pass1" ) }; } private void setParquetRows() { setParquetRowMeta(); parquetRows = new RowMetaAndData[] { new RowMetaAndData( parquetRowMeta, "string1", true, new Integer( 123 ) ), new RowMetaAndData( parquetRowMeta, "string2", true, new Integer( 321 ) ) }; } private class ParquetRecordIterator implements Iterator { private Iterator iter; private boolean reset; ParquetRecordIterator() { init(); } private void init() { iter = Arrays.asList( parquetRows ).iterator(); reset = false; } @Override public boolean hasNext() { if ( reset ) { init(); } if ( !iter.hasNext() ) { reset = true; } return iter.hasNext(); } @Override public RowMetaAndData next() { if ( reset ) { init(); // 
Simulate a new iterator for the new file } return iter.next().clone(); } } } ================================================ FILE: kettle-plugins/formats/core/src/test/java/org/pentaho/big/data/kettle/plugins/formats/impl/parquet/output/ParquetOutputMetaInjectionTest.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.formats.impl.parquet.output; import org.junit.Before; import org.junit.Test; import static org.mockito.Mockito.mock; import org.pentaho.big.data.kettle.plugins.formats.impl.NamedClusterResolver; import org.pentaho.big.data.kettle.plugins.formats.parquet.ParquetTypeConverter; import org.pentaho.di.core.injection.BaseMetadataInjectionTest; import org.pentaho.di.core.row.ValueMetaInterface; import org.pentaho.hadoop.shim.api.format.ParquetSpec; public class ParquetOutputMetaInjectionTest extends BaseMetadataInjectionTest { @Before public void setup() { NamedClusterResolver namedClusterResolver = mock( NamedClusterResolver.class ); setup( new ParquetOutputMeta( namedClusterResolver ) ); } @Test public void test() throws Exception { check( "FILENAME", new StringGetter() { public String get() { return meta.getFilename(); } } ); check( "ROW_GROUP_SIZE", new StringGetter() { public String get() { return meta.getRowGroupSize(); } } ); check( "DATA_PAGE_SIZE", new StringGetter() { public String get() { return meta.getDataPageSize(); } } ); check( "ENABLE_DICTIONARY", new BooleanGetter() { public boolean get() { return meta.isEnableDictionary(); } } ); check( "DICT_PAGE_SIZE", new StringGetter() { public String get() { return meta.getDictPageSize(); } } ); check( "OVERRIDE_OUTPUT", new BooleanGetter() { public boolean get() { return meta.isOverrideOutput(); } } ); check( "INC_DATE_IN_FILENAME", new BooleanGetter() { public boolean get() { return meta.isDateInFilename(); } } ); check( "INC_TIME_IN_FILENAME", new BooleanGetter() { public boolean get() { return meta.isTimeInFilename(); } } ); check( "EXTENSION", new StringGetter() { public String get() { return meta.getExtension(); } } ); check( "DATE_FORMAT", new StringGetter() { public String get() { return meta.getDateTimeFormat(); } } ); check( "FIELD_NAME", new StringGetter() { public String get() { return meta.getOutputFields().get( 0 ).getPentahoFieldName(); } } ); int [] supportedPdiTypes = { ValueMetaInterface.TYPE_NUMBER, ValueMetaInterface.TYPE_STRING, ValueMetaInterface.TYPE_DATE, ValueMetaInterface.TYPE_BOOLEAN, ValueMetaInterface.TYPE_INTEGER, ValueMetaInterface.TYPE_BIGNUMBER, ValueMetaInterface.TYPE_SERIALIZABLE, ValueMetaInterface.TYPE_BINARY, ValueMetaInterface.TYPE_TIMESTAMP, ValueMetaInterface.TYPE_INET }; String[] typeNames = new String[ supportedPdiTypes.length ]; int[] typeIds = new int[ supportedPdiTypes.length ]; for ( int j = 0; j < supportedPdiTypes.length; j++ ) { typeNames[ j ] = ValueMetaInterface.getTypeDescription( supportedPdiTypes[ j ] ); String parquetTypeName = ParquetTypeConverter.convertToParquetType( supportedPdiTypes[ j ] ); for ( ParquetSpec.DataType parquetType : ParquetSpec.DataType.values() ) { if ( parquetType.getName().equals( parquetTypeName ) ) { typeIds[ j ] =
parquetType.getId(); break; } } } checkStringToInt( "FIELD_TYPE", new IntGetter() { public int get() { return meta.getOutputFields().get( 0 ).getFormatType(); } }, typeNames, typeIds ); ParquetSpec.DataType[] supportedParquetTypes = { ParquetSpec.DataType.UTF8, ParquetSpec.DataType.INT_32, ParquetSpec.DataType.INT_64, ParquetSpec.DataType.FLOAT, ParquetSpec.DataType.DOUBLE, ParquetSpec.DataType.BOOLEAN, ParquetSpec.DataType.DECIMAL, ParquetSpec.DataType.DATE, ParquetSpec.DataType.TIMESTAMP_MILLIS, ParquetSpec.DataType.BINARY }; typeNames = new String[ supportedParquetTypes.length ]; typeIds = new int[ supportedParquetTypes.length ]; for ( int i = 0; i < supportedParquetTypes.length; i++ ) { typeNames[ i ] = supportedParquetTypes[ i ].getName(); typeIds[ i ] = supportedParquetTypes[ i ].getId(); } checkStringToInt( "FIELD_PARQUET_TYPE", new IntGetter() { public int get() { return meta.getOutputFields().get( 0 ).getFormatType(); } }, typeNames, typeIds ); check( "FIELD_DECIMAL_PRECISION", new IntGetter() { public int get() { return meta.getOutputFields().get( 0 ).getPrecision(); } } ); check( "FIELD_DECIMAL_SCALE", new IntGetter() { public int get() { return meta.getOutputFields().get( 0 ).getScale(); } } ); check( "FIELD_PATH", new StringGetter() { public String get() { return meta.getOutputFields().get( 0 ).getFormatFieldName(); } } ); check( "FIELD_IF_NULL", new StringGetter() { public String get() { return meta.getOutputFields().get( 0 ).getDefaultValue(); } } ); check( "FIELD_NULLABLE", new BooleanGetter() { public boolean get() { return meta.getOutputFields().get( 0 ).getAllowNull(); } } ); skipPropertyTest( "COMPRESSION" ); skipPropertyTest( "PARQUET_VERSION" ); } } ================================================ FILE: kettle-plugins/formats/core/src/test/java/org/pentaho/big/data/kettle/plugins/formats/impl/parquet/output/ParquetOutputTest.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. 
* * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.formats.impl.parquet.output; import org.junit.Before; import org.junit.Rule; import org.junit.Test; import org.junit.rules.ExpectedException; import org.junit.runner.RunWith; import org.mockito.ArgumentCaptor; import org.mockito.Mock; import org.mockito.MockedStatic; import org.mockito.Mockito; import org.mockito.junit.MockitoJUnitRunner; import org.pentaho.big.data.kettle.plugins.formats.impl.NamedClusterResolver; import org.pentaho.big.data.kettle.plugins.formats.parquet.output.ParquetOutputField; import org.pentaho.di.core.bowl.DefaultBowl; import org.pentaho.di.core.RowMetaAndData; import org.pentaho.di.core.exception.KettleException; import org.pentaho.di.core.logging.KettleLogStore; import org.pentaho.di.core.logging.LogChannelInterface; import org.pentaho.di.core.logging.LogLevel; import org.pentaho.di.core.row.RowMeta; import org.pentaho.di.core.row.ValueMetaInterface; import org.pentaho.di.core.row.value.ValueMetaString; import org.pentaho.di.core.service.PluginServiceLoader; import org.pentaho.di.trans.Trans; import org.pentaho.di.trans.TransMeta; import org.pentaho.di.trans.step.RowHandler; import org.pentaho.di.trans.step.StepDataInterface; import org.pentaho.di.trans.step.StepMeta; import org.pentaho.di.trans.steps.named.cluster.NamedClusterEmbedManager; import org.pentaho.hadoop.shim.api.cluster.NamedCluster; import org.pentaho.hadoop.shim.api.cluster.NamedClusterService; import org.pentaho.hadoop.shim.api.cluster.NamedClusterServiceLocator; import org.pentaho.hadoop.shim.api.format.FormatService; import org.pentaho.hadoop.shim.api.format.IPentahoParquetOutputFormat; import org.pentaho.hadoop.shim.api.format.ParquetSpec; import org.pentaho.metastore.locator.api.MetastoreLocator; import java.io.File; import java.nio.file.Files; import java.util.ArrayList; import java.util.Collection; import java.util.List; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.anyBoolean; import static org.mockito.ArgumentMatchers.anyString; import static org.mockito.ArgumentMatchers.nullable; import static org.mockito.Mockito.doNothing; import static org.mockito.Mockito.doThrow; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.spy; import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; @RunWith(MockitoJUnitRunner.class) public class ParquetOutputTest { private static final String OUTPUT_STEP_NAME = "Output Step Name"; private static final String OUTPUT_TRANS_NAME = "Output Trans Name"; private static final String OUTPUT_FILE_NAME = "outputFileName"; @Rule public ExpectedException expectedException; @Mock private StepMeta mockStepMeta; @Mock private StepDataInterface mockStepDataInterface; @Mock private TransMeta mockTransMeta; @Mock private Trans mockTrans; @Mock private NamedClusterServiceLocator mockNamedClusterServiceLocator; @Mock private NamedClusterService mockNamedClusterService; @Mock private MetastoreLocator mockMetaStoreLocator; @Mock private FormatService mockFormatService; @Mock private ParquetOutputData parquetOutputData; @Mock private RowHandler mockRowHandler; @Mock private IPentahoParquetOutputFormat mockPentahoParquetOutputFormat; 
@Mock private LogChannelInterface mockLogChannelInterface; @Mock private IPentahoParquetOutputFormat.IPentahoRecordWriter mockPentahoParquetRecordWriter; private ParquetOutput parquetOutput; private List parquetOutputFields; private ParquetOutputMeta parquetOutputMeta; private RowMeta dataInputRowMeta; private RowMetaAndData[] dataInputRows; private int currentParquetRow; @Before public void setUp() throws Exception { KettleLogStore.init(); expectedException = ExpectedException.none(); currentParquetRow = 0; setDataInputRows(); setParquetOutputRows(); Collection metastoreLocatorCollection = new ArrayList<>(); metastoreLocatorCollection.add( mockMetaStoreLocator ); NamedClusterResolver namedClusterResolver; try ( MockedStatic pluginServiceLoaderMockedStatic = Mockito.mockStatic( PluginServiceLoader.class ) ) { pluginServiceLoaderMockedStatic.when( () -> PluginServiceLoader.loadServices( MetastoreLocator.class ) ) .thenReturn( metastoreLocatorCollection ); // Mock the NamedClusterResolver instead of using the singleton namedClusterResolver = Mockito.mock( NamedClusterResolver.class ); when( namedClusterResolver.getNamedClusterServiceLocator() ).thenReturn( mockNamedClusterServiceLocator ); when( namedClusterResolver.resolveNamedCluster( any( String.class ) ) ).thenReturn( null ); parquetOutputMeta = new ParquetOutputMeta( namedClusterResolver ); parquetOutputMeta.setFilename( OUTPUT_FILE_NAME ); parquetOutputMeta.setOverrideOutput( true ); parquetOutputMeta.setOutputFields( parquetOutputFields ); parquetOutputMeta.setParentStepMeta( mockStepMeta ); when( mockStepMeta.getName() ).thenReturn( OUTPUT_STEP_NAME ); when( mockTransMeta.findStep( OUTPUT_STEP_NAME ) ).thenReturn( mockStepMeta ); when( mockTransMeta.findStep( OUTPUT_STEP_NAME ) ).thenReturn( mockStepMeta ); when( mockTransMeta.getBowl() ).thenReturn( DefaultBowl.getInstance() ); try { when( mockRowHandler.getRow() ).thenAnswer( answer -> returnNextParquetRow() ); } catch ( KettleException ke ) { ke.printStackTrace(); } when( mockFormatService.createOutputFormat( IPentahoParquetOutputFormat.class, parquetOutputMeta.getNamedClusterResolver().resolveNamedCluster( parquetOutputMeta.getFilename() ) ) ) .thenReturn( mockPentahoParquetOutputFormat ); when( mockNamedClusterServiceLocator.getService( nullable( NamedCluster.class ), any( Class.class ) ) ) .thenReturn( mockFormatService ); when( mockPentahoParquetOutputFormat.createRecordWriter() ).thenReturn( mockPentahoParquetRecordWriter ); parquetOutput = spy( new ParquetOutput( mockStepMeta, mockStepDataInterface, 0, mockTransMeta, mockTrans ) ); parquetOutput.init( parquetOutputMeta, parquetOutputData ); parquetOutput.setInputRowMeta( dataInputRowMeta ); parquetOutput.setRowHandler( mockRowHandler ); parquetOutput.setLogLevel( LogLevel.ERROR ); parquetOutput.setTransMeta( mockTransMeta ); } } @Test public void testProcessRow() throws Exception { boolean result; int rowsProcessed = 0; ArgumentCaptor rowMetaCaptor = ArgumentCaptor.forClass( RowMeta.class ); ArgumentCaptor dataCaptor = ArgumentCaptor.forClass( Object[].class ); do { result = parquetOutput.processRow( parquetOutputMeta, parquetOutputData ); if ( result ) { rowsProcessed++; } } while ( result ); // 3 rows to be outputted to an parquet file assertEquals( 3, rowsProcessed ); verify( mockRowHandler, times( 3 ) ).putRow( rowMetaCaptor.capture(), dataCaptor.capture() ); verify( parquetOutput, times( 3 ) ).incrementLinesOutput(); List rowMetaCaptured = rowMetaCaptor.getAllValues(); List dataCaptured = dataCaptor.getAllValues(); for 
( int rowNum = 0; rowNum < 3; rowNum++ ) { assertEquals( 0, rowMetaCaptured.get( rowNum ).indexOfValue( "StringName" ) ); assertEquals( "string" + ( rowNum % 3 + 1 ), dataCaptured.get( rowNum )[0] ); } } @Test public void initShouldPassEmbeddedMetastoreKey() { ParquetOutputMeta stepMetaInterface = mock( ParquetOutputMeta.class ); ParquetOutputData stepDataInterface = mock( ParquetOutputData.class ); NamedClusterEmbedManager namedClusterEmbedManager = mock( NamedClusterEmbedManager.class ); when( mockTransMeta.getNamedClusterEmbedManager() ).thenReturn( namedClusterEmbedManager ); when( mockTransMeta.getEmbeddedMetastoreProviderKey() ).thenReturn( "metastoreProviderKey" ); parquetOutput.init( stepMetaInterface, stepDataInterface ); verify( namedClusterEmbedManager ).passEmbeddedMetastoreKey( mockTransMeta, "metastoreProviderKey" ); } @Test public void testProcessRowIllegalState() throws Exception { doThrow( new IllegalStateException( "IllegalStateExceptionMessage" ) ).when( mockPentahoParquetOutputFormat ) .setOutputFile( anyString(), anyBoolean() ); when( parquetOutput.getLogChannel() ).thenReturn( mockLogChannelInterface ); assertFalse( parquetOutput.processRow( parquetOutputMeta, parquetOutputData ) ); verify( mockLogChannelInterface, times( 1 ) ) .logError( "IllegalStateExceptionMessage" ); } @Test public void testProcessRowKettleFailure() { String expectedMessage = "KettleExceptionMessage"; try { doNothing().when( parquetOutput ).closeWriter(); doThrow( new KettleException( expectedMessage ) ).when( parquetOutput ).init( any() ); parquetOutput.processRow( parquetOutputMeta, parquetOutputData ); fail( "No Kettle Exception thrown" ); } catch ( KettleException kex ) { assertTrue( kex.getMessage().contains( expectedMessage ) ); } catch ( Exception ex ) { fail( "No other type of exception should be thrown" ); } } @Test public void testProcessRowGeneralFailure() { String expectedMessage = "GeneralExceptionMessage"; try { doNothing().when( parquetOutput ).closeWriter(); doThrow( new Exception( expectedMessage ) ).when( parquetOutput ).init( any() ); parquetOutput.processRow( parquetOutputMeta, parquetOutputData ); fail( "No Kettle Exception thrown" ); } catch ( KettleException kex ) { assertTrue( kex.getMessage().contains( expectedMessage ) ); } catch ( Exception ex ) { fail( "No other type of exception should be thrown" ); } } private Object[] returnNextParquetRow() { Object[] result = null; if ( currentParquetRow < dataInputRows.length ) { result = dataInputRows[currentParquetRow].getData().clone(); currentParquetRow++; } return result; } private void setParquetOutputRows() { ParquetOutputField parquetOutputField = mock( ParquetOutputField.class ); parquetOutputFields = new ArrayList<>(); parquetOutputFields.add( parquetOutputField ); } private void setDataInputRowMeta() { dataInputRowMeta = new RowMeta(); ValueMetaInterface valueMetaString = new ValueMetaString( "StringName" ); dataInputRowMeta.addValueMeta( valueMetaString ); } private void setDataInputRows() { setDataInputRowMeta(); dataInputRows = new RowMetaAndData[] { new RowMetaAndData( dataInputRowMeta, "string1" ), new RowMetaAndData( dataInputRowMeta, "string2" ), new RowMetaAndData( dataInputRowMeta, "string3" ) }; } @Test public void testAliasFile() throws Exception { String aliasPath = Files.createTempDirectory( "testAliasFile" ) + File.separator + "dummyFile"; new File( aliasPath ).createNewFile(); //create the alias file so it and its parent can be successfully deleted when( mockPentahoParquetOutputFormat.generateAlias(
anyString() ) ).thenReturn( aliasPath ); boolean result; int rowsProcessed = 0; ArgumentCaptor rowMetaCaptor = ArgumentCaptor.forClass( RowMeta.class ); ArgumentCaptor dataCaptor = ArgumentCaptor.forClass( Object[].class ); do { result = parquetOutput.processRow( parquetOutputMeta, parquetOutputData ); if ( result ) { rowsProcessed++; } } while ( result ); // 3 rows to be outputted to an parquet file assertEquals( 3, rowsProcessed ); verify( mockRowHandler, times( 3 ) ).putRow( rowMetaCaptor.capture(), dataCaptor.capture() ); List rowMetaCaptured = rowMetaCaptor.getAllValues(); List dataCaptured = dataCaptor.getAllValues(); for ( int rowNum = 0; rowNum < 3; rowNum++ ) { assertEquals( 0, rowMetaCaptured.get( rowNum ).indexOfValue( "StringName" ) ); assertEquals( "string" + ( rowNum % 3 + 1 ), dataCaptured.get( rowNum )[0] ); } assertFalse( new File( aliasPath ).exists() ); File outputFile = new File( OUTPUT_FILE_NAME ); assertTrue( outputFile.exists() ); outputFile.delete(); } } ================================================ FILE: kettle-plugins/formats/pom.xml ================================================ 4.0.0 pentaho-big-data-kettle-plugins pentaho 11.1.0.0-SNAPSHOT pentaho-big-data-kettle-plugins-formats pom Pentaho Community Edition Project: ${project.artifactId} a Pentaho open source project http://www.pentaho.com Apache License, Version 2.0 https://www.apache.org/licenses/LICENSE-2.0.txt repo A business-friendly OSS license assemblies core ================================================ FILE: kettle-plugins/formats-meta/pom.xml ================================================ 4.0.0 pentaho pentaho-big-data-kettle-plugins 11.1.0.0-SNAPSHOT pentaho-big-data-kettle-plugins-formats-meta jar Pentaho Community Edition Project: ${project.artifactId} a Pentaho open source project http://www.pentaho.com site 3.0 org.pentaho shim-api ${pentaho-hadoop-shims.version} pentaho-kettle kettle-core ${pdi.version} pentaho-kettle kettle-engine ${pdi.version} junit junit ${dependency.junit.revision} test org.mockito mockito-all ${dependency.mockito.revision} test org.apache.parquet parquet-hadoop ${parquet.version} provided ================================================ FILE: kettle-plugins/formats-meta/src/main/java/org/pentaho/big/data/kettle/plugins/formats/BaseFormatInputField.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. 
* * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.formats; import org.pentaho.di.core.injection.Injection; import org.pentaho.di.trans.steps.file.BaseFileField; import org.pentaho.hadoop.shim.api.format.IFormatInputField; /** * Base input step field for various big data file formats * * @author tkafalas */ public class BaseFormatInputField extends BaseFileField implements IFormatInputField { @Injection( name = "FIELD_PATH", group = "FIELDS" ) protected String formatFieldName = null; private int formatType; private int precision = 0; private int scale = 0; private String stringFormat = ""; @Override public String getFormatFieldName() { return formatFieldName; } @Override public void setFormatFieldName( String formatFieldName ) { this.formatFieldName = formatFieldName; } @Override public String getPentahoFieldName() { return getName(); } @Override public void setPentahoFieldName( String pentahoFieldName ) { setName( pentahoFieldName ); } @Override public int getPentahoType() { return getType(); } @Override public void setPentahoType( int pentahoType ) { setType( pentahoType ); } @Override public int getFormatType() { return formatType; } @Override public void setFormatType( int formatType ) { this.formatType = formatType; } @Override public int getPrecision() { return this.precision; } @Override public void setPrecision( int precision ) { this.precision = precision; } @Override public int getScale() { return scale; } @Override public void setScale( int scale ) { this.scale = scale; } @Override public String getStringFormat() { return stringFormat; } @Override public void setStringFormat( String stringFormat ) { this.stringFormat = stringFormat == null ? "" : stringFormat; } public void setPentahoType( String value ) { setType( value ); } } ================================================ FILE: kettle-plugins/formats-meta/src/main/java/org/pentaho/big/data/kettle/plugins/formats/BaseFormatOutputField.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. 
* * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.formats; import org.pentaho.di.core.injection.Injection; import org.pentaho.hadoop.shim.api.format.IFormatOutputField; public class BaseFormatOutputField implements IFormatOutputField { public static final int DEFAULT_DECIMAL_PRECISION = 10; public static final int DEFAULT_DECIMAL_SCALE = 0; protected int formatType; protected int pentahoType; @Injection( name = "FIELD_PATH", group = "FIELDS" ) protected String formatFieldName; @Injection( name = "FIELD_NAME", group = "FIELDS" ) protected String pentahoFieldName; @Injection( name = "FIELD_NULLABLE", group = "FIELDS" ) protected boolean allowNull; @Injection( name = "FIELD_IF_NULL", group = "FIELDS" ) protected String defaultValue; @Injection( name = "FIELD_DECIMAL_PRECISION", group = "FIELDS" ) protected int precision; @Injection( name = "FIELD_DECIMAL_SCALE", group = "FIELDS" ) protected int scale; @Override public String getFormatFieldName() { return formatFieldName; } @Override public void setFormatFieldName( String formatFieldName ) { this.formatFieldName = formatFieldName; } @Override public String getPentahoFieldName() { return pentahoFieldName; } @Override public void setPentahoFieldName( String pentahoFieldName ) { this.pentahoFieldName = pentahoFieldName; } @Override public boolean getAllowNull() { return allowNull; } @Override public void setAllowNull( boolean allowNull ) { this.allowNull = allowNull; } @Override public String getDefaultValue() { return defaultValue; } @Override public void setDefaultValue( String defaultValue ) { this.defaultValue = defaultValue; } @Injection( name = "FIELD_NULL_STRING", group = "FIELDS" ) public void setAllowNull( String allowNull ) { if ( allowNull != null && allowNull.length() > 0 ) { if ( allowNull.equalsIgnoreCase( "yes" ) || allowNull.equalsIgnoreCase( "y" ) ) { this.allowNull = true; } else if ( allowNull.equalsIgnoreCase( "no" ) || allowNull.equalsIgnoreCase( "n" ) ) { this.allowNull = false; } else { this.allowNull = Boolean.parseBoolean( allowNull ); } } } @Override public int getFormatType() { return formatType; } @Override public void setFormatType( int formatType ) { this.formatType = formatType; } @Override public int getPrecision() { return precision; } @Override public void setPrecision( String precision ) { if ( precision == null || precision.equals( "" ) ) { this.precision = DEFAULT_DECIMAL_PRECISION; } else { this.precision = Integer.valueOf( precision ); if ( this.precision <= 0 ) { this.precision = DEFAULT_DECIMAL_PRECISION; } } } @Override public int getScale() { return scale; } @Override public void setScale( String scale ) { if ( scale == null || scale.equals( "" ) ) { this.scale = DEFAULT_DECIMAL_SCALE; } else { this.scale = Integer.valueOf( scale ); if ( this.scale < 0 ) { this.scale = DEFAULT_DECIMAL_SCALE; } } } @Override public int getPentahoType() { return pentahoType; } @Override public void setPentahoType( int pentahoType ) { this.pentahoType = pentahoType; } } ================================================ FILE: kettle-plugins/formats-meta/src/main/java/org/pentaho/big/data/kettle/plugins/formats/FormatInputFile.java ================================================ /*! 
****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.formats; import org.pentaho.di.trans.steps.file.BaseFileInputFiles; /** * Base class for format's input file - env added. * * @author */ public class FormatInputFile extends BaseFileInputFiles { public String[] environment = {}; /** * we need to reallocate {@link #environment} too since it can have other length */ @Override public void normalizeAllocation( int length ) { super.normalizeAllocation( length ); environment = normalizeAllocation( environment, length ); } } ================================================ FILE: kettle-plugins/formats-meta/src/main/java/org/pentaho/big/data/kettle/plugins/formats/FormatInputOutputField.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.formats; import org.pentaho.di.core.injection.Injection; import org.pentaho.di.core.row.value.ValueMetaFactory; import org.pentaho.di.trans.steps.file.BaseFileField; /** * Base class for format's input/output field - path added. * * @author */ public class FormatInputOutputField extends BaseFileField { @Injection( name = "FIELD_PATH", group = "FIELDS" ) protected String path; @Injection( name = "FIELD_NULLABLE", group = "FIELDS" ) protected boolean nullable = true; protected int sourceType; public String getPath() { return path; } public void setPath( String path ) { this.path = path; } public boolean isNullable() { return nullable; } public void setNullable( boolean nullable ) { this.nullable = nullable; } /** * @return The field type when read from the source before it was possibly overriden in the UI * (eg. AvroInput step) */ public int getSourceType() { return sourceType; } public void setSourceType( int sourceType ) { this.sourceType = sourceType; } @Injection( name = "FIELD_SOURCE_TYPE", group = "FIELDS" ) public void setSourceType( String value ) { this.sourceType = ValueMetaFactory.getIdForValueMeta( value ); } public String getSourceTypeDesc() { return ValueMetaFactory.getValueMetaName( sourceType ); } } ================================================ FILE: kettle-plugins/formats-meta/src/main/java/org/pentaho/big/data/kettle/plugins/formats/orc/OrcFormatInputOutputField.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. 
* * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.formats.orc; import org.pentaho.di.core.injection.Injection; import org.pentaho.di.core.row.value.ValueMetaFactory; /** * Base class for format's input/output field - path added. * * @author JRice */ public class OrcFormatInputOutputField { @Injection( name = "FIELD_PATH", group = "FIELDS" ) protected String path; @Injection( name = "FIELD_NAME", group = "FIELDS" ) private String name; @Injection( name = "FIELD_NULL_STRING", group = "FIELDS" ) private String nullString; @Injection( name = "FIELD_IF_NULL", group = "FIELDS" ) private String ifNullValue; private int type; public String getPath() { return path; } public void setPath( String path ) { this.path = path; } public String getName() { return name; } public void setName( String name ) { this.name = name; } public String getNullString() { return nullString; } public void setNullString( String nullString ) { this.nullString = nullString; } public String getIfNullValue() { return ifNullValue; } public void setIfNullValue( String ifNullValue ) { this.ifNullValue = ifNullValue; } public int getType() { return type; } public void setType( int type ) { this.type = type; } public String getTypeDesc() { return ValueMetaFactory.getValueMetaName( type ); } @Injection( name = "FIELD_TYPE", group = "FIELDS" ) public void setType( String value ) { this.type = ValueMetaFactory.getIdForValueMeta( value ); } } ================================================ FILE: kettle-plugins/formats-meta/src/main/java/org/pentaho/big/data/kettle/plugins/formats/orc/OrcInputField.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.formats.orc; import org.pentaho.big.data.kettle.plugins.formats.BaseFormatInputField; import org.pentaho.di.core.injection.Injection; import org.pentaho.di.core.row.value.ValueMetaFactory; import org.pentaho.hadoop.shim.api.format.IOrcInputField; import org.pentaho.hadoop.shim.api.format.OrcSpec; /** * @Author tkafalas */ public class OrcInputField extends BaseFormatInputField implements IOrcInputField { public OrcSpec.DataType getOrcType() { return OrcSpec.DataType.getDataType( getFormatType() ); } @Override public void setOrcType( OrcSpec.DataType orcType ) { setFormatType( orcType.getId() ); } @Injection( name = "ORC_TYPE", group = "FIELDS" ) @Override public void setOrcType( String orcType ) { for ( OrcSpec.DataType tmpType : OrcSpec.DataType.values() ) { // Match on Name ( for dialog ) or Enum Name ( For metadata injection ), note that the former uses "Int" and // the latter uses "INTEGER" if ( tmpType.getName().equalsIgnoreCase( orcType ) || tmpType.toString().equalsIgnoreCase( orcType ) ) { setFormatType( tmpType.getId() ); break; } } } @Override public String getTypeDesc() { return ValueMetaFactory.getValueMetaName( getPentahoType() ); } } ================================================ FILE: kettle-plugins/formats-meta/src/main/java/org/pentaho/big/data/kettle/plugins/formats/orc/OrcTypeConverter.java ================================================ /*! 
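A minimal sketch (illustrative class name) of the two spellings OrcInputField.setOrcType( String ) above accepts: the dialog name from OrcSpec.DataType.getName() and the enum constant name used for metadata injection, both matched case-insensitively:

import org.pentaho.big.data.kettle.plugins.formats.orc.OrcInputField;
import org.pentaho.hadoop.shim.api.format.OrcSpec;

public class OrcInputFieldSketch {
  public static void main( String[] args ) {
    OrcInputField field = new OrcInputField();

    // Dialog-style name, i.e. the value shown in the step UI.
    field.setOrcType( OrcSpec.DataType.TIMESTAMP.getName() );
    System.out.println( field.getOrcType() );

    // Enum constant name, the spelling used by metadata injection.
    field.setOrcType( OrcSpec.DataType.DECIMAL.toString() );
    System.out.println( field.getOrcType() );
  }
}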
****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.formats.orc; import org.pentaho.di.core.row.ValueMetaInterface; import org.pentaho.di.core.row.value.ValueMetaFactory; import org.pentaho.hadoop.shim.api.format.OrcSpec; /** * Created by rmansoor on 8/8/2018. */ public class OrcTypeConverter { public static String convertToOrcType( int pdiType ) { switch ( pdiType ) { case ValueMetaInterface.TYPE_INET: case ValueMetaInterface.TYPE_STRING: return OrcSpec.DataType.STRING.getName(); case ValueMetaInterface.TYPE_TIMESTAMP: return OrcSpec.DataType.TIMESTAMP.getName(); case ValueMetaInterface.TYPE_BINARY: return OrcSpec.DataType.BINARY.getName(); case ValueMetaInterface.TYPE_BIGNUMBER: return OrcSpec.DataType.DECIMAL.getName(); case ValueMetaInterface.TYPE_BOOLEAN: return OrcSpec.DataType.BOOLEAN.getName(); case ValueMetaInterface.TYPE_DATE: return OrcSpec.DataType.DATE.getName(); case ValueMetaInterface.TYPE_INTEGER: return OrcSpec.DataType.INTEGER.getName(); case ValueMetaInterface.TYPE_NUMBER: return OrcSpec.DataType.DOUBLE.getName(); default: return OrcSpec.DataType.NULL.getName(); } } public static String convertToOrcType( String type ) { int pdiType = ValueMetaFactory.getIdForValueMeta( type ); if ( pdiType > 0 ) { return convertToOrcType( pdiType ); } else { return type; } } } ================================================ FILE: kettle-plugins/formats-meta/src/main/java/org/pentaho/big/data/kettle/plugins/formats/orc/input/OrcInputMetaBase.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. 
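A minimal sketch (illustrative class name) of the PDI-to-ORC mapping implemented by OrcTypeConverter above. Only the int overload is exercised here because it is a plain switch and needs no Kettle environment; the String overload first resolves the PDI type name through ValueMetaFactory:

import org.pentaho.big.data.kettle.plugins.formats.orc.OrcTypeConverter;
import org.pentaho.di.core.row.ValueMetaInterface;

public class OrcTypeConverterSketch {
  public static void main( String[] args ) {
    // Number maps to Double, BigNumber to Decimal, Internet addresses are stored as strings.
    System.out.println( OrcTypeConverter.convertToOrcType( ValueMetaInterface.TYPE_NUMBER ) );
    System.out.println( OrcTypeConverter.convertToOrcType( ValueMetaInterface.TYPE_BIGNUMBER ) );
    System.out.println( OrcTypeConverter.convertToOrcType( ValueMetaInterface.TYPE_INET ) );

    // Anything unrecognised falls through to the ORC NULL type name.
    System.out.println( OrcTypeConverter.convertToOrcType( -1 ) );
  }
}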
* * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.formats.orc.input; import org.apache.commons.vfs2.FileObject; import org.pentaho.big.data.kettle.plugins.formats.FormatInputFile; import org.pentaho.big.data.kettle.plugins.formats.orc.OrcInputField; import org.pentaho.big.data.kettle.plugins.formats.orc.OrcTypeConverter; import org.pentaho.di.core.bowl.Bowl; import org.pentaho.di.core.Const; import org.pentaho.di.core.database.DatabaseMeta; import org.pentaho.di.core.exception.KettleException; import org.pentaho.di.core.exception.KettleFileException; import org.pentaho.di.core.exception.KettlePluginException; import org.pentaho.di.core.exception.KettleStepException; import org.pentaho.di.core.exception.KettleXMLException; import org.pentaho.di.core.row.RowMetaInterface; import org.pentaho.di.core.row.ValueMetaInterface; import org.pentaho.di.core.row.value.ValueMetaBase; import org.pentaho.di.core.row.value.ValueMetaFactory; import org.pentaho.di.core.variables.VariableSpace; import org.pentaho.di.core.vfs.AliasedFileObject; import org.pentaho.di.core.vfs.KettleVFS; import org.pentaho.di.core.xml.XMLHandler; import org.pentaho.di.repository.ObjectId; import org.pentaho.di.repository.Repository; import org.pentaho.di.trans.step.StepMeta; import org.pentaho.di.trans.steps.file.BaseFileInputAdditionalField; import org.pentaho.di.trans.steps.file.BaseFileInputMeta; import org.pentaho.di.workarounds.ResolvableResource; import org.pentaho.hadoop.shim.api.format.IOrcInputField; import org.pentaho.hadoop.shim.api.format.OrcSpec; import org.pentaho.metastore.api.IMetaStore; import org.w3c.dom.Node; import java.util.List; /** * Orc input meta step without Hadoop-dependent classes. Required for read meta in the spark native code. 
* * @author Jacob Gminder */ @SuppressWarnings( "deprecation" ) public abstract class OrcInputMetaBase extends BaseFileInputMeta implements ResolvableResource { public OrcInputMetaBase() { additionalOutputFields = new BaseFileInputAdditionalField(); inputFiles = new FormatInputFile(); inputFields = new OrcInputField[ 0 ]; } public String getFilename() { if ( inputFiles != null && inputFiles.fileName != null && inputFiles.fileName.length > 0 ) { return inputFiles.fileName[0]; } else { return null; } } public void setFilename( String filename ) { inputFiles.fileName[0] = filename; } public OrcInputField[] getInputFields() { return inputFields; } public void setInputFields( OrcInputField[] inputFields ) { this.inputFields = inputFields; } public void setInputFields( List inputFields ) { this.inputFields = new OrcInputField[inputFields.size()]; this.inputFields = inputFields.toArray( this.inputFields ); } @Override public String getXML() { StringBuilder retval = new StringBuilder( 1500 ); retval.append( " " ).append( XMLHandler.addTagValue( "passing_through_fields", inputFiles.passingThruFields ) ); retval.append( " " ).append( Const.CR ); //we need the equals by size arrays for inputFiles.fileName[i], inputFiles.fileMask[i], inputFiles.fileRequired[i], inputFiles.includeSubFolders[i] //to prevent the ArrayIndexOutOfBoundsException inputFiles.normalizeAllocation( inputFiles.fileName.length ); for ( int i = 0; i < inputFiles.fileName.length; i++ ) { retval.append( " " ).append( XMLHandler.addTagValue( "environment", inputFiles.environment[i] ) ); if ( parentStepMeta != null && parentStepMeta.getParentTransMeta() != null ) { parentStepMeta.getParentTransMeta().getNamedClusterEmbedManager().registerUrl( inputFiles.fileName[i] ); } retval.append( " " ).append( XMLHandler.addTagValue( "name", inputFiles.fileName[i] ) ); retval.append( " " ).append( XMLHandler.addTagValue( "filemask", inputFiles.fileMask[i] ) ); retval.append( " " ).append( XMLHandler.addTagValue( "exclude_filemask", inputFiles.excludeFileMask[i] ) ); retval.append( " " ).append( XMLHandler.addTagValue( "file_required", inputFiles.fileRequired[i] ) ); retval.append( " " ).append( XMLHandler.addTagValue( "include_subfolders", inputFiles.includeSubFolders[i] ) ); } retval.append( " " ).append( Const.CR ); retval.append( " " ).append( Const.CR ); for ( int i = 0; i < inputFields.length; i++ ) { OrcInputField field = inputFields[ i ]; retval.append( " " ).append( Const.CR ); retval.append( " " ).append( XMLHandler.addTagValue( "path", field.getFormatFieldName() ) ); retval.append( " " ).append( XMLHandler.addTagValue( "name", field.getPentahoFieldName() ) ); retval.append( " " ).append( XMLHandler.addTagValue( "type", field.getTypeDesc() ) ); OrcSpec.DataType orcDataType = field.getOrcType(); if ( orcDataType != null && orcDataType.getName() != null && !orcDataType.getName().equalsIgnoreCase( OrcSpec.DataType.NULL.getName() ) ) { retval.append( " " ).append( XMLHandler.addTagValue( "orc_type", orcDataType.getName() ) ); } else { retval.append( " " ).append( XMLHandler.addTagValue( "orc_type", OrcTypeConverter.convertToOrcType( field.getTypeDesc() ) ) ); } if ( field.getStringFormat() != null ) { retval.append( " " ).append( XMLHandler.addTagValue( "format", field.getStringFormat() ) ); } retval.append( " " ).append( Const.CR ); } retval.append( " " ).append( Const.CR ); return retval.toString(); } @Override public void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_transformation, ObjectId id_step ) throws KettleException { 
try { rep.saveStepAttribute( id_transformation, id_step, "passing_through_fields", inputFiles.passingThruFields ); if ( !( inputFiles.fileName.length == 1 && inputFiles.fileName[0].equalsIgnoreCase( "" ) ) ) { for ( int i = 0; i < inputFiles.fileName.length; i++ ) { rep.saveStepAttribute( id_transformation, id_step, i, "environment", inputFiles.environment[i] ); rep.saveStepAttribute( id_transformation, id_step, i, "file_name", inputFiles.fileName[i] ); rep.saveStepAttribute( id_transformation, id_step, i, "file_mask", inputFiles.fileMask[i] ); rep.saveStepAttribute( id_transformation, id_step, i, "exclude_file_mask", inputFiles.excludeFileMask[i] ); rep.saveStepAttribute( id_transformation, id_step, i, "file_required", inputFiles.fileRequired[i] ); rep.saveStepAttribute( id_transformation, id_step, i, "include_subfolders", inputFiles.includeSubFolders[i] ); } } for ( int i = 0; i < inputFields.length; i++ ) { OrcInputField field = inputFields[ i ]; rep.saveStepAttribute( id_transformation, id_step, i, "path", field.getFormatFieldName() ); rep.saveStepAttribute( id_transformation, id_step, i, "name", field.getPentahoFieldName() ); rep.saveStepAttribute( id_transformation, id_step, i, "type", field.getTypeDesc() ); OrcSpec.DataType orcDataType = field.getOrcType(); if ( orcDataType != null && orcDataType.getName() != null && !orcDataType.getName().equalsIgnoreCase( OrcSpec.DataType.NULL.getName() ) ) { rep.saveStepAttribute( id_transformation, id_step, i, "orc_type", orcDataType.getName() ); } else { rep.saveStepAttribute( id_transformation, id_step, i, "orc_type", OrcTypeConverter.convertToOrcType( field.getTypeDesc() ) ); } if ( field.getStringFormat() != null ) { rep.saveStepAttribute( id_transformation, id_step, i, "format", field.getStringFormat() ); } } } catch ( Exception e ) { throw new KettleException( "Unable to save step information to the repository for id_step=" + id_step, e ); } } @Override public void loadXML( Node stepnode, List databases, IMetaStore metaStore ) throws KettleXMLException { Node filenode = XMLHandler.getSubNode( stepnode, "file" ); Node fields = XMLHandler.getSubNode( stepnode, "fields" ); int nrfiles = XMLHandler.countNodes( filenode, "name" ); int nrfields = XMLHandler.countNodes( fields, "field" ); String passThroughFields = XMLHandler.getTagValue( stepnode, "passing_through_fields" ) == null ? 
"false" : XMLHandler.getTagValue( stepnode, "passing_through_fields" ); allocateFiles( nrfiles ); inputFiles.passingThruFields = ValueMetaBase.convertStringToBoolean( passThroughFields ); for ( int i = 0; i < nrfiles; i++ ) { Node envnode = XMLHandler.getSubNodeByNr( filenode, "environment", i ); Node filenamenode = XMLHandler.getSubNodeByNr( filenode, "name", i ); Node filemasknode = XMLHandler.getSubNodeByNr( filenode, "filemask", i ); Node excludefilemasknode = XMLHandler.getSubNodeByNr( filenode, "exclude_filemask", i ); Node fileRequirednode = XMLHandler.getSubNodeByNr( filenode, "file_required", i ); Node includeSubFoldersnode = XMLHandler.getSubNodeByNr( filenode, "include_subfolders", i ); inputFiles.environment[i] = XMLHandler.getNodeValue( envnode ); inputFiles.fileName[i] = XMLHandler.getNodeValue( filenamenode ); inputFiles.fileMask[i] = XMLHandler.getNodeValue( filemasknode ); inputFiles.excludeFileMask[i] = XMLHandler.getNodeValue( excludefilemasknode ); inputFiles.fileRequired[i] = XMLHandler.getNodeValue( fileRequirednode ); inputFiles.includeSubFolders[i] = XMLHandler.getNodeValue( includeSubFoldersnode ); } inputFields = new OrcInputField[ nrfields ]; for ( int i = 0; i < nrfields; i++ ) { Node fnode = XMLHandler.getSubNodeByNr( fields, "field", i ); OrcInputField field = new OrcInputField(); field.setFormatFieldName( XMLHandler.getTagValue( fnode, "path" ) ); field.setPentahoFieldName( XMLHandler.getTagValue( fnode, "name" ) ); field.setPentahoType( ValueMetaFactory.getIdForValueMeta( XMLHandler.getTagValue( fnode, "type" ) ) ); String orcType = XMLHandler.getTagValue( fnode, "orc_type" ); if ( orcType != null && !orcType.equalsIgnoreCase( "null" ) ) { field.setOrcType( orcType ); } else { field.setOrcType( OrcTypeConverter.convertToOrcType( field.getPentahoType() ) ); } String stringFormat = XMLHandler.getTagValue( fnode, "format" ); field.setStringFormat( stringFormat == null ? 
"" : stringFormat ); this.inputFields[ i ] = field; } } @Override public void readRep( Repository rep, IMetaStore metaStore, ObjectId id_step, List databases ) throws KettleException { try { int nrfiles = rep.countNrStepAttributes( id_step, "file_name" ); allocateFiles( nrfiles ); inputFiles.passingThruFields = rep.getStepAttributeBoolean( id_step, "passing_through_fields" ); for ( int i = 0; i < nrfiles; i++ ) { inputFiles.environment[i] = rep.getStepAttributeString( id_step, i, "environment" ); inputFiles.fileName[i] = rep.getStepAttributeString( id_step, i, "file_name" ); inputFiles.fileMask[i] = rep.getStepAttributeString( id_step, i, "file_mask" ); inputFiles.excludeFileMask[i] = rep.getStepAttributeString( id_step, i, "exclude_file_mask" ); inputFiles.fileRequired[i] = rep.getStepAttributeString( id_step, i, "file_required" ); if ( !YES.equalsIgnoreCase( inputFiles.fileRequired[i] ) ) { inputFiles.fileRequired[i] = NO; } inputFiles.includeSubFolders[i] = rep.getStepAttributeString( id_step, i, "include_subfolders" ); if ( !YES.equalsIgnoreCase( inputFiles.includeSubFolders[i] ) ) { inputFiles.includeSubFolders[i] = NO; } } int nrfields = rep.countNrStepAttributes( id_step, "name" ); inputFields = new OrcInputField[ nrfields ]; for ( int i = 0; i < nrfields; i++ ) { OrcInputField field = new OrcInputField(); field.setFormatFieldName( rep.getStepAttributeString( id_step, i, "path" ) ); field.setPentahoFieldName( rep.getStepAttributeString( id_step, i, "name" ) ); field.setPentahoType( ValueMetaFactory.getIdForValueMeta( rep.getStepAttributeString( id_step, i, "type" ) ) ); field.setOrcType( rep.getStepAttributeString( id_step, i, "orc_type" ) ); String stringFormat = rep.getStepAttributeString( id_step, i, "format" ); field.setStringFormat( stringFormat == null ? "" : stringFormat ); this.inputFields[ i ] = field; } } catch ( Exception e ) { throw new KettleException( "Unexpected error reading step information from the repository", e ); } } public void allocateFiles( int nrFiles ) { inputFiles.environment = new String[nrFiles]; inputFiles.fileName = new String[nrFiles]; inputFiles.fileMask = new String[nrFiles]; inputFiles.excludeFileMask = new String[nrFiles]; inputFiles.fileRequired = new String[nrFiles]; inputFiles.includeSubFolders = new String[nrFiles]; } /** * TODO: remove from base */ @Override public String getEncoding() { return null; } @Override public void setDefault() { allocateFiles( 0 ); inputFields = new OrcInputField[ 0 ]; } @Override public void resolve( Bowl bowl ) { if ( inputFiles != null && inputFiles.fileName != null ) { for ( int i = 0; i < inputFiles.fileName.length; i++ ) { try { String realFileName = getParentStepMeta().getParentTransMeta().environmentSubstitute( inputFiles.fileName[ i ] ); FileObject fileObject = KettleVFS.getInstance( bowl ).getFileObject( realFileName ); if ( AliasedFileObject.isAliasedFile( fileObject ) ) { inputFiles.fileName[ i ] = ( (AliasedFileObject) fileObject ).getAELSafeURIString(); } } catch ( KettleFileException e ) { throw new RuntimeException( e ); } } } } @Override public void getFields( Bowl bowl, RowMetaInterface rowMeta, String origin, RowMetaInterface[] info, StepMeta nextStep, VariableSpace space, Repository repository, IMetaStore metaStore ) throws KettleStepException { try { if ( !inputFiles.passingThruFields ) { // all incoming fields are not transmitted ! 
rowMeta.clear(); } else { if ( info != null ) { boolean found = false; for ( int i = 0; i < info.length && !found; i++ ) { if ( info[i] != null ) { rowMeta.mergeRowMeta( info[i], origin ); found = true; } } } } for ( IOrcInputField field : getInputFields() ) { String value = space.environmentSubstitute( field.getPentahoFieldName() ); ValueMetaInterface v = ValueMetaFactory.createValueMeta( value, field.getPentahoType() ); v.setOrigin( origin ); rowMeta.addValueMeta( v ); } } catch ( KettlePluginException e ) { throw new KettleStepException( "Unable to create value type", e ); } } } ================================================ FILE: kettle-plugins/formats-meta/src/main/java/org/pentaho/big/data/kettle/plugins/formats/orc/output/OrcOutputField.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.formats.orc.output; import org.pentaho.big.data.kettle.plugins.formats.BaseFormatOutputField; import org.pentaho.di.core.injection.Injection; import org.pentaho.hadoop.shim.api.format.IOrcOutputField; import org.pentaho.hadoop.shim.api.format.OrcSpec; public class OrcOutputField extends BaseFormatOutputField implements IOrcOutputField { public OrcSpec.DataType getOrcType() { return OrcSpec.DataType.values()[ formatType ]; } @Override public void setFormatType( OrcSpec.DataType orcType ) { this.formatType = orcType.getId(); } @Override public void setFormatType( int formatType ) { for ( OrcSpec.DataType orcType : OrcSpec.DataType.values() ) { if ( orcType.getId() == formatType ) { this.formatType = formatType; } } } @Injection( name = "FIELD_TYPE", group = "FIELDS" ) public void setFormatType( String typeName ) { try { setFormatType( Integer.parseInt( typeName ) ); } catch ( NumberFormatException nfe ) { for ( OrcSpec.DataType orcType : OrcSpec.DataType.values() ) { //Match on Name( for dialog ) or Enum Name ( For metadata injection ), note that the former uses "Int" and // the later uses "INTEGER" if ( orcType.getName().equalsIgnoreCase( typeName ) || orcType.toString().equalsIgnoreCase( typeName ) ) { this.formatType = orcType.getId(); return; } } } } public boolean isDecimalType() { return getOrcType().equals( OrcSpec.DataType.DECIMAL ); } @Override public void setPrecision( String precision ) { if ( ( precision == null ) || ( precision.trim().length() == 0 ) ) { this.precision = isDecimalType() ? OrcSpec.DEFAULT_DECIMAL_PRECISION : 0; } else { this.precision = Integer.valueOf( precision ); if ( ( this.precision <= 0 ) && isDecimalType() ) { this.precision = OrcSpec.DEFAULT_DECIMAL_PRECISION; } } } @Override public void setScale( String scale ) { if ( ( scale == null ) || ( scale.trim().length() == 0 ) ) { this.scale = isDecimalType() ? OrcSpec.DEFAULT_DECIMAL_SCALE : 0; } else { this.scale = Integer.valueOf( scale ); if ( ( this.scale < 0 ) ) { this.scale = isDecimalType() ? OrcSpec.DEFAULT_DECIMAL_SCALE : 0; } } } } ================================================ FILE: kettle-plugins/formats-meta/src/main/java/org/pentaho/big/data/kettle/plugins/formats/orc/output/OrcOutputMetaBase.java ================================================ /*! 
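A minimal sketch (illustrative class name) of the decimal-aware defaults that OrcOutputField above layers on top of BaseFormatOutputField, assuming the formats-meta classes and the shim API are on the classpath. Note that isDecimalType() goes through getOrcType(), which indexes OrcSpec.DataType.values() with the stored id, so the sketch assumes the DECIMAL id resolves back to DECIMAL:

import org.pentaho.big.data.kettle.plugins.formats.orc.output.OrcOutputField;
import org.pentaho.hadoop.shim.api.format.OrcSpec;

public class OrcOutputFieldSketch {
  public static void main( String[] args ) {
    OrcOutputField field = new OrcOutputField();

    // FIELD_TYPE injection accepts a numeric id, the dialog name or the enum constant name.
    field.setFormatType( OrcSpec.DataType.DECIMAL.toString() );

    // For a decimal column an empty precision/scale falls back to the ORC defaults
    // rather than the generic 10/0 defaults of BaseFormatOutputField.
    field.setPrecision( "" );
    field.setScale( " " );
    System.out.println( field.getPrecision() + "," + field.getScale() );
  }
}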
****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.formats.orc.output; import org.apache.commons.vfs2.FileObject; import org.apache.orc.CompressionKind; import org.pentaho.di.core.Const; import org.pentaho.di.core.bowl.Bowl; import org.pentaho.di.core.database.DatabaseMeta; import org.pentaho.di.core.exception.KettleException; import org.pentaho.di.core.exception.KettleFileException; import org.pentaho.di.core.exception.KettleXMLException; import org.pentaho.di.core.injection.Injection; import org.pentaho.di.core.injection.InjectionDeep; import org.pentaho.di.core.util.StringUtil; import org.pentaho.di.core.util.Utils; import org.pentaho.di.core.variables.VariableSpace; import org.pentaho.di.core.vfs.AliasedFileObject; import org.pentaho.di.core.vfs.KettleVFS; import org.pentaho.di.core.xml.XMLHandler; import org.pentaho.di.i18n.BaseMessages; import org.pentaho.di.repository.ObjectId; import org.pentaho.di.repository.Repository; import org.pentaho.di.trans.step.BaseStepMeta; import org.pentaho.di.trans.step.StepMetaInterface; import org.pentaho.di.workarounds.ResolvableResource; import org.pentaho.metastore.api.IMetaStore; import org.w3c.dom.Node; import java.text.SimpleDateFormat; import java.util.ArrayList; import java.util.Date; import java.util.List; import java.util.function.Function; /** * Orc output meta step without Hadoop-dependent classes. Required for reading the metadata in the Spark native code.
* * @author Alexander Buloichik@epam.com> */ public abstract class OrcOutputMetaBase extends BaseStepMeta implements StepMetaInterface, ResolvableResource { private static final Class PKG = OrcOutputMetaBase.class; public static final int DEFAULT_ROWS_BETWEEN_ENTRIES = 10000; public static final int DEFAULT_STRIPE_SIZE = 64; // In megabytes public static final int DEFAULT_COMPRESS_SIZE = 256; // In kilobytes @Injection( name = "FILENAME" ) private String filename; @InjectionDeep private List outputFields = new ArrayList<>(); @Injection( name = "OPTIONS_COMPRESSION" ) protected String compressionType = ""; @Injection( name = "OPTIONS_STRIPE_SIZE" ) protected int stripeSize = 64; @Injection( name = "OPTIONS_COMPRESS_SIZE" ) protected int compressSize = 256; @Injection( name = "OPTIONS_ROWS_BETWEEN_ENTRIES" ) protected int rowsBetweenEntries = 0; @Injection( name = "OPTIONS_DATE_IN_FILE_NAME" ) protected boolean dateInFileName = false; @Injection( name = "OPTIONS_TIME_IN_FILE_NAME" ) protected boolean timeInFileName = false; @Injection( name = "OPTIONS_DATE_FORMAT" ) protected String dateTimeFormat = ""; @Injection( name = "OVERRIDE_OUTPUT" ) protected boolean overrideOutput; @Override public void setDefault() { // TODO Auto-generated method stub } public String getFilename() { return filename; } public boolean isOverrideOutput() { return overrideOutput; } public void setOverrideOutput( boolean overrideOutput ) { this.overrideOutput = overrideOutput; } public void setFilename( String filename ) { this.filename = filename; } public List getOutputFields() { return outputFields; } public void setOutputFields( List outputFields ) { this.outputFields = outputFields; } public int getStripeSize() { return stripeSize; } public void setStripeSize( int stripeSize ) { this.stripeSize = stripeSize; } public int getCompressSize() { return compressSize; } public void setCompressSize( int compressSize ) { this.compressSize = compressSize; } public int getRowsBetweenEntries() { return rowsBetweenEntries; } public void setRowsBetweenEntries( int rowsBetweenEntries ) { this.rowsBetweenEntries = rowsBetweenEntries; } public boolean isDateInFileName() { return dateInFileName; } public void setDateInFileName( boolean dateInFileName ) { this.dateInFileName = dateInFileName; } public boolean isTimeInFileName() { return timeInFileName; } public void setTimeInFileName( boolean timeInFileName ) { this.timeInFileName = timeInFileName; } public String getDateTimeFormat() { return dateTimeFormat; } public void setDateTimeFormat( String dateTimeFormat ) { this.dateTimeFormat = dateTimeFormat; } @Override public void loadXML( Node stepnode, List databases, IMetaStore metaStore ) throws KettleXMLException { readData( stepnode, metaStore ); } private void readData( Node stepnode, IMetaStore metastore ) throws KettleXMLException { try { Node fields = XMLHandler.getSubNode( stepnode, "fields" ); int nrfields = XMLHandler.countNodes( fields, "field" ); List orcOutputFields = new ArrayList<>(); for ( int i = 0; i < nrfields; i++ ) { Node fnode = XMLHandler.getSubNodeByNr( fields, "field", i ); OrcOutputField outputField = new OrcOutputField(); outputField.setFormatFieldName( XMLHandler.getTagValue( fnode, "path" ) ); outputField.setPentahoFieldName( XMLHandler.getTagValue( fnode, "name" ) ); outputField.setFormatType( XMLHandler.getTagValue( fnode, "type" ) ); outputField.setPrecision( XMLHandler.getTagValue( fnode, "precision" ) ); outputField.setScale( XMLHandler.getTagValue( fnode, "scale" ) ); outputField.setAllowNull( 
XMLHandler.getTagValue( fnode, "nullable" ) ); outputField.setDefaultValue( XMLHandler.getTagValue( fnode, "default" ) ); orcOutputFields.add( outputField ); } this.outputFields = orcOutputFields; filename = XMLHandler.getTagValue( stepnode, FieldNames.FILE_NAME ); overrideOutput = "Y".equalsIgnoreCase( XMLHandler.getTagValue( stepnode, FieldNames.OVERRIDE_OUTPUT ) ); compressionType = XMLHandler.getTagValue( stepnode, FieldNames.COMPRESSION ); stripeSize = Integer.parseInt( XMLHandler.getTagValue( stepnode, FieldNames.STRIPE_SIZE ), 10 ); compressSize = Integer.parseInt( XMLHandler.getTagValue( stepnode, FieldNames.COMPRESS_SIZE ), 10 ); rowsBetweenEntries = Integer.parseInt( XMLHandler.getTagValue( stepnode, FieldNames.ROWS_BETWEEN_ENTRIES ), 10 ); dateTimeFormat = XMLHandler.getTagValue( stepnode, FieldNames.DATE_FORMAT ); dateInFileName = "Y".equalsIgnoreCase( XMLHandler.getTagValue( stepnode, FieldNames.DATE_IN_FILE_NAME ) ); timeInFileName = "Y".equalsIgnoreCase( XMLHandler.getTagValue( stepnode, FieldNames.TIME_IN_FILE_NAME ) ); } catch ( Exception e ) { throw new KettleXMLException( "Unable to load step info from XML", e ); } } @Override public String getXML() { StringBuffer retval = new StringBuffer( 800 ); final String INDENT = " "; retval.append( INDENT ).append( XMLHandler.addTagValue( FieldNames.FILE_NAME, filename ) ); retval.append( INDENT ).append( XMLHandler.addTagValue( FieldNames.OVERRIDE_OUTPUT, overrideOutput ) ); retval.append( INDENT ).append( XMLHandler.addTagValue( FieldNames.COMPRESSION, compressionType ) ); retval.append( INDENT ).append( XMLHandler.addTagValue( FieldNames.STRIPE_SIZE, stripeSize ) ); retval.append( INDENT ).append( XMLHandler.addTagValue( FieldNames.COMPRESS_SIZE, compressSize ) ); retval.append( INDENT ).append( XMLHandler.addTagValue( FieldNames.ROWS_BETWEEN_ENTRIES, rowsBetweenEntries ) ); retval.append( INDENT ).append( XMLHandler.addTagValue( FieldNames.DATE_FORMAT, dateTimeFormat ) ); retval.append( INDENT ).append( XMLHandler.addTagValue( FieldNames.DATE_IN_FILE_NAME, dateInFileName ) ); retval.append( INDENT ).append( XMLHandler.addTagValue( FieldNames.TIME_IN_FILE_NAME, timeInFileName ) ); retval.append( " " ).append( Const.CR ); for ( int i = 0; i < outputFields.size(); i++ ) { OrcOutputField field = outputFields.get( i ); if ( field.getPentahoFieldName() != null && field.getPentahoFieldName().length() != 0 ) { retval.append( " " ).append( Const.CR ); retval.append( " " ).append( XMLHandler.addTagValue( "path", field.getFormatFieldName() ) ); retval.append( " " ).append( XMLHandler.addTagValue( "name", field.getPentahoFieldName() ) ); retval.append( " " ).append( XMLHandler.addTagValue( "type", field.getOrcType().getId() ) ); retval.append( " " ).append( XMLHandler.addTagValue( "precision", field.getPrecision() ) ); retval.append( " " ).append( XMLHandler.addTagValue( "scale", field.getScale() ) ); retval.append( " " ).append( XMLHandler.addTagValue( "nullable", field.getAllowNull() ) ); retval.append( " " ).append( XMLHandler.addTagValue( "default", field.getDefaultValue() ) ); retval.append( " " ).append( Const.CR ); } } retval.append( " " ).append( Const.CR ); return retval.toString(); } @Override public void readRep( Repository rep, IMetaStore metaStore, ObjectId id_step, List databases ) throws KettleException { try { filename = rep.getStepAttributeString( id_step, FieldNames.FILE_NAME ); overrideOutput = rep.getStepAttributeBoolean( id_step, FieldNames.OVERRIDE_OUTPUT ); compressionType = rep.getStepAttributeString( id_step, 
FieldNames.COMPRESSION ); stripeSize = Math.toIntExact( rep.getStepAttributeInteger( id_step, FieldNames.STRIPE_SIZE ) ); compressSize = Math.toIntExact( rep.getStepAttributeInteger( id_step, FieldNames.COMPRESS_SIZE ) ); rowsBetweenEntries = Math.toIntExact( rep.getStepAttributeInteger( id_step, FieldNames.ROWS_BETWEEN_ENTRIES ) ); dateTimeFormat = rep.getStepAttributeString( id_step, FieldNames.DATE_FORMAT ); dateInFileName = rep.getStepAttributeBoolean( id_step, FieldNames.DATE_IN_FILE_NAME ); timeInFileName = rep.getStepAttributeBoolean( id_step, FieldNames.TIME_IN_FILE_NAME ); // using the "type" column to get the number of field rows because "type" is guaranteed not to be null. int nrfields = rep.countNrStepAttributes( id_step, "type" ); List orcOutputFields = new ArrayList<>(); for ( int i = 0; i < nrfields; i++ ) { OrcOutputField outputField = new OrcOutputField(); outputField.setFormatFieldName( rep.getStepAttributeString( id_step, i, "path" ) ); outputField.setPentahoFieldName( rep.getStepAttributeString( id_step, i, "name" ) ); outputField.setFormatType( rep.getStepAttributeString( id_step, i, "type" ) ); outputField.setPrecision( rep.getStepAttributeString( id_step, i, "precision" ) ); outputField.setScale( rep.getStepAttributeString( id_step, i, "scale" ) ); outputField.setAllowNull( rep.getStepAttributeString( id_step, i, "nullable" ) ); outputField.setDefaultValue( rep.getStepAttributeString( id_step, i, "default" ) ); orcOutputFields.add( outputField ); } this.outputFields = orcOutputFields; } catch ( Exception e ) { throw new KettleException( "Unexpected error reading step information from the repository", e ); } } @Override public void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_transformation, ObjectId id_step ) throws KettleException { try { super.saveRep( rep, metaStore, id_transformation, id_step ); rep.saveStepAttribute( id_transformation, id_step, FieldNames.FILE_NAME, filename ); rep.saveStepAttribute( id_transformation, id_step, FieldNames.OVERRIDE_OUTPUT, overrideOutput ); rep.saveStepAttribute( id_transformation, id_step, FieldNames.COMPRESSION, compressionType ); rep.saveStepAttribute( id_transformation, id_step, FieldNames.STRIPE_SIZE, stripeSize ); rep.saveStepAttribute( id_transformation, id_step, FieldNames.COMPRESS_SIZE, compressSize ); rep.saveStepAttribute( id_transformation, id_step, FieldNames.ROWS_BETWEEN_ENTRIES, rowsBetweenEntries ); rep.saveStepAttribute( id_transformation, id_step, FieldNames.DATE_FORMAT, dateTimeFormat ); rep.saveStepAttribute( id_transformation, id_step, FieldNames.DATE_IN_FILE_NAME, dateInFileName ); rep.saveStepAttribute( id_transformation, id_step, FieldNames.TIME_IN_FILE_NAME, timeInFileName ); for ( int i = 0; i < outputFields.size(); i++ ) { OrcOutputField field = outputFields.get( i ); rep.saveStepAttribute( id_transformation, id_step, i, "path", field.getFormatFieldName() ); rep.saveStepAttribute( id_transformation, id_step, i, "name", field.getPentahoFieldName() ); rep.saveStepAttribute( id_transformation, id_step, i, "type", field.getOrcType().getId() ); rep.saveStepAttribute( id_transformation, id_step, i, "precision", field.getPrecision() ); rep.saveStepAttribute( id_transformation, id_step, i, "scale", field.getScale() ); rep.saveStepAttribute( id_transformation, id_step, i, "nullable", field.getAllowNull() ); rep.saveStepAttribute( id_transformation, id_step, i, "default", field.getDefaultValue() ); } } catch ( Exception e ) { throw new KettleException( "Unable to save step information to the 
repository for id_step=" + id_step, e ); } } @Override public void resolve( Bowl bowl ) { if ( filename != null && !filename.isEmpty() ) { try { String realFileName = getParentStepMeta().getParentTransMeta().environmentSubstitute( filename ); FileObject fileObject = KettleVFS.getInstance( bowl ).getFileObject( realFileName ); if ( AliasedFileObject.isAliasedFile( fileObject ) ) { filename = ( (AliasedFileObject) fileObject ).getAELSafeURIString(); } } catch ( KettleFileException e ) { throw new RuntimeException( e ); } } } public String getCompressionType() { return StringUtil.isVariable( compressionType ) ? compressionType : getCompressionType( null ).toString(); } public void setCompressionType( String value ) { compressionType = StringUtil.isVariable( value ) ? value : parseFromToString( value, CompressionKind.values(), CompressionKind.NONE ).toString(); } public CompressionKind getCompressionType(VariableSpace vspace ) { return parseReplace( compressionType, vspace, str -> findCompressionType( str ), CompressionKind.NONE ); } public String[] getCompressionTypes() { return getStrings( CompressionKind.values() ); } private CompressionKind findCompressionType( String str ) { try { return CompressionKind.valueOf( str ); } catch ( Throwable th ) { return parseFromToString( str, CompressionKind.values(), CompressionKind.NONE ); } } protected static String[] getStrings( T[] objects ) { String[] names = new String[objects.length]; int i = 0; for ( T obj : objects ) { names[i++] = obj.toString(); } return names; } protected static T parseFromToString( String str, T[] values, T defaultValue ) { if ( !Utils.isEmpty( str ) ) { for ( T type : values ) { if ( str.equalsIgnoreCase( type.toString() ) ) { return type; } } } return defaultValue; } private T parseReplace( String value, VariableSpace vspace, Function parser, T defaultValue ) { String replaced = vspace != null ? vspace.environmentSubstitute( value ) : value; if ( !Utils.isEmpty( replaced ) ) { try { return parser.apply( replaced ); } catch ( Exception e ) { // ignored } } return defaultValue; } public String constructOutputFilename() { String outputFileName = filename; if ( dateTimeFormat != null && !dateTimeFormat.isEmpty() ) { String dateTimeFormatPattern = getParentStepMeta().getParentTransMeta().environmentSubstitute( dateTimeFormat ); outputFileName += new SimpleDateFormat( dateTimeFormatPattern ).format( new Date() ); } else { if ( dateInFileName ) { outputFileName += '_' + new SimpleDateFormat( "yyyyMMdd" ).format( new Date() ); } if ( timeInFileName ) { outputFileName += '_' + new SimpleDateFormat( "HHmmss" ).format( new Date() ); } } return outputFileName; } protected static class FieldNames { public static final String FILE_NAME = "filename"; public static final String OVERRIDE_OUTPUT = "overrideOutput"; public static final String COMPRESSION = "compression"; public static final String COMPRESS_SIZE = "compressSize"; public static final String INLINE_INDEXES = "inlineIndexes"; public static final String ROWS_BETWEEN_ENTRIES = "rowsBetweenEntries"; public static final String DATE_IN_FILE_NAME = "dateInFileName"; public static final String TIME_IN_FILE_NAME = "timeInFileName"; public static final String DATE_FORMAT = "dateTimeFormat"; public static final String STRIPE_SIZE = "stripeSize"; } } ================================================ FILE: kettle-plugins/formats-meta/src/main/java/org/pentaho/big/data/kettle/plugins/formats/parquet/ParquetTypeConverter.java ================================================ /*! 
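A minimal, JDK-only sketch that mirrors the suffix logic of OrcOutputMetaBase.constructOutputFilename() above; variable substitution via the parent transformation is left out, and the class and method here are illustrative rather than repository code:

import java.text.SimpleDateFormat;
import java.util.Date;

public class OrcOutputFilenameSketch {
  // An explicit date-time format wins; otherwise optional _yyyyMMdd and _HHmmss
  // suffixes are appended, matching the logic of constructOutputFilename().
  static String constructOutputFilename( String filename, String dateTimeFormat,
                                          boolean dateInFileName, boolean timeInFileName ) {
    String out = filename;
    if ( dateTimeFormat != null && !dateTimeFormat.isEmpty() ) {
      out += new SimpleDateFormat( dateTimeFormat ).format( new Date() );
    } else {
      if ( dateInFileName ) {
        out += '_' + new SimpleDateFormat( "yyyyMMdd" ).format( new Date() );
      }
      if ( timeInFileName ) {
        out += '_' + new SimpleDateFormat( "HHmmss" ).format( new Date() );
      }
    }
    return out;
  }

  public static void main( String[] args ) {
    System.out.println( constructOutputFilename( "out.orc", null, true, true ) );
    System.out.println( constructOutputFilename( "out.orc", "_yyyy-MM-dd", false, false ) );
  }
}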
****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.formats.parquet; import org.pentaho.di.core.row.ValueMetaInterface; import org.pentaho.hadoop.shim.api.format.ParquetSpec; /** * Created by rmansoor on 8/8/2018. */ public class ParquetTypeConverter { public static String convertToParquetType( String pdiType ) { int pdiTypeId = -1; for ( int i = 0; i < ValueMetaInterface.typeCodes.length; i++ ) { if ( ValueMetaInterface.typeCodes[ i ].equals( pdiType ) ) { pdiTypeId = i; break; } } return convertToParquetType( pdiTypeId ); } public static String convertToParquetType( int pdiType ) { switch ( pdiType ) { case ValueMetaInterface.TYPE_INET: case ValueMetaInterface.TYPE_STRING: return ParquetSpec.DataType.UTF8.getName(); case ValueMetaInterface.TYPE_TIMESTAMP: return ParquetSpec.DataType.TIMESTAMP_MILLIS.getName(); case ValueMetaInterface.TYPE_BINARY: return ParquetSpec.DataType.BINARY.getName(); case ValueMetaInterface.TYPE_BIGNUMBER: return ParquetSpec.DataType.DECIMAL.getName(); case ValueMetaInterface.TYPE_BOOLEAN: return ParquetSpec.DataType.BOOLEAN.getName(); case ValueMetaInterface.TYPE_DATE: return ParquetSpec.DataType.DATE.getName(); case ValueMetaInterface.TYPE_INTEGER: return ParquetSpec.DataType.INT_64.getName(); case ValueMetaInterface.TYPE_NUMBER: return ParquetSpec.DataType.DOUBLE.getName(); default: return ParquetSpec.DataType.NULL.getName(); } } } ================================================ FILE: kettle-plugins/formats-meta/src/main/java/org/pentaho/big/data/kettle/plugins/formats/parquet/input/ParquetInputField.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. 
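The same idea for Parquet: a minimal sketch (illustrative class name) of the PDI-to-Parquet mapping in ParquetTypeConverter above, again using only the registry-free int overload:

import org.pentaho.big.data.kettle.plugins.formats.parquet.ParquetTypeConverter;
import org.pentaho.di.core.row.ValueMetaInterface;

public class ParquetTypeConverterSketch {
  public static void main( String[] args ) {
    // Integer maps to INT_64, String and Internet address to UTF8, Timestamp to TIMESTAMP_MILLIS.
    System.out.println( ParquetTypeConverter.convertToParquetType( ValueMetaInterface.TYPE_INTEGER ) );
    System.out.println( ParquetTypeConverter.convertToParquetType( ValueMetaInterface.TYPE_INET ) );
    System.out.println( ParquetTypeConverter.convertToParquetType( ValueMetaInterface.TYPE_TIMESTAMP ) );

    // Unknown ids fall through to the Parquet NULL type name.
    System.out.println( ParquetTypeConverter.convertToParquetType( -1 ) );
  }
}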
* * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.formats.parquet.input; import org.pentaho.big.data.kettle.plugins.formats.BaseFormatInputField; import org.pentaho.di.core.injection.Injection; import org.pentaho.di.core.row.value.ValueMetaFactory; import org.pentaho.hadoop.shim.api.format.IParquetInputField; import org.pentaho.hadoop.shim.api.format.ParquetSpec; public class ParquetInputField extends BaseFormatInputField implements IParquetInputField { @Override public void setParquetType( ParquetSpec.DataType parquetType ) { setFormatType( parquetType.getId() ); } @Injection( name = "PARQUET_TYPE", group = "FIELDS" ) @Override public void setParquetType( String parquetType ) { for ( ParquetSpec.DataType tmpType : ParquetSpec.DataType.values() ) { if ( tmpType.getName().equalsIgnoreCase( parquetType ) ) { setFormatType( tmpType.getId() ); break; } } } @Override public ParquetSpec.DataType getParquetType() { return ParquetSpec.DataType.getDataType( getFormatType() ); } public String getTypeDesc() { return ValueMetaFactory.getValueMetaName( getPentahoType() ); } } ================================================ FILE: kettle-plugins/formats-meta/src/main/java/org/pentaho/big/data/kettle/plugins/formats/parquet/input/ParquetInputMetaBase.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.formats.parquet.input; import org.apache.commons.vfs2.FileObject; import org.pentaho.big.data.kettle.plugins.formats.FormatInputFile; import org.pentaho.big.data.kettle.plugins.formats.parquet.ParquetTypeConverter; import org.pentaho.di.core.bowl.Bowl; import org.pentaho.di.core.Const; import org.pentaho.di.core.database.DatabaseMeta; import org.pentaho.di.core.exception.KettleException; import org.pentaho.di.core.exception.KettleFileException; import org.pentaho.di.core.exception.KettlePluginException; import org.pentaho.di.core.exception.KettleStepException; import org.pentaho.di.core.exception.KettleXMLException; import org.pentaho.di.core.injection.Injection; import org.pentaho.di.core.row.RowMetaInterface; import org.pentaho.di.core.row.ValueMetaInterface; import org.pentaho.di.core.row.value.ValueMetaBase; import org.pentaho.di.core.row.value.ValueMetaFactory; import org.pentaho.di.core.variables.VariableSpace; import org.pentaho.di.core.vfs.AliasedFileObject; import org.pentaho.di.core.vfs.KettleVFS; import org.pentaho.di.core.xml.XMLHandler; import org.pentaho.di.repository.ObjectId; import org.pentaho.di.repository.Repository; import org.pentaho.di.trans.step.StepMeta; import org.pentaho.di.trans.steps.file.BaseFileInputAdditionalField; import org.pentaho.di.trans.steps.file.BaseFileInputMeta; import org.pentaho.di.workarounds.ResolvableResource; import org.pentaho.hadoop.shim.api.format.IParquetInputField; import org.pentaho.hadoop.shim.api.format.ParquetSpec; import org.pentaho.metastore.api.IMetaStore; import org.w3c.dom.Node; import java.util.List; /** * Parquet input meta step without Hadoop-dependent classes. Required for read meta in the spark native code. 
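A minimal sketch (illustrative class name) of PARQUET_TYPE injection on ParquetInputField above; unlike the ORC field it only matches the dialog name from ParquetSpec.DataType.getName(), case-insensitively:

import org.pentaho.big.data.kettle.plugins.formats.parquet.input.ParquetInputField;
import org.pentaho.hadoop.shim.api.format.ParquetSpec;

public class ParquetInputFieldSketch {
  public static void main( String[] args ) {
    ParquetInputField field = new ParquetInputField();
    field.setParquetType( ParquetSpec.DataType.UTF8.getName() );
    // getParquetType() resolves the stored id back to the enum constant.
    System.out.println( field.getParquetType() );
  }
}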
* * @author */ @SuppressWarnings( "deprecation" ) public abstract class ParquetInputMetaBase extends BaseFileInputMeta implements ResolvableResource { /** If receiving input rows, should we pass through existing fields? */ @Injection( name = "IGNORE_EMPTY_FOLDER" ) boolean ignoreEmptyFolder = false; public ParquetInputMetaBase() { additionalOutputFields = new BaseFileInputAdditionalField(); inputFiles = new FormatInputFile(); inputFields = new ParquetInputField[ 0 ]; } public boolean isIgnoreEmptyFolder() { return ignoreEmptyFolder; } public void setIgnoreEmptyFolder( boolean ignoreEmptyFolder ) { this.ignoreEmptyFolder = ignoreEmptyFolder; } public String getFilename() { if ( inputFiles != null && inputFiles.fileName != null && inputFiles.fileName.length > 0 ) { return inputFiles.fileName[0]; } else { return null; } } public String[] getFileNames() { if ( inputFiles != null && inputFiles.fileName != null && inputFiles.fileName.length > 0 ) { return inputFiles.fileName; } else { return null; } } public void setFilename( String filename ) { inputFiles.fileName[0] = filename; } public void setFilenames( String[] filenames ) { inputFiles.fileName = filenames; } public ParquetInputField[] getInputFields() { return inputFields; } public void setInputFields( ParquetInputField[] inputFields ) { this.inputFields = inputFields; } public void setInputFields( List inputFields ) { this.inputFields = new ParquetInputField[ inputFields.size() ]; this.inputFields = inputFields.toArray( this.inputFields ); } @Override public String getXML() { StringBuilder retval = new StringBuilder( 1500 ); retval.append( " " ).append( XMLHandler.addTagValue( "passing_through_fields", inputFiles.passingThruFields ) ); retval.append( " " ).append( XMLHandler.addTagValue( "ignore_empty_folder", ignoreEmptyFolder ) ); retval.append( " " ).append( Const.CR ); //we need the equals by size arrays for inputFiles.fileName[i], inputFiles.fileMask[i], inputFiles.fileRequired[i], inputFiles.includeSubFolders[i] //to prevent the ArrayIndexOutOfBoundsException inputFiles.normalizeAllocation( inputFiles.fileName.length ); for ( int i = 0; i < inputFiles.fileName.length; i++ ) { retval.append( " " ).append( XMLHandler.addTagValue( "environment", inputFiles.environment[ i ] ) ); if ( parentStepMeta != null && parentStepMeta.getParentTransMeta() != null ) { parentStepMeta.getParentTransMeta().getNamedClusterEmbedManager().registerUrl( inputFiles.fileName[ i ] ); } retval.append( " " ).append( XMLHandler.addTagValue( "name", inputFiles.fileName[ i ] ) ); retval.append( " " ).append( XMLHandler.addTagValue( "filemask", inputFiles.fileMask[ i ] ) ); retval.append( " " ).append( XMLHandler.addTagValue( "exclude_filemask", inputFiles.excludeFileMask[ i ] ) ); retval.append( " " ).append( XMLHandler.addTagValue( "file_required", inputFiles.fileRequired[ i ] ) ); retval.append( " " ).append( XMLHandler.addTagValue( "include_subfolders", inputFiles.includeSubFolders[ i ] ) ); } retval.append( " " ).append( Const.CR ); retval.append( " " ).append( Const.CR ); for ( int i = 0; i < inputFields.length; i++ ) { ParquetInputField field = inputFields[ i ]; retval.append( " " ).append( Const.CR ); retval.append( " " ).append( XMLHandler.addTagValue( "path", field.getFormatFieldName() ) ); retval.append( " " ).append( XMLHandler.addTagValue( "name", field.getPentahoFieldName() ) ); retval.append( " " ).append( XMLHandler.addTagValue( "type", field.getTypeDesc() ) ); ParquetSpec.DataType parquetType = field.getParquetType(); if ( parquetType != null && 
!parquetType.equals( ParquetSpec.DataType.NULL ) ) { retval.append( " " ) .append( XMLHandler.addTagValue( "parquet_type", parquetType.getName() ) ); } else { retval.append( " " ) .append( XMLHandler.addTagValue( "parquet_type", ParquetTypeConverter.convertToParquetType( field.getTypeDesc() ) ) ); } if ( field.getStringFormat() != null ) { retval.append( " " ).append( XMLHandler.addTagValue( "format", field.getStringFormat() ) ); } retval.append( " " ).append( Const.CR ); } retval.append( " " ).append( Const.CR ); return retval.toString(); } @Override public void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_transformation, ObjectId id_step ) throws KettleException { try { rep.saveStepAttribute( id_transformation, id_step, "ignore_empty_folder", ignoreEmptyFolder ); rep.saveStepAttribute( id_transformation, id_step, "passing_through_fields", inputFiles.passingThruFields ); if ( !( inputFiles.fileName.length == 1 && inputFiles.fileName[0].equalsIgnoreCase( "" ) ) ) { for ( int i = 0; i < inputFiles.fileName.length; i++ ) { rep.saveStepAttribute( id_transformation, id_step, i, "environment", inputFiles.environment[i] ); rep.saveStepAttribute( id_transformation, id_step, i, "file_name", inputFiles.fileName[i] ); rep.saveStepAttribute( id_transformation, id_step, i, "file_mask", inputFiles.fileMask[i] ); rep.saveStepAttribute( id_transformation, id_step, i, "exclude_file_mask", inputFiles.excludeFileMask[i] ); rep.saveStepAttribute( id_transformation, id_step, i, "file_required", inputFiles.fileRequired[i] ); rep.saveStepAttribute( id_transformation, id_step, i, "include_subfolders", inputFiles.includeSubFolders[i] ); } } for ( int i = 0; i < inputFields.length; i++ ) { ParquetInputField field = inputFields[ i ]; rep.saveStepAttribute( id_transformation, id_step, i, "path", field.getFormatFieldName() ); rep.saveStepAttribute( id_transformation, id_step, i, "field_name", field.getPentahoFieldName() ); rep.saveStepAttribute( id_transformation, id_step, i, "field_type", field.getTypeDesc() ); ParquetSpec.DataType parquetType = field.getParquetType(); if ( parquetType != null && !parquetType.equals( ParquetSpec.DataType.NULL ) ) { rep.saveStepAttribute( id_transformation, id_step, i, "parquet_type", parquetType.getName() ); } else { rep.saveStepAttribute( id_transformation, id_step, i, "parquet_type", ParquetTypeConverter.convertToParquetType( field.getTypeDesc() ) ); } if ( field.getStringFormat() != null ) { rep.saveStepAttribute( id_transformation, id_step, i, "format", field.getStringFormat() ); } } } catch ( Exception e ) { throw new KettleException( "Unable to save step information to the repository for id_step=" + id_step, e ); } } @Override public void loadXML( Node stepnode, List databases, IMetaStore metaStore ) throws KettleXMLException { Node filenode = XMLHandler.getSubNode( stepnode, "file" ); Node fields = XMLHandler.getSubNode( stepnode, "fields" ); int nrfiles = XMLHandler.countNodes( filenode, "name" ); int nrfields = XMLHandler.countNodes( fields, "field" ); String passThroughFields = XMLHandler.getTagValue( stepnode, "passing_through_fields" ) == null ? "false" : XMLHandler.getTagValue( stepnode, "passing_through_fields" ); String skipIfNoFile = XMLHandler.getTagValue( stepnode, "ignore_empty_folder" ) == null ? 
"false" : XMLHandler.getTagValue( stepnode, "ignore_empty_folder" ); allocateFiles( nrfiles ); inputFiles.passingThruFields = ValueMetaBase.convertStringToBoolean( passThroughFields ); ignoreEmptyFolder = ValueMetaBase.convertStringToBoolean( skipIfNoFile ); for ( int i = 0; i < nrfiles; i++ ) { Node envnode = XMLHandler.getSubNodeByNr( filenode, "environment", i ); Node filenamenode = XMLHandler.getSubNodeByNr( filenode, "name", i ); Node filemasknode = XMLHandler.getSubNodeByNr( filenode, "filemask", i ); Node excludefilemasknode = XMLHandler.getSubNodeByNr( filenode, "exclude_filemask", i ); Node fileRequirednode = XMLHandler.getSubNodeByNr( filenode, "file_required", i ); Node includeSubFoldersnode = XMLHandler.getSubNodeByNr( filenode, "include_subfolders", i ); inputFiles.environment[ i ] = XMLHandler.getNodeValue( envnode ); inputFiles.fileName[ i ] = XMLHandler.getNodeValue( filenamenode ); inputFiles.fileMask[ i ] = XMLHandler.getNodeValue( filemasknode ); inputFiles.excludeFileMask[ i ] = XMLHandler.getNodeValue( excludefilemasknode ); inputFiles.fileRequired[ i ] = XMLHandler.getNodeValue( fileRequirednode ); inputFiles.includeSubFolders[ i ] = XMLHandler.getNodeValue( includeSubFoldersnode ); } this.inputFields = new ParquetInputField[ nrfields ]; for ( int i = 0; i < nrfields; i++ ) { Node fnode = XMLHandler.getSubNodeByNr( fields, "field", i ); ParquetInputField field = new ParquetInputField(); field.setFormatFieldName( XMLHandler.getTagValue( fnode, "path" ) ); field.setPentahoFieldName( XMLHandler.getTagValue( fnode, "name" ) ); field.setPentahoType( ValueMetaFactory.getIdForValueMeta( XMLHandler.getTagValue( fnode, "type" ) ) ); String parquetType = XMLHandler.getTagValue( fnode, "parquet_type" ); if ( parquetType != null && !parquetType.equalsIgnoreCase( "null" ) ) { field.setParquetType( parquetType ); } else { field.setParquetType( ParquetTypeConverter.convertToParquetType( field.getPentahoType() ) ); } String stringFormat = XMLHandler.getTagValue( fnode, "format" ); field.setStringFormat( stringFormat == null ? 
"" : stringFormat ); this.inputFields[ i ] = field; } } @Override public void readRep( Repository rep, IMetaStore metaStore, ObjectId id_step, List databases ) throws KettleException { try { int nrfiles = rep.countNrStepAttributes( id_step, "file_name" ); allocateFiles( nrfiles ); inputFiles.passingThruFields = rep.getStepAttributeBoolean( id_step, "passing_through_fields" ); ignoreEmptyFolder = rep.getStepAttributeBoolean( id_step, "ignore_empty_folder" ); for ( int i = 0; i < nrfiles; i++ ) { inputFiles.environment[ i ] = rep.getStepAttributeString( id_step, i, "environment" ); inputFiles.fileName[ i ] = rep.getStepAttributeString( id_step, i, "file_name" ); inputFiles.fileMask[ i ] = rep.getStepAttributeString( id_step, i, "file_mask" ); inputFiles.excludeFileMask[ i ] = rep.getStepAttributeString( id_step, i, "exclude_file_mask" ); inputFiles.fileRequired[ i ] = rep.getStepAttributeString( id_step, i, "file_required" ); if ( !YES.equalsIgnoreCase( inputFiles.fileRequired[ i ] ) ) { inputFiles.fileRequired[ i ] = NO; } inputFiles.includeSubFolders[ i ] = rep.getStepAttributeString( id_step, i, "include_subfolders" ); if ( !YES.equalsIgnoreCase( inputFiles.includeSubFolders[ i ] ) ) { inputFiles.includeSubFolders[ i ] = NO; } } int nrfields = rep.countNrStepAttributes( id_step, "field_name" ); this.inputFields = new ParquetInputField[ nrfields ]; for ( int i = 0; i < nrfields; i++ ) { ParquetInputField field = new ParquetInputField(); field.setFormatFieldName( rep.getStepAttributeString( id_step, i, "path" ) ); field.setPentahoFieldName( rep.getStepAttributeString( id_step, i, "field_name" ) ); field.setPentahoType( rep.getStepAttributeString( id_step, i, "field_type" ) ); String parquetType = rep.getStepAttributeString( id_step, i, "parquet_type" ); if ( parquetType != null && !parquetType.equalsIgnoreCase( "null" ) ) { field.setParquetType( parquetType ); } else { field.setParquetType( ParquetTypeConverter.convertToParquetType( field.getPentahoType() ) ); } String stringFormat = rep.getStepAttributeString( id_step, i, "format" ); field.setStringFormat( stringFormat == null ? 
"" : stringFormat ); this.inputFields[ i ] = field; } } catch ( Exception e ) { throw new KettleException( "Unexpected error reading step information from the repository", e ); } } public void allocateFiles( int nrFiles ) { inputFiles.environment = new String[ nrFiles ]; inputFiles.fileName = new String[ nrFiles ]; inputFiles.fileMask = new String[ nrFiles ]; inputFiles.excludeFileMask = new String[ nrFiles ]; inputFiles.fileRequired = new String[ nrFiles ]; inputFiles.includeSubFolders = new String[ nrFiles ]; } /** * TODO: remove from base */ @Override public String getEncoding() { return null; } @Override public void setDefault() { allocateFiles( 0 ); inputFields = new ParquetInputField[ 0 ]; } @Override public void resolve( Bowl bowl ) { if ( inputFiles != null && inputFiles.fileName != null ) { for ( int i = 0; i < inputFiles.fileName.length; i++ ) { try { String realFileName = getParentStepMeta().getParentTransMeta().environmentSubstitute( inputFiles.fileName[ i ] ); FileObject fileObject = KettleVFS.getInstance( bowl ).getFileObject( realFileName ); if ( AliasedFileObject.isAliasedFile( fileObject ) ) { inputFiles.fileName[ i ] = ( (AliasedFileObject) fileObject ).getAELSafeURIString(); } } catch ( KettleFileException e ) { throw new RuntimeException( e ); } } } } @Override public void getFields( Bowl bowl, RowMetaInterface rowMeta, String origin, RowMetaInterface[] info, StepMeta nextStep, VariableSpace space, Repository repository, IMetaStore metaStore ) throws KettleStepException { try { for ( int i = 0; i < inputFields.length; i++ ) { IParquetInputField field = inputFields[ i ]; String value = space.environmentSubstitute( field.getPentahoFieldName() ); ValueMetaInterface v = ValueMetaFactory.createValueMeta( value, field.getPentahoType() ); v.setOrigin( origin ); rowMeta.addValueMeta( v ); } } catch ( KettlePluginException e ) { throw new KettleStepException( "Unable to create value type", e ); } } } ================================================ FILE: kettle-plugins/formats-meta/src/main/java/org/pentaho/big/data/kettle/plugins/formats/parquet/output/ParquetOutputField.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. 
* * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.formats.parquet.output; import org.pentaho.big.data.kettle.plugins.formats.BaseFormatOutputField; import org.pentaho.big.data.kettle.plugins.formats.parquet.ParquetTypeConverter; import org.pentaho.di.core.injection.Injection; import org.pentaho.di.core.row.ValueMetaInterface; import org.pentaho.hadoop.shim.api.format.IParquetOutputField; import org.pentaho.hadoop.shim.api.format.ParquetSpec; public class ParquetOutputField extends BaseFormatOutputField implements IParquetOutputField { @Override public ParquetSpec.DataType getParquetType() { for ( ParquetSpec.DataType type : ParquetSpec.DataType.values() ) { if ( type.getId() == formatType ) { return type; } } return null; } public void setFormatType( ParquetSpec.DataType formatType ) { this.formatType = formatType.getId(); } @Injection( name = "FIELD_PARQUET_TYPE", group = "FIELDS" ) public void setFormatType( String typeName ) { try { setFormatType( Integer.parseInt( typeName ) ); } catch ( NumberFormatException nfe ) { for ( ParquetSpec.DataType parquetType : ParquetSpec.DataType.values() ) { if ( parquetType.getName().equals( typeName ) ) { this.formatType = parquetType.getId(); break; } } } } @Injection( name = "FIELD_TYPE", group = "FIELDS" ) @Deprecated public void setPentahoType( String typeName ) { for ( int i = 0; i < ValueMetaInterface.typeCodes.length; i++ ) { if ( typeName.equals( ValueMetaInterface.typeCodes[ i ] ) ) { setFormatType( ParquetTypeConverter.convertToParquetType( i ) ); break; } } } public boolean isDecimalType() { return getParquetType().equals( ParquetSpec.DataType.DECIMAL ); } @Override public void setPrecision( String precision ) { if ( ( precision == null ) || ( precision.trim().length() == 0 ) ) { this.precision = isDecimalType() ? ParquetSpec.DEFAULT_DECIMAL_PRECISION : 0; } else { this.precision = Integer.valueOf( precision ); if ( ( this.precision <= 0 ) && isDecimalType() ) { this.precision = ParquetSpec.DEFAULT_DECIMAL_PRECISION; } } } @Override public void setScale( String scale ) { if ( ( scale == null ) || ( scale.trim().length() == 0 ) ) { this.scale = isDecimalType() ? ParquetSpec.DEFAULT_DECIMAL_SCALE : 0; } else { this.scale = Integer.valueOf( scale ); if ( ( this.scale < 0 ) ) { this.scale = isDecimalType() ? ParquetSpec.DEFAULT_DECIMAL_SCALE : 0; } } } } ================================================ FILE: kettle-plugins/formats-meta/src/main/java/org/pentaho/big/data/kettle/plugins/formats/parquet/output/ParquetOutputMetaBase.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. 
* * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.formats.parquet.output; import org.apache.commons.vfs2.FileObject; import org.apache.parquet.hadoop.metadata.CompressionCodecName; import org.pentaho.big.data.kettle.plugins.formats.parquet.ParquetTypeConverter; import org.pentaho.di.core.bowl.Bowl; import org.pentaho.di.core.Const; import org.pentaho.di.core.database.DatabaseMeta; import org.pentaho.di.core.exception.KettleException; import org.pentaho.di.core.exception.KettleFileException; import org.pentaho.di.core.exception.KettleXMLException; import org.pentaho.di.core.injection.Injection; import org.pentaho.di.core.injection.InjectionDeep; import org.pentaho.di.core.util.StringUtil; import org.pentaho.di.core.util.Utils; import org.pentaho.di.core.variables.VariableSpace; import org.pentaho.di.core.vfs.AliasedFileObject; import org.pentaho.di.core.vfs.KettleVFS; import org.pentaho.di.core.xml.XMLHandler; import org.pentaho.di.i18n.BaseMessages; import org.pentaho.di.repository.ObjectId; import org.pentaho.di.repository.Repository; import org.pentaho.di.trans.step.BaseStepMeta; import org.pentaho.di.trans.step.StepMetaInterface; import org.pentaho.di.workarounds.ResolvableResource; import org.pentaho.hadoop.shim.api.format.ParquetSpec; import org.pentaho.metastore.api.IMetaStore; import org.w3c.dom.Node; import java.text.SimpleDateFormat; import java.util.ArrayList; import java.util.Date; import java.util.List; import java.util.function.Function; /** * Parquet output meta step without Hadoop-dependent classes. Required for reading the meta in the Spark native code. * * @author */ public abstract class ParquetOutputMetaBase extends BaseStepMeta implements StepMetaInterface, ResolvableResource { private static final Class PKG = ParquetOutputMetaBase.class; @Injection( name = "COMPRESSION" ) public String compressionType; @Injection( name = "PARQUET_VERSION" ) public String parquetVersion; @Injection( name = "ROW_GROUP_SIZE" ) public String rowGroupSize; @Injection( name = "DATA_PAGE_SIZE" ) public String dataPageSize; @Injection( name = "ENABLE_DICTIONARY" ) public boolean enableDictionary; @Injection( name = "DICT_PAGE_SIZE" ) public String dictPageSize; @Injection( name = "OVERRIDE_OUTPUT" ) public boolean overrideOutput; /** Flag: add the date in the filename */ @Injection( name = "INC_DATE_IN_FILENAME" ) private boolean dateInFilename; /** Flag: add the time in the filename */ @Injection( name = "INC_TIME_IN_FILENAME" ) private boolean timeInFilename; @Injection( name = "DATE_FORMAT" ) private String dateTimeFormat; /** The file extension in case of a generated filename */ @Injection( name = "EXTENSION" ) private String extension; @Injection( name = "FILENAME", group = "FILENAME_LINES" ) public String filename; @InjectionDeep private List outputFields = new ArrayList(); @Override public void setDefault() { outputFields = new ArrayList(); dictPageSize = String.valueOf( 1024 ); extension = "parquet"; } public String getFilename() { return filename; } public void setFilename( String filename ) { this.filename = filename; } public boolean isEnableDictionary() { return enableDictionary; } public void setEnableDictionary( boolean enableDictionary ) { this.enableDictionary = enableDictionary; } public boolean isOverrideOutput() { return overrideOutput; } public void setOverrideOutput( boolean overrideOutput ) { this.overrideOutput = overrideOutput; } public boolean isDateInFilename() { return
dateInFilename; } public void setDateInFilename( boolean dateInFilename ) { this.dateInFilename = dateInFilename; } public boolean isTimeInFilename() { return timeInFilename; } public void setTimeInFilename( boolean timeInFilename ) { this.timeInFilename = timeInFilename; } public String getDateTimeFormat() { return dateTimeFormat; } public void setDateTimeFormat( String dateTimeFormat ) { this.dateTimeFormat = dateTimeFormat; } public String getExtension() { return extension; } public void setExtension( String extension ) { this.extension = extension; } public List getOutputFields() { return outputFields; } public void setOutputFields( List outputFields ) { this.outputFields = outputFields; } @Override public void loadXML( Node stepnode, List databases, IMetaStore metaStore ) throws KettleXMLException { readData( stepnode, metaStore ); } private void readData( Node stepnode, IMetaStore metastore ) throws KettleXMLException { try { filename = XMLHandler.getTagValue( stepnode, "filename" ); overrideOutput = "Y".equalsIgnoreCase( ( XMLHandler.getTagValue( stepnode, "overrideOutput" ) ) ); enableDictionary = "Y".equalsIgnoreCase( ( XMLHandler.getTagValue( stepnode, "enableDictionary" ) ) ); compressionType = XMLHandler.getTagValue( stepnode, "compression" ); parquetVersion = XMLHandler.getTagValue( stepnode, "parquetVersion" ); rowGroupSize = XMLHandler.getTagValue( stepnode, "rowGroupSize" ); dataPageSize = XMLHandler.getTagValue( stepnode, "dataPageSize" ); dictPageSize = XMLHandler.getTagValue( stepnode, "dictPageSize" ); extension = XMLHandler.getTagValue( stepnode, "extension" ); dateInFilename = "Y".equalsIgnoreCase( XMLHandler.getTagValue( stepnode, "dateInFilename" ) ); timeInFilename = "Y".equalsIgnoreCase( ( XMLHandler.getTagValue( stepnode, "timeInFilename" ) ) ); dateTimeFormat = XMLHandler.getTagValue( stepnode, "dateTimeFormat" ); Node fields = XMLHandler.getSubNode( stepnode, "fields" ); int nrfields = XMLHandler.countNodes( fields, "field" ); List parquetOutputFields = new ArrayList<>(); for ( int i = 0; i < nrfields; i++ ) { Node fnode = XMLHandler.getSubNodeByNr( fields, "field", i ); ParquetOutputField outputField = new ParquetOutputField(); outputField.setFormatFieldName( XMLHandler.getTagValue( fnode, "path" ) ); outputField.setPentahoFieldName( XMLHandler.getTagValue( fnode, "name" ) ); int parquetTypeId = getParquetTypeId( XMLHandler.getTagValue( fnode, "type" ) ); outputField.setFormatType( parquetTypeId ); outputField.setPrecision( XMLHandler.getTagValue( fnode, "precision" ) ); outputField.setScale( XMLHandler.getTagValue( fnode, "scale" ) ); outputField.setAllowNull( "Y".equalsIgnoreCase( XMLHandler.getTagValue( fnode, "nullable" ) ) ); outputField.setDefaultValue( XMLHandler.getTagValue( fnode, "default" ) ); parquetOutputFields.add( outputField ); } this.outputFields = parquetOutputFields; } catch ( Exception e ) { throw new KettleXMLException( "Unable to load step info from XML", e ); } } @Override public String getXML() { StringBuffer retval = new StringBuffer( 800 ); if ( parentStepMeta != null && parentStepMeta.getParentTransMeta() != null ) { parentStepMeta.getParentTransMeta().getNamedClusterEmbedManager().registerUrl( filename ); } retval.append( " " ).append( XMLHandler.addTagValue( "filename", filename ) ); retval.append( " " ).append( XMLHandler.addTagValue( "overrideOutput", overrideOutput ) ); retval.append( " " ).append( XMLHandler.addTagValue( "compression", compressionType ) ); retval.append( " " ).append( XMLHandler.addTagValue( "parquetVersion", 
parquetVersion ) ); retval.append( " " ).append( XMLHandler.addTagValue( "enableDictionary", enableDictionary ) ); retval.append( " " ).append( XMLHandler.addTagValue( "dictPageSize", dictPageSize ) ); retval.append( " " ).append( XMLHandler.addTagValue( "rowGroupSize", rowGroupSize ) ); retval.append( " " ).append( XMLHandler.addTagValue( "dataPageSize", dataPageSize ) ); retval.append( " " ).append( XMLHandler.addTagValue( "extension", extension ) ); retval.append( " " ).append( XMLHandler.addTagValue( "dateInFilename", dateInFilename ) ); retval.append( " " ).append( XMLHandler.addTagValue( "timeInFilename", timeInFilename ) ); retval.append( " " ).append( XMLHandler.addTagValue( "dateTimeFormat", dateTimeFormat ) ); retval.append( " " ).append( Const.CR ); for ( int i = 0; i < outputFields.size(); i++ ) { ParquetOutputField field = outputFields.get( i ); if ( field.getPentahoFieldName() != null && field.getPentahoFieldName().length() != 0 ) { retval.append( " " ).append( Const.CR ); retval.append( " " ).append( XMLHandler.addTagValue( "path", field.getFormatFieldName() ) ); retval.append( " " ).append( XMLHandler.addTagValue( "name", field.getPentahoFieldName() ) ); retval.append( " " ).append( XMLHandler.addTagValue( "type", field.getFormatType() ) ); retval.append( " " ).append( XMLHandler.addTagValue( "precision", field.getPrecision() ) ); retval.append( " " ).append( XMLHandler.addTagValue( "scale", field.getScale() ) ); retval.append( " " ).append( XMLHandler.addTagValue( "nullable", field.getAllowNull() ) ); retval.append( " " ).append( XMLHandler.addTagValue( "default", field.getDefaultValue() ) ); retval.append( " " ).append( Const.CR ); } } retval.append( " " ).append( Const.CR ); return retval.toString(); } private int getParquetTypeId( String savedType ) { int parquetTypeId = 0; try { parquetTypeId = Integer.parseInt( savedType ); } catch ( NumberFormatException e ) { String parquetTypeName = ParquetTypeConverter.convertToParquetType( savedType ); for ( ParquetSpec.DataType parquetType : ParquetSpec.DataType.values() ) { if ( parquetType.getName().equals( parquetTypeName ) ) { parquetTypeId = parquetType.getId(); break; } } } return parquetTypeId; } @Override public void readRep( Repository rep, IMetaStore metaStore, ObjectId id_step, List databases ) throws KettleException { try { filename = rep.getStepAttributeString( id_step, "filename" ); overrideOutput = rep.getStepAttributeBoolean( id_step, "overrideOutput" ); compressionType = rep.getStepAttributeString( id_step, "compression" ); parquetVersion = rep.getStepAttributeString( id_step, "parquetVersion" ); enableDictionary = rep.getStepAttributeBoolean( id_step, "enableDictionary" ); dictPageSize = rep.getStepAttributeString( id_step, "dictPageSize" ); rowGroupSize = rep.getStepAttributeString( id_step, "rowGroupSize" ); dataPageSize = rep.getStepAttributeString( id_step, "dataPageSize" ); extension = rep.getStepAttributeString( id_step, "extension" ); dateInFilename = rep.getStepAttributeBoolean( id_step, "dateInFilename" ); timeInFilename = rep.getStepAttributeBoolean( id_step, "timeInFilename" ); dateTimeFormat = rep.getStepAttributeString( id_step, "dateTimeFormat" ); // using the "type" column to get the number of field rows because "type" is guaranteed not to be null. 
int nrfields = rep.countNrStepAttributes( id_step, "type" ); List parquetOutputFields = new ArrayList<>(); for ( int i = 0; i < nrfields; i++ ) { ParquetOutputField outputField = new ParquetOutputField(); outputField.setFormatFieldName( rep.getStepAttributeString( id_step, i, "path" ) ); outputField.setPentahoFieldName( rep.getStepAttributeString( id_step, i, "name" ) ); int parquetTypeId = getParquetTypeId( rep.getStepAttributeString( id_step, i, "type" ) ); outputField.setFormatType( parquetTypeId ); outputField.setPrecision( rep.getStepAttributeString( id_step, i, "precision" ) ); outputField.setScale( rep.getStepAttributeString( id_step, i, "scale" ) ); outputField.setAllowNull( rep.getStepAttributeBoolean( id_step, i, "nullable" ) ); outputField.setDefaultValue( rep.getStepAttributeString( id_step, i, "default" ) ); parquetOutputFields.add( outputField ); } this.outputFields = parquetOutputFields; } catch ( Exception e ) { throw new KettleException( "Unexpected error reading step information from the repository", e ); } } @Override public void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_transformation, ObjectId id_step ) throws KettleException { try { rep.saveStepAttribute( id_transformation, id_step, "filename", filename ); rep.saveStepAttribute( id_transformation, id_step, "overrideOutput", overrideOutput ); rep.saveStepAttribute( id_transformation, id_step, "compression", compressionType ); rep.saveStepAttribute( id_transformation, id_step, "parquetVersion", parquetVersion ); rep.saveStepAttribute( id_transformation, id_step, "enableDictionary", enableDictionary ); rep.saveStepAttribute( id_transformation, id_step, "dictPageSize", dictPageSize ); rep.saveStepAttribute( id_transformation, id_step, "rowGroupSize", rowGroupSize ); rep.saveStepAttribute( id_transformation, id_step, "dataPageSize", dataPageSize ); rep.saveStepAttribute( id_transformation, id_step, "extension", extension ); rep.saveStepAttribute( id_transformation, id_step, "dateInFilename", dateInFilename ); rep.saveStepAttribute( id_transformation, id_step, "timeInFilename", timeInFilename ); rep.saveStepAttribute( id_transformation, id_step, "dateTimeFormat", dateTimeFormat ); for ( int i = 0; i < outputFields.size(); i++ ) { ParquetOutputField field = outputFields.get( i ); rep.saveStepAttribute( id_transformation, id_step, i, "path", field.getFormatFieldName() ); rep.saveStepAttribute( id_transformation, id_step, i, "name", field.getPentahoFieldName() ); rep.saveStepAttribute( id_transformation, id_step, i, "type", field.getFormatType() ); rep.saveStepAttribute( id_transformation, id_step, i, "precision", field.getPrecision() ); rep.saveStepAttribute( id_transformation, id_step, i, "scale", field.getScale() ); rep.saveStepAttribute( id_transformation, id_step, i, "nullable", field.getAllowNull() ); rep.saveStepAttribute( id_transformation, id_step, i, "default", field.getDefaultValue() ); } } catch ( Exception e ) { throw new KettleException( "Unable to save step information to the repository for id_step=" + id_step, e ); } } @Override public void resolve( Bowl bowl ) { if ( filename != null && !filename.isEmpty() ) { try { String realFileName = getParentStepMeta().getParentTransMeta().environmentSubstitute( filename ); FileObject fileObject = KettleVFS.getInstance( bowl ).getFileObject( realFileName ); if ( AliasedFileObject.isAliasedFile( fileObject ) ) { filename = ( (AliasedFileObject) fileObject ).getAELSafeURIString(); } } catch ( KettleFileException e ) { throw new RuntimeException( e ); } } } 
public String constructOutputFilename() { String outputFileName = filename; if ( dateTimeFormat != null && !dateTimeFormat.isEmpty() ) { String dateTimeFormatPattern = getParentStepMeta().getParentTransMeta().environmentSubstitute( dateTimeFormat ); outputFileName += new SimpleDateFormat( dateTimeFormatPattern ).format( new Date() ); } else { if ( dateInFilename ) { outputFileName += '_' + new SimpleDateFormat( "yyyyMMdd" ).format( new Date() ); } if ( timeInFilename ) { outputFileName += '_' + new SimpleDateFormat( "HHmmss" ).format( new Date() ); } } if ( extension != null && !extension.isEmpty() ) { outputFileName += '.' + extension; } return outputFileName; } public int getRowGroupSize( VariableSpace vspace ) { return parseReplace( rowGroupSize, vspace, str -> Integer.parseInt( str ), 0 ); } protected T parseReplace( String value, VariableSpace vspace, Function parser, T defaultValue ) { String replaced = vspace != null ? vspace.environmentSubstitute( value ) : value; if ( !Utils.isEmpty( replaced ) ) { try { return parser.apply( replaced ); } catch ( Exception e ) { // ignored } } return defaultValue; } public String getRowGroupSize() { return rowGroupSize; } public void setRowGroupSize( String value ) { rowGroupSize = value; } public String getCompressionType() { return StringUtil.isVariable( compressionType ) ? compressionType : getCompressionType( null ).toString(); } public void setCompressionType( String value ) { compressionType = StringUtil.isVariable( value ) ? value : parseFromToString( value, CompressionCodecName.values(), CompressionCodecName.UNCOMPRESSED ).name(); } public CompressionCodecName getCompressionType(VariableSpace vspace ) { return parseReplace( compressionType, vspace, str -> findCompressionType( str ), CompressionCodecName.UNCOMPRESSED ); } public String getParquetVersion() { return StringUtil.isVariable( parquetVersion ) ? parquetVersion : getParquetVersion( null ).toString(); } public void setParquetVersion( String value ) { parquetVersion = StringUtil.isVariable( value ) ? 
value : parseFromToString( value, ParquetVersion.values(), ParquetVersion.PARQUET_1 ).name(); } public ParquetVersion getParquetVersion( VariableSpace vspace ) { return parseReplace( parquetVersion, vspace, str -> findParquetVersion( str ), ParquetVersion.PARQUET_1 ); } public int getDataPageSize( VariableSpace vspace ) { return parseReplace( dataPageSize, vspace, s -> Integer.parseInt( s ), 0 ); } public String getDataPageSize() { return dataPageSize; } public void setDataPageSize( String dataPageSize ) { this.dataPageSize = dataPageSize; } public int getDictPageSize( VariableSpace vspace ) { return parseReplace( dictPageSize, vspace, s -> Integer.parseInt( s ), 0 ); } public String getDictPageSize() { return dictPageSize; } public void setDictPageSize( String dictPageSize ) { this.dictPageSize = dictPageSize; } public String[] getCompressionTypes() { return getStrings( CompressionCodecName.values() ); } public String[] getVersionTypes() { return getStrings( ParquetVersion.values() ); } private CompressionCodecName findCompressionType( String str ) { try { return CompressionCodecName.valueOf( str ); } catch ( Throwable th ) { return parseFromToString( str, CompressionCodecName.values(), CompressionCodecName.UNCOMPRESSED ); } } private ParquetVersion findParquetVersion( String str ) { try { return ParquetVersion.valueOf( str ); } catch ( Throwable th ) { return parseFromToString( str, ParquetVersion.values(), ParquetVersion.PARQUET_1 ); } } public static enum ParquetVersion { PARQUET_1( "Parquet 1.0" ), PARQUET_2( "Parquet 2.0" ); private final String uiName; private ParquetVersion( String name ) { this.uiName = name; } @Override public String toString() { return uiName; } } protected static String[] getStrings( T[] objects ) { String[] names = new String[ objects.length ]; int i = 0; for ( T obj : objects ) { names[ i++ ] = obj.toString(); } return names; } protected static T parseFromToString( String str, T[] values, T defaultValue ) { if ( !Utils.isEmpty( str ) ) { for ( T type : values ) { if ( str.equalsIgnoreCase( type.toString() ) ) { return type; } } } return defaultValue; } private static String getMsg( String key ) { return BaseMessages.getString( PKG, key ); } } ================================================ FILE: kettle-plugins/formats-meta/src/main/resources/org/pentaho/big/data/kettle/plugins/formats/parquet/output/messages/messages_en_US.properties ================================================ ParquetOutput.EncodingType.PLAIN=Plain ParquetOutput.EncodingType.DICTIONARY=Dictionary ParquetOutput.EncodingType.BIT_PACKED=Bit packed ParquetOutput.EncodingType.RLE=RLE ParquetOutput.CompressionType.NONE=None ================================================ FILE: kettle-plugins/formats-meta/src/test/java/org/pentaho/big/data/kettle/plugins/formats/orc/OrcInputFieldTest.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file.
* * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.formats.orc; import org.junit.Test; import org.pentaho.hadoop.shim.api.format.OrcSpec; import static org.junit.Assert.assertEquals; import static org.junit.Assert.fail; public class OrcInputFieldTest { @Test public void testSetOrcTypeByEnumToString() { // Test setting ORC type using enum.toString() format OrcInputField field = new OrcInputField(); field.setOrcType( "BIGINT" ); assertEquals( "BIGINT type should be set correctly", OrcSpec.DataType.BIGINT, field.getOrcType() ); assertEquals( "Format type ID should match BIGINT", OrcSpec.DataType.BIGINT.getId(), field.getFormatType() ); } @Test public void testSetOrcTypeByName() { // Test setting ORC type using getName() format OrcInputField field = new OrcInputField(); field.setOrcType( "BigInt" ); assertEquals( "BigInt name should be set correctly", OrcSpec.DataType.BIGINT, field.getOrcType() ); assertEquals( "Format type ID should match BIGINT", OrcSpec.DataType.BIGINT.getId(), field.getFormatType() ); } @Test public void testSetOrcTypeCaseInsensitive() { // Test that the method is case-insensitive OrcInputField field1 = new OrcInputField(); field1.setOrcType( "string" ); assertEquals( "Lowercase 'string' should work", OrcSpec.DataType.STRING, field1.getOrcType() ); OrcInputField field2 = new OrcInputField(); field2.setOrcType( "STRING" ); assertEquals( "Uppercase 'STRING' should work", OrcSpec.DataType.STRING, field2.getOrcType() ); OrcInputField field3 = new OrcInputField(); field3.setOrcType( "StRiNg" ); assertEquals( "Mixed case 'StRiNg' should work", OrcSpec.DataType.STRING, field3.getOrcType() ); } @Test public void testSetOrcTypeAllDataTypes() { // Test all ORC data types can be set correctly for ( OrcSpec.DataType dataType : OrcSpec.DataType.values() ) { // Test by enum toString() OrcInputField field1 = new OrcInputField(); field1.setOrcType( dataType.toString() ); assertEquals( "Setting by toString: " + dataType, dataType, field1.getOrcType() ); // Test by name OrcInputField field2 = new OrcInputField(); field2.setOrcType( dataType.getName() ); assertEquals( "Setting by name: " + dataType.getName(), dataType, field2.getOrcType() ); } } @Test public void testSetOrcTypeInvalidValue() { // Test that invalid type doesn't change the field OrcInputField field = new OrcInputField(); int initialFormatType = field.getFormatType(); field.setOrcType( "INVALID_TYPE" ); assertEquals( "Invalid type should not change formatType", initialFormatType, field.getFormatType() ); } @Test public void testSetOrcTypeEmptyString() { // Test that empty string doesn't change the field OrcInputField field = new OrcInputField(); int initialFormatType = field.getFormatType(); field.setOrcType( "" ); assertEquals( "Empty string should not change formatType", initialFormatType, field.getFormatType() ); } @Test public void testSetOrcTypeNull() { // Test that null doesn't throw exception and doesn't change the field OrcInputField field = new OrcInputField(); int initialFormatType = field.getFormatType(); try { field.setOrcType( (String) null ); assertEquals( "Null should not change formatType", initialFormatType, field.getFormatType() ); } catch ( NullPointerException e ) { fail( "Null should not throw NullPointerException" ); } } @Test public void testSetOrcTypeWithWhitespace() { // Test that types with leading/trailing whitespace don't match OrcInputField field = new OrcInputField(); int initialFormatType = 
field.getFormatType(); field.setOrcType( " STRING " ); assertEquals( "Type with whitespace should not match", initialFormatType, field.getFormatType() ); } @Test public void testSetOrcTypeTimestamp() { // Test specific type - TIMESTAMP OrcInputField field = new OrcInputField(); field.setOrcType( "Timestamp" ); assertEquals( "Timestamp type should be set correctly", OrcSpec.DataType.TIMESTAMP, field.getOrcType() ); } @Test public void testSetOrcTypeDate() { // Test specific type - DATE OrcInputField field = new OrcInputField(); field.setOrcType( "DATE" ); assertEquals( "Date type should be set correctly", OrcSpec.DataType.DATE, field.getOrcType() ); } @Test public void testSetOrcTypeDecimal() { // Test specific type - DECIMAL OrcInputField field = new OrcInputField(); field.setOrcType( "decimal" ); assertEquals( "Decimal type should be set correctly", OrcSpec.DataType.DECIMAL, field.getOrcType() ); } @Test public void testSetOrcTypeBoolean() { // Test specific type - BOOLEAN OrcInputField field = new OrcInputField(); field.setOrcType( "Boolean" ); assertEquals( "Boolean type should be set correctly", OrcSpec.DataType.BOOLEAN, field.getOrcType() ); } @Test public void testSetOrcTypeOverwrite() { // Test that setting type multiple times overwrites previous value OrcInputField field = new OrcInputField(); field.setOrcType( "STRING" ); assertEquals( "First type should be STRING", OrcSpec.DataType.STRING, field.getOrcType() ); field.setOrcType( "INTEGER" ); assertEquals( "Second type should overwrite to INTEGER", OrcSpec.DataType.INTEGER, field.getOrcType() ); field.setOrcType( "DOUBLE" ); assertEquals( "Third type should overwrite to DOUBLE", OrcSpec.DataType.DOUBLE, field.getOrcType() ); } @Test public void testSetOrcTypeIntegerBothFormats() { // Test that INTEGER type can be set using both "Int" (display name) and "INTEGER" (enum name) // Test with "Int" (display name from getName()) OrcInputField field1 = new OrcInputField(); field1.setOrcType( "Int" ); assertEquals( "Setting with 'Int' should result in INTEGER type", OrcSpec.DataType.INTEGER, field1.getOrcType() ); assertEquals( "Format type ID should match INTEGER", OrcSpec.DataType.INTEGER.getId(), field1.getFormatType() ); // Test with "INTEGER" (enum toString()) OrcInputField field2 = new OrcInputField(); field2.setOrcType( "INTEGER" ); assertEquals( "Setting with 'INTEGER' should result in INTEGER type", OrcSpec.DataType.INTEGER, field2.getOrcType() ); assertEquals( "Format type ID should match INTEGER", OrcSpec.DataType.INTEGER.getId(), field2.getFormatType() ); // Test with lowercase "int" OrcInputField field3 = new OrcInputField(); field3.setOrcType( "int" ); assertEquals( "Setting with lowercase 'int' should result in INTEGER type", OrcSpec.DataType.INTEGER, field3.getOrcType() ); assertEquals( "Format type ID should match INTEGER", OrcSpec.DataType.INTEGER.getId(), field3.getFormatType() ); // Test with lowercase "integer" OrcInputField field4 = new OrcInputField(); field4.setOrcType( "integer" ); assertEquals( "Setting with lowercase 'integer' should result in INTEGER type", OrcSpec.DataType.INTEGER, field4.getOrcType() ); assertEquals( "Format type ID should match INTEGER", OrcSpec.DataType.INTEGER.getId(), field4.getFormatType() ); // Test with mixed case "InTeGeR" OrcInputField field5 = new OrcInputField(); field5.setOrcType( "InTeGeR" ); assertEquals( "Setting with mixed case 'InTeGeR' should result in INTEGER type", OrcSpec.DataType.INTEGER, field5.getOrcType() ); assertEquals( "Format type ID should match INTEGER", 
OrcSpec.DataType.INTEGER.getId(), field5.getFormatType() ); // Verify both result in the same type assertEquals( "Both 'Int' and 'INTEGER' should result in the same type", field1.getOrcType(), field2.getOrcType() ); assertEquals( "Both should have the same format type ID", field1.getFormatType(), field2.getFormatType() ); } } ================================================ FILE: kettle-plugins/formats-meta/src/test/java/org/pentaho/big/data/kettle/plugins/formats/orc/input/OrcInputMetaBaseTest.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.formats.orc.input; import static org.junit.Assert.assertEquals; import static org.mockito.Matchers.anyString; import static org.mockito.Mockito.doReturn; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.spy; import org.junit.Before; import org.junit.Test; import org.pentaho.big.data.kettle.plugins.formats.FormatInputFile; import org.pentaho.di.core.variables.VariableSpace; import org.pentaho.di.trans.Trans; import org.pentaho.di.trans.TransMeta; import org.pentaho.di.trans.step.StepDataInterface; import org.pentaho.di.trans.step.StepInterface; import org.pentaho.di.trans.step.StepMeta; import org.pentaho.di.trans.steps.named.cluster.NamedClusterEmbedManager; public class OrcInputMetaBaseTest { private static final String FILE_NAME_VALID_PATH = "path/to/file"; private OrcInputMetaBase inputMeta; private VariableSpace variableSpace; @Before public void setUp() throws Exception { NamedClusterEmbedManager manager = mock( NamedClusterEmbedManager.class ); TransMeta parentTransMeta = mock( TransMeta.class ); doReturn( manager ).when( parentTransMeta ).getNamedClusterEmbedManager(); StepMeta parentStepMeta = mock( StepMeta.class ); doReturn( parentTransMeta ).when( parentStepMeta ).getParentTransMeta(); inputMeta = new OrcInputMetaBase() { @Override public StepDataInterface getStepData() { return null; } @Override public StepInterface getStep( StepMeta stepMeta, StepDataInterface stepDataInterface, int copyNr, TransMeta transMeta, Trans trans ) { return null; } }; inputMeta.setParentStepMeta( parentStepMeta ); inputMeta = spy( inputMeta ); variableSpace = mock( VariableSpace.class ); doReturn( "" ).when( variableSpace ).environmentSubstitute( anyString() ); doReturn( FILE_NAME_VALID_PATH ).when( variableSpace ).environmentSubstitute( FILE_NAME_VALID_PATH ); } @Test public void testGetXmlWorksIfWeUpdateOnlyPartOfInputFilesInformation() throws Exception { inputMeta.inputFiles = new FormatInputFile(); inputMeta.inputFiles.fileName = new String[] { FILE_NAME_VALID_PATH }; inputMeta.getXML(); assertEquals( inputMeta.inputFiles.fileName.length, inputMeta.inputFiles.fileMask.length ); assertEquals( inputMeta.inputFiles.fileName.length, inputMeta.inputFiles.excludeFileMask.length ); assertEquals( inputMeta.inputFiles.fileName.length, inputMeta.inputFiles.fileRequired.length ); assertEquals( inputMeta.inputFiles.fileName.length, inputMeta.inputFiles.includeSubFolders.length ); //specific for bigdata format assertEquals( inputMeta.inputFiles.fileName.length, inputMeta.inputFiles.environment.length ); } } 
================================================ FILE: kettle-plugins/formats-meta/src/test/java/org/pentaho/big/data/kettle/plugins/formats/orc/output/OrcOutputFieldTest.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.formats.orc.output; import org.junit.Test; import org.pentaho.hadoop.shim.api.format.OrcSpec; import static org.junit.Assert.*; public class OrcOutputFieldTest { @Test public void setFormatTypeTest() { //Names must be unique to each data type and should be addressable like the id OrcOutputField f; for ( OrcSpec.DataType dataType : OrcSpec.DataType.values() ) { //Set by Name f = new OrcOutputField(); f.setFormatType( dataType.getName() ); assertEquals( "Checking setting of \"" + dataType.getName() + "\"", dataType, f.getOrcType() ); //Set by Id f = new OrcOutputField(); f.setFormatType( String.valueOf( dataType.getId() ) ); assertEquals( "Checking setting of \"" + dataType.getId() + "\"", dataType, f.getOrcType() ); //Set by Enum f = new OrcOutputField(); f.setFormatType( String.valueOf( dataType.toString() ) ); assertEquals( "Checking setting of \"" + dataType.toString() + "\"", dataType, f.getOrcType() ); } } } ================================================ FILE: kettle-plugins/formats-meta/src/test/java/org/pentaho/big/data/kettle/plugins/formats/orc/output/OrcOutputMetabaseTest.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.formats.orc.output; import org.apache.orc.CompressionKind; import org.junit.Assert; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import org.mockito.runners.MockitoJUnitRunner; import org.pentaho.di.trans.Trans; import org.pentaho.di.trans.TransMeta; import org.pentaho.di.trans.step.StepDataInterface; import org.pentaho.di.trans.step.StepInterface; import org.pentaho.di.trans.step.StepMeta; import static org.mockito.Mockito.spy; /** * Created by rmansoor on 4/8/2018. 
*/ @RunWith( MockitoJUnitRunner.class ) public class OrcOutputMetabaseTest { private OrcOutputMetaBase metaBase; @Before public void setUp() throws Exception { metaBase = spy( new OrcOutputMetaBase() { @Override public StepInterface getStep( StepMeta stepMeta, StepDataInterface stepDataInterface, int copyNr, TransMeta transMeta, Trans trans ) { return null; } @Override public StepDataInterface getStepData() { return null; } } ); } @Test public void setCompressionType() { metaBase.setCompressionType( "snappy" ); Assert.assertTrue( metaBase.getCompressionType().equals( CompressionKind.SNAPPY.toString() ) ); metaBase.setCompressionType( "Snappy" ); Assert.assertTrue( metaBase.getCompressionType().equals( CompressionKind.SNAPPY.toString() ) ); metaBase.setCompressionType( "SNAPPY" ); Assert.assertTrue( metaBase.getCompressionType().equals( CompressionKind.SNAPPY.toString() ) ); metaBase.setCompressionType( "zlib" ); Assert.assertTrue( metaBase.getCompressionType().equals( CompressionKind.ZLIB.toString() ) ); metaBase.setCompressionType( "Zlib" ); Assert.assertTrue( metaBase.getCompressionType().equals( CompressionKind.ZLIB.toString() ) ); metaBase.setCompressionType( "ZLIB" ); Assert.assertTrue( metaBase.getCompressionType().equals( CompressionKind.ZLIB.toString() ) ); metaBase.setCompressionType( "lzo" ); Assert.assertTrue( metaBase.getCompressionType().equals( CompressionKind.LZO.toString() ) ); metaBase.setCompressionType( "Lzo" ); Assert.assertTrue( metaBase.getCompressionType().equals( CompressionKind.LZO.toString() ) ); metaBase.setCompressionType( "LZO" ); Assert.assertTrue( metaBase.getCompressionType().equals( CompressionKind.LZO.toString() ) ); metaBase.setCompressionType( "None" ); Assert.assertTrue( metaBase.getCompressionType().equals( CompressionKind.NONE.toString() ) ); metaBase.setCompressionType( "none" ); Assert.assertTrue( metaBase.getCompressionType().equals( CompressionKind.NONE.toString() ) ); metaBase.setCompressionType( "NONE" ); Assert.assertTrue( metaBase.getCompressionType().equals( CompressionKind.NONE.toString() ) ); metaBase.setCompressionType( "lz4" ); Assert.assertTrue( metaBase.getCompressionType().equals( CompressionKind.LZ4.toString() ) ); metaBase.setCompressionType( "Lz4" ); Assert.assertTrue( metaBase.getCompressionType().equals( CompressionKind.LZ4.toString() ) ); metaBase.setCompressionType( "LZ4" ); Assert.assertTrue( metaBase.getCompressionType().equals( CompressionKind.LZ4.toString() ) ); metaBase.setCompressionType( "zstd" ); Assert.assertTrue( metaBase.getCompressionType().equals( CompressionKind.ZSTD.toString() ) ); metaBase.setCompressionType( "Zstd" ); Assert.assertTrue( metaBase.getCompressionType().equals( CompressionKind.ZSTD.toString() ) ); metaBase.setCompressionType( "ZSTD" ); Assert.assertTrue( metaBase.getCompressionType().equals( CompressionKind.ZSTD.toString() ) ); } } ================================================ FILE: kettle-plugins/formats-meta/src/test/java/org/pentaho/big/data/kettle/plugins/formats/parquet/input/ParquetInputMetaBaseTest.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. 
* * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.formats.parquet.input; import static org.junit.Assert.assertEquals; import static org.mockito.Matchers.anyString; import static org.mockito.Mockito.doReturn; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.spy; import org.junit.Before; import org.junit.Test; import org.pentaho.big.data.kettle.plugins.formats.FormatInputFile; import org.pentaho.di.core.variables.VariableSpace; import org.pentaho.di.trans.Trans; import org.pentaho.di.trans.TransMeta; import org.pentaho.di.trans.step.StepDataInterface; import org.pentaho.di.trans.step.StepInterface; import org.pentaho.di.trans.step.StepMeta; import org.pentaho.di.trans.steps.named.cluster.NamedClusterEmbedManager; public class ParquetInputMetaBaseTest { private static final String FILE_NAME_VALID_PATH = "path/to/file"; private ParquetInputMetaBase inputMeta; private VariableSpace variableSpace; @Before public void setUp() throws Exception { NamedClusterEmbedManager manager = mock( NamedClusterEmbedManager.class ); TransMeta parentTransMeta = mock( TransMeta.class ); doReturn( manager ).when( parentTransMeta ).getNamedClusterEmbedManager(); StepMeta parentStepMeta = mock( StepMeta.class ); doReturn( parentTransMeta ).when( parentStepMeta ).getParentTransMeta(); inputMeta = new ParquetInputMetaBase() { @Override public StepDataInterface getStepData() { return null; } @Override public StepInterface getStep( StepMeta stepMeta, StepDataInterface stepDataInterface, int copyNr, TransMeta transMeta, Trans trans ) { return null; } }; inputMeta.setParentStepMeta( parentStepMeta ); inputMeta = spy( inputMeta ); variableSpace = mock( VariableSpace.class ); doReturn( "" ).when( variableSpace ).environmentSubstitute( anyString() ); doReturn( FILE_NAME_VALID_PATH ).when( variableSpace ).environmentSubstitute( FILE_NAME_VALID_PATH ); } @Test public void testGetXmlWorksIfWeUpdateOnlyPartOfInputFilesInformation() throws Exception { inputMeta.inputFiles = new FormatInputFile(); inputMeta.inputFiles.fileName = new String[] { FILE_NAME_VALID_PATH }; inputMeta.getXML(); assertEquals( inputMeta.inputFiles.fileName.length, inputMeta.inputFiles.fileMask.length ); assertEquals( inputMeta.inputFiles.fileName.length, inputMeta.inputFiles.excludeFileMask.length ); assertEquals( inputMeta.inputFiles.fileName.length, inputMeta.inputFiles.fileRequired.length ); assertEquals( inputMeta.inputFiles.fileName.length, inputMeta.inputFiles.includeSubFolders.length ); //specific for bigdata format assertEquals( inputMeta.inputFiles.fileName.length, inputMeta.inputFiles.environment.length ); } } ================================================ FILE: kettle-plugins/formats-meta/src/test/java/org/pentaho/big/data/kettle/plugins/formats/parquet/output/ParquetOutputMetaBaseTest.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. 
* * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.formats.parquet.output; import org.apache.parquet.hadoop.metadata.CompressionCodecName; import org.junit.Assert; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import org.mockito.Mock; import org.mockito.runners.MockitoJUnitRunner; import org.pentaho.di.trans.Trans; import org.pentaho.di.trans.TransMeta; import org.pentaho.di.trans.step.StepDataInterface; import org.pentaho.di.trans.step.StepInterface; import org.pentaho.di.trans.step.StepMeta; import org.pentaho.di.trans.steps.named.cluster.NamedClusterEmbedManager; import static org.mockito.Matchers.eq; import static org.mockito.Mockito.spy; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; @RunWith( MockitoJUnitRunner.class ) public class ParquetOutputMetaBaseTest { @Mock StepMeta parentStepMeta; @Mock TransMeta parentTransMeta; @Mock NamedClusterEmbedManager namedClusterEmbedManager; private ParquetOutputMetaBase metaBase; @Before public void setUp() throws Exception { metaBase = spy( new ParquetOutputMetaBase() { @Override public StepInterface getStep( StepMeta stepMeta, StepDataInterface stepDataInterface, int copyNr, TransMeta transMeta, Trans trans ) { return null; } @Override public StepDataInterface getStepData() { return null; } } ); } @Test public void getXMLShouldCallRegisterUrl() { metaBase.setFilename( "hc://HC/fileName" ); when( parentStepMeta.getParentTransMeta() ).thenReturn( parentTransMeta ); when( parentTransMeta.getNamedClusterEmbedManager() ).thenReturn( namedClusterEmbedManager ); metaBase.setParentStepMeta( parentStepMeta ); metaBase.getXML(); verify( namedClusterEmbedManager ).registerUrl( eq( "hc://HC/fileName" ) ); } @Test public void setCompressionType() { metaBase.setCompressionType( "snappy" ); Assert.assertTrue( metaBase.getCompressionType().equals( CompressionCodecName.SNAPPY.toString() ) ); metaBase.setCompressionType( "Snappy" ); Assert.assertTrue( metaBase.getCompressionType().equals( CompressionCodecName.SNAPPY.toString() ) ); metaBase.setCompressionType( "SNAPPY" ); Assert.assertTrue( metaBase.getCompressionType().equals( CompressionCodecName.SNAPPY.toString() ) ); metaBase.setCompressionType( "gzip" ); Assert.assertTrue( metaBase.getCompressionType().equals( CompressionCodecName.GZIP.toString() ) ); metaBase.setCompressionType( "Gzip" ); Assert.assertTrue( metaBase.getCompressionType().equals( CompressionCodecName.GZIP.toString() ) ); metaBase.setCompressionType( "GZIP" ); Assert.assertTrue( metaBase.getCompressionType().equals( CompressionCodecName.GZIP.toString() ) ); metaBase.setCompressionType( "lzo" ); Assert.assertTrue( metaBase.getCompressionType().equals( CompressionCodecName.LZO.toString() ) ); metaBase.setCompressionType( "Lzo" ); Assert.assertTrue( metaBase.getCompressionType().equals( CompressionCodecName.LZO.toString() ) ); metaBase.setCompressionType( "LZO" ); Assert.assertTrue( metaBase.getCompressionType().equals( CompressionCodecName.LZO.toString() ) ); metaBase.setCompressionType( "brotli" ); Assert.assertTrue( metaBase.getCompressionType().equals( CompressionCodecName.BROTLI.toString() ) ); metaBase.setCompressionType( "Brotli" ); Assert.assertTrue( metaBase.getCompressionType().equals( CompressionCodecName.BROTLI.toString() ) ); metaBase.setCompressionType( "BROTLI" ); Assert.assertTrue( metaBase.getCompressionType().equals( CompressionCodecName.BROTLI.toString() ) ); 
metaBase.setCompressionType( "lz4" ); Assert.assertTrue( metaBase.getCompressionType().equals( CompressionCodecName.LZ4.toString() ) ); metaBase.setCompressionType( "Lz4" ); Assert.assertTrue( metaBase.getCompressionType().equals( CompressionCodecName.LZ4.toString() ) ); metaBase.setCompressionType( "LZ4" ); Assert.assertTrue( metaBase.getCompressionType().equals( CompressionCodecName.LZ4.toString() ) ); metaBase.setCompressionType( "zstd" ); Assert.assertTrue( metaBase.getCompressionType().equals( CompressionCodecName.ZSTD.toString() ) ); metaBase.setCompressionType( "Zstd" ); Assert.assertTrue( metaBase.getCompressionType().equals( CompressionCodecName.ZSTD.toString() ) ); metaBase.setCompressionType( "ZSTD" ); Assert.assertTrue( metaBase.getCompressionType().equals( CompressionCodecName.ZSTD.toString() ) ); metaBase.setCompressionType( "lz4_raw" ); Assert.assertTrue( metaBase.getCompressionType().equals( CompressionCodecName.LZ4_RAW.toString() ) ); metaBase.setCompressionType( "Lz4_raw" ); Assert.assertTrue( metaBase.getCompressionType().equals( CompressionCodecName.LZ4_RAW.toString() ) ); metaBase.setCompressionType( "LZ4_RAW" ); Assert.assertTrue( metaBase.getCompressionType().equals( CompressionCodecName.LZ4_RAW.toString() ) ); metaBase.setCompressionType( "uncompressed" ); Assert.assertTrue( metaBase.getCompressionType().equals( CompressionCodecName.UNCOMPRESSED.toString() ) ); metaBase.setCompressionType( "Uncompressed" ); Assert.assertTrue( metaBase.getCompressionType().equals( CompressionCodecName.UNCOMPRESSED.toString() ) ); metaBase.setCompressionType( "UNCOMPRESSED" ); Assert.assertTrue( metaBase.getCompressionType().equals( CompressionCodecName.UNCOMPRESSED.toString() ) ); } public void setParquetVersion() { metaBase.setParquetVersion( "Parquet 1.0" ); Assert.assertTrue( metaBase.getParquetVersion().equals( ParquetOutputMetaBase.ParquetVersion.PARQUET_1.toString() ) ); metaBase.setCompressionType( "PARQUET_1" ); Assert.assertTrue( metaBase.getCompressionType().equals( ParquetOutputMetaBase.ParquetVersion.PARQUET_1.toString() ) ); metaBase.setCompressionType( "Parquet 2.0" ); Assert.assertTrue( metaBase.getCompressionType().equals( ParquetOutputMetaBase.ParquetVersion.PARQUET_2.toString() ) ); metaBase.setCompressionType( "PARQUET_2" ); Assert.assertTrue( metaBase.getCompressionType().equals( ParquetOutputMetaBase.ParquetVersion.PARQUET_2.toString() ) ); metaBase.setCompressionType( "1235" ); Assert.assertTrue( metaBase.getCompressionType().equals( ParquetOutputMetaBase.ParquetVersion.PARQUET_1.toString() ) ); metaBase.setCompressionType( "ABC" ); Assert.assertTrue( metaBase.getCompressionType().equals( ParquetOutputMetaBase.ParquetVersion.PARQUET_1.toString() ) ); } } ================================================ FILE: kettle-plugins/formats-meta/src/test/resources/org/pentaho/big/data/kettle/plugins/formats/orc/input/OrcInput.xml ================================================ Orc Input OrcInputNew Y 1 none N SampleFileName SamplePath SampleName String false SampleDefault String 416 112 Y ================================================ FILE: kettle-plugins/guiTestActionHandlers/pom.xml ================================================ 4.0.0 pentaho pentaho-big-data-kettle-plugins 11.1.0.0-SNAPSHOT pentaho-big-data-kettle-plugins-guiTestActionHandlers 11.1.0.0-SNAPSHOT jar pentaho pentaho-big-data-api-runtimeTest ${project.version} pentaho-kettle kettle-core ${pdi.version} pentaho-kettle kettle-engine ${pdi.version} pentaho-kettle kettle-ui-swt ${pdi.version} 
================================================ FILE: kettle-plugins/guiTestActionHandlers/src/main/java/org/pentaho/big/data/plugins/gui/test/actionHandlers/ShowHelpDialogActionHandler.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.plugins.gui.test.actionHandlers; import org.eclipse.swt.widgets.Display; import org.pentaho.di.ui.core.dialog.ShowHelpDialog; import org.pentaho.di.ui.spoon.Spoon; import org.pentaho.runtime.test.action.RuntimeTestAction; import org.pentaho.runtime.test.action.RuntimeTestActionHandler; import org.pentaho.runtime.test.action.impl.HelpUrlPayload; /** * Created by bryan on 9/9/15. */ public class ShowHelpDialogActionHandler implements RuntimeTestActionHandler { @Override public boolean canHandle( RuntimeTestAction runtimeTestAction ) { return runtimeTestAction.getPayload() instanceof HelpUrlPayload; } @Override public void handle( RuntimeTestAction runtimeTestAction ) { // Cast checked in canHandle() final HelpUrlPayload helpUrlPayload = (HelpUrlPayload) runtimeTestAction.getPayload(); final Spoon spoon = Spoon.getInstance(); Display display = spoon.getDisplay(); Runnable showRunnable = new Runnable() { @Override public void run() { new ShowHelpDialog( spoon.getShell(), helpUrlPayload.getTitle(), helpUrlPayload.getUrl().toString(), helpUrlPayload.getHeader() ).open(); } }; if ( Thread.currentThread() == display.getThread() ) { showRunnable.run(); } else { display.asyncExec( showRunnable ); } } } ================================================ FILE: kettle-plugins/guiTestActionHandlers/src/main/resources/OSGI-INF/blueprint/blueprint.xml ================================================ ================================================ FILE: kettle-plugins/hadoop-cluster/pom.xml ================================================ 4.0.0 pentaho pentaho-big-data-kettle-plugins 11.1.0.0-SNAPSHOT pentaho-big-data-kettle-plugins-hadoop-cluster 11.1.0.0-SNAPSHOT pom Hadoop Cluster Plugin ui 11.1.0.0-SNAPSHOT 3.0.3 3.0.1 2.0 ${project.version} 11.1.0.0-SNAPSHOT 4.6 3.34.0 pentaho-kettle kettle-core ${pdi.version} provided pentaho-kettle kettle-ui-swt ${pdi.version} provided pentaho-kettle kettle-engine ${pdi.version} provided pentaho pentaho-big-data-impl-clusterTests ${project.version} provided pentaho pentaho-platform-extensions ${platform.version} provided * * javax.ws.rs javax.ws.rs-api ${dependency.javax.ws.rs-api.version} provided javax.servlet javax.servlet-api ${dependency.javax.servlet-api.version} provided org.eclipse.swt org.eclipse.swt.gtk.linux.x86_64 ${swt.version} org.eclipse.platform org.eclipse.jface ${jface.version} provided org.pentaho.di.plugins core-ui ${pdi.version} ================================================ FILE: kettle-plugins/hadoop-cluster/ui/pom.xml ================================================ 4.0.0 pentaho pentaho-big-data-kettle-plugins-hadoop-cluster 11.1.0.0-SNAPSHOT hadoop-cluster-ui 11.1.0.0-SNAPSHOT jar Hadoop Cluster Plugin UI 1.5.2 11.1.0.0-SNAPSHOT 4.0.0 org.pentaho.big.data.kettle.plugins.hadoopcluster.ui.dialog.wizard.NamedClusterDialog pentaho pentaho-big-data-impl-clusterTests ${pdi.version} pentaho 
pentaho-big-data-api-runtimeTest ${project.version} provided pentaho pentaho-big-data-impl-cluster ${project.version} provided pentaho-kettle kettle-core pentaho-kettle kettle-ui-swt pentaho-kettle kettle-engine org.pentaho.di.plugins pentaho-metastore-locator-api ${pdi.version} provided pentaho pentaho-platform-extensions pentaho pentaho-platform-core ${platform.version} provided org.pentaho shim-api-core ${pentaho-hadoop-shims.version} provided org.pentaho shim-api ${pentaho-hadoop-shims.version} provided pentaho pentaho-big-data-kettle-plugins-common-ui ${pdi.version} provided pentaho pentaho-big-data-impl-clusterTests org.eclipse.swt org.eclipse.swt.gtk.linux.x86_64 org.eclipse.platform org.eclipse.jface org.eclipse.platform org.eclipse.jface com.fasterxml.jackson.jaxrs jackson-jaxrs-json-provider ${fasterxml-jackson.version} provided org.ops4j.pax.web pax-web-spi ${pax-web.version} provided org.ops4j.pax.swissbox pax-swissbox-core 1.7.1 provided commons-configuration commons-configuration 1.6 org.apache.commons commons-collections4 4.1 org.apache.httpcomponents httpmime 4.5.14 commons-configuration commons-configuration 1.6 provided org.apache.httpcomponents httpmime 4.5.14 commons-configuration commons-configuration 1.6 provided org.apache.httpcomponents httpmime 4.5.14 javax.ws.rs javax.ws.rs-api javax.servlet javax.servlet-api pentaho pentaho-big-data-legacy ${project.version} compile pentaho pentaho-big-data-legacy-core ${project.version} compile org.pentaho.di.plugins core-ui org.apache.commons commons-fileupload2-core ${commons-fileupload.version} pentaho-kettle kettle-core ${pdi.version} tests test org.mockito mockito-core ${mockito4.version} test org.mockito mockito-inline ${mockito4.version} test commons-configuration commons-configuration 1.6 false src/main/resources **/* META-INF/**/* OSGI-INF/**/* true src/main/resources META-INF/**/* OSGI-INF/**/* ================================================ FILE: kettle-plugins/hadoop-cluster/ui/src/main/java/org/pentaho/big/data/kettle/plugins/hadoopcluster/ui/dialog/HadoopClusterDelegate.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. 
* * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.hadoopcluster.ui.dialog; import org.pentaho.big.data.kettle.plugins.hadoopcluster.ui.dialog.wizard.NamedClusterDialog; import org.pentaho.big.data.kettle.plugins.hadoopcluster.ui.dialog.wizard.util.CustomWizardDialog; import org.pentaho.di.base.AbstractMeta; import org.pentaho.di.core.logging.KettleLogStore; import org.pentaho.di.core.logging.LogChannelInterface; import org.pentaho.di.core.service.PluginServiceLoader; import org.pentaho.di.ui.spoon.Spoon; import org.pentaho.hadoop.shim.api.cluster.NamedClusterService; import org.pentaho.metastore.api.IMetaStore; import org.pentaho.metastore.locator.api.MetastoreLocator; import org.pentaho.runtime.test.RuntimeTester; import java.util.Collection; import java.util.Map; import java.util.function.Supplier; public class HadoopClusterDelegate { private static final Class PKG = HadoopClusterDelegate.class; private final Supplier spoonSupplier = Spoon::getInstance; private final RuntimeTester runtimeTester; private final NamedClusterService namedClusterService; private static final LogChannelInterface log = KettleLogStore.getLogChannelInterfaceFactory().create( "HadoopClusterDelegate" ); public HadoopClusterDelegate( NamedClusterService clusterService, RuntimeTester tester ) { namedClusterService = clusterService; runtimeTester = tester; } public void openDialog( String dialogState, Map urlParams ) { try { Collection metastoreLocators = PluginServiceLoader.loadServices( MetastoreLocator.class ); IMetaStore metastore = metastoreLocators.stream().findFirst().get().getMetastore(); CustomWizardDialog wizardDialog = new CustomWizardDialog( spoonSupplier.get().getShell(), new NamedClusterDialog( namedClusterService, metastore, spoonSupplier.get().getActiveMeta() == null ? spoonSupplier.get().getManagementBowl().getADefaultVariableSpace() : (AbstractMeta)spoonSupplier.get().getActiveMeta(), runtimeTester, urlParams, dialogState ) ); wizardDialog.open(); } catch ( Exception e ) { log.logError( e.getMessage() ); } } } ================================================ FILE: kettle-plugins/hadoop-cluster/ui/src/main/java/org/pentaho/big/data/kettle/plugins/hadoopcluster/ui/dialog/HadoopClusterDialog.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. 
================================================
FILE: kettle-plugins/hadoop-cluster/ui/src/main/java/org/pentaho/big/data/kettle/plugins/hadoopcluster/ui/dialog/HadoopClusterDialog.java
================================================
/*! ******************************************************************************
 *
 * Pentaho
 *
 * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com
 *
 * Use of this software is governed by the Business Source License included
 * in the LICENSE.TXT file.
 *
 * Change Date: 2029-07-20
 ******************************************************************************/

package org.pentaho.big.data.kettle.plugins.hadoopcluster.ui.dialog;

import org.eclipse.swt.SWT;
import org.eclipse.swt.browser.BrowserFunction;
import org.eclipse.swt.graphics.Image;
import org.eclipse.swt.widgets.Shell;
import org.pentaho.di.core.Const;
import org.pentaho.di.core.logging.LogChannelInterface;
import org.pentaho.di.repository.Repository;
import org.pentaho.di.ui.core.dialog.ThinDialog;
import org.pentaho.di.ui.core.gui.GUIResource;
import org.pentaho.di.ui.spoon.Spoon;
import org.pentaho.di.ui.util.HelpUtils;
import org.pentaho.platform.settings.ServerPort;
import org.pentaho.platform.settings.ServerPortRegistry;

import java.io.IOException;
import java.io.InputStream;
import java.util.HashMap;
import java.util.Map;
import java.util.Properties;
import java.util.function.Supplier;

import static org.pentaho.big.data.kettle.plugins.hadoopcluster.ui.endpoints.HadoopClusterManager.STRING_NAMED_CLUSTERS;

public class HadoopClusterDialog extends ThinDialog {

  private static final Image LOGO = GUIResource.getInstance().getImageLogoSmall();
  private static final String OSGI_SERVICE_PORT = "OSGI_SERVICE_PORT";
  private static final int OPTIONS = SWT.DIALOG_TRIM | SWT.RESIZE | SWT.MAX;
  private static final String THIN_CLIENT_HOST = "THIN_CLIENT_HOST";
  private static final String THIN_CLIENT_PORT = "THIN_CLIENT_PORT";
  private static final String LOCALHOST = "127.0.0.1";

  private Supplier<Spoon> spoonSupplier = Spoon::getInstance;
  private final LogChannelInterface log = spoonSupplier.get().getLog();

  HadoopClusterDialog( Shell shell, int width, int height ) {
    super( shell, width, height, true );
  }

  void open( String title, String thinAppState, Map<String, String> urlParams ) {
    StringBuilder clientPath = new StringBuilder();
    clientPath.append( getClientPath() );
    clientPath.append( "#!/" );
    if ( thinAppState != null ) {
      clientPath.append( thinAppState );
    }
    // Convert map into url params string
    HashMap<String, String> params = new HashMap<>( urlParams );
    params.put( "connectedToRepo", Boolean.toString( connectedToRepo() ) );
    final String paramString = params.entrySet().stream()
      .map( p -> p.getKey() + "=" + p.getValue() )
      .reduce( ( p1, p2 ) -> p1 + "&" + p2 )
      .map( s -> "?" + s )
      .orElse( "" );
    clientPath.append( paramString );

    String endpointURL = getEndpointURL( clientPath.toString() );
    log.logDebug( "Thin endpoint URL: " + endpointURL );
    super.createDialog( title, endpointURL, OPTIONS, LOGO );
    super.dialog.setMinimumSize( 640, 630 );

    new BrowserFunction( browser, "open" ) {
      @Override
      public Object function( Object[] arguments ) {
        HelpUtils.openHelpDialog( spoonSupplier.get().getDisplay().getActiveShell(), "", (String) arguments[ 0 ], "" );
        return true;
      }
    };

    new BrowserFunction( browser, "close" ) {
      @Override
      public Object function( Object[] arguments ) {
        Runnable execute = () -> {
          browser.dispose();
          dialog.close();
          dialog.dispose();
        };
        display.asyncExec( execute );
        return true;
      }
    };

    new BrowserFunction( browser, "setTitle" ) {
      @Override
      public Object function( Object[] arguments ) {
        Runnable execute = () -> {
          dialog.setText( (String) arguments[ 0 ] );
        };
        display.asyncExec( execute );
        return true;
      }
    };

    while ( !dialog.isDisposed() ) {
      if ( !display.readAndDispatch() ) {
        display.sleep();
      }
    }

    Spoon spoon = spoonSupplier.get();
    if ( spoon != null && spoon.getShell() != null ) {
      spoon.getShell().getDisplay().asyncExec( () -> spoon.refreshTree( STRING_NAMED_CLUSTERS ) );
    }
  }

  private String getClientPath() {
    Properties properties = new Properties();
    try {
      InputStream inputStream =
        HadoopClusterDialog.class.getClassLoader().getResourceAsStream( "project.properties" );
      properties.load( inputStream );
    } catch ( IOException e ) {
      log.logError( e.getMessage(), e );
    }
    return properties.getProperty( "CLIENT_PATH" );
  }

  private int getOsgiServicePort() {
    // if no service port is specified try getting it from
    ServerPort osgiServicePort = ServerPortRegistry.getPort( OSGI_SERVICE_PORT );
    if ( osgiServicePort != null ) {
      return osgiServicePort.getAssignedPort();
    }
    throw new IllegalStateException( "No osgi service port defined" );
  }

  private String getEndpointURL( String path ) {
    if ( connectedToRepo() ) {
      return getRepo().getUri()
        .orElseThrow( () -> new IllegalStateException( "Repo URI not defined" ) )
        .toString() + "/osgi" + path;
    }
    if ( Const.isRunningOnWebspoonMode() ) {
      return System.getProperty( "KETTLE_CONTEXT_PATH", "" ) + "/osgi" + path;
    }
    String host;
    int port;
    try {
      host = getKettleProperty( THIN_CLIENT_HOST );
      port = Integer.parseInt( getKettleProperty( THIN_CLIENT_PORT ) );
    } catch ( Exception e ) {
      host = LOCALHOST;
      port = getOsgiServicePort();
    }
    return "http://" + host + ":" + port + path;
  }

  private boolean connectedToRepo() {
    Repository repo = getRepo();
    return repo != null && repo.getUri().isPresent();
  }

  private Repository getRepo() {
    return spoonSupplier.get().getRepository();
  }

  private String getKettleProperty( String propertyName ) {
    // loaded in system properties at startup
    return System.getProperty( propertyName );
  }
}
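The open() method above reduces the URL parameter map into a query string before handing the full client path to getEndpointURL(). The following standalone sketch, not part of the repository, shows the same map-to-query-string reduction in isolation; the class name and sample values are invented for illustration.

import java.util.LinkedHashMap;
import java.util.Map;

// Illustrative only: mirrors the stream/reduce used in HadoopClusterDialog.open().
public class QueryStringSketch {
  public static void main( String[] args ) {
    Map<String, String> params = new LinkedHashMap<>();
    params.put( "name", "my-cluster" );        // sample value
    params.put( "connectedToRepo", "false" );  // sample value

    String paramString = params.entrySet().stream()
      .map( p -> p.getKey() + "=" + p.getValue() )
      .reduce( ( p1, p2 ) -> p1 + "&" + p2 )
      .map( s -> "?" + s )
      .orElse( "" );

    // Prints: ?name=my-cluster&connectedToRepo=false
    System.out.println( paramString );
  }
}

With an empty map the reduce yields an empty Optional, so orElse( "" ) leaves the client path untouched; note that neither the dialog code nor this sketch URL-encodes keys or values.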
================================================
FILE: kettle-plugins/hadoop-cluster/ui/src/main/java/org/pentaho/big/data/kettle/plugins/hadoopcluster/ui/dialog/wizard/NamedClusterDialog.java
================================================
/*! ******************************************************************************
 *
 * Pentaho
 *
 * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com
 *
 * Use of this software is governed by the Business Source License included
 * in the LICENSE.TXT file.
 *
 * Change Date: 2029-07-20
 ******************************************************************************/

package org.pentaho.big.data.kettle.plugins.hadoopcluster.ui.dialog.wizard;

import org.eclipse.jface.wizard.Wizard;
import org.eclipse.swt.widgets.Display;
import org.eclipse.swt.widgets.Shell;
import org.pentaho.big.data.impl.cluster.NamedClusterManager;
import org.pentaho.big.data.kettle.plugins.hadoopcluster.ui.dialog.wizard.pages.ClusterSettingsPage;
import org.pentaho.big.data.kettle.plugins.hadoopcluster.ui.dialog.wizard.pages.KerberosSettingsPage;
import org.pentaho.big.data.kettle.plugins.hadoopcluster.ui.dialog.wizard.pages.KnoxSettingsPage;
import org.pentaho.big.data.kettle.plugins.hadoopcluster.ui.dialog.wizard.pages.ReportPage;
import org.pentaho.big.data.kettle.plugins.hadoopcluster.ui.dialog.wizard.pages.SecuritySettingsPage;
import org.pentaho.big.data.kettle.plugins.hadoopcluster.ui.dialog.wizard.pages.TestResultsPage;
import org.pentaho.big.data.kettle.plugins.hadoopcluster.ui.dialog.wizard.util.BadSiteFilesException;
import org.pentaho.big.data.kettle.plugins.hadoopcluster.ui.dialog.wizard.util.CustomWizardDialog;
import org.pentaho.big.data.kettle.plugins.hadoopcluster.ui.endpoints.HadoopClusterManager;
import org.pentaho.big.data.kettle.plugins.hadoopcluster.ui.model.ThinNameClusterModel;
import org.pentaho.big.data.plugins.common.ui.ClusterTestDialog;
import org.pentaho.di.core.Props;
import org.pentaho.di.core.encryption.Encr;
import org.pentaho.di.core.encryption.TwoWayPasswordEncoderPluginType;
import org.pentaho.di.core.exception.KettleException;
import org.pentaho.di.core.logging.KettleLogStore;
import org.pentaho.di.core.logging.LogChannelInterface;
import org.pentaho.di.core.plugins.PluginRegistry;
import org.pentaho.di.core.variables.VariableSpace;
import org.pentaho.di.core.variables.Variables;
import org.pentaho.di.i18n.BaseMessages;
import org.pentaho.di.metastore.MetaStoreConst;
import org.pentaho.di.repository.Repository;
import org.pentaho.di.ui.core.PropsUI;
import org.pentaho.di.ui.spoon.Spoon;
import org.pentaho.hadoop.shim.api.cluster.NamedCluster;
import org.pentaho.hadoop.shim.api.cluster.NamedClusterService;
import org.pentaho.metastore.api.IMetaStore;
import org.pentaho.runtime.test.RuntimeTestStatus;
import org.pentaho.runtime.test.RuntimeTester;
import org.pentaho.runtime.test.impl.RuntimeTesterImpl;

import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Map;
import java.util.function.Supplier;

import static org.pentaho.big.data.kettle.plugins.hadoopcluster.ui.dialog.wizard.pages.SecuritySettingsPage.NamedClusterSecurityType.NONE;

/*
 * To run this dialog as stand alone for development purposes under UBUNTU do the following:
 * 1. Look for the following comment in the module:
 *    FOR UI EXECUTION AS A STANDALONE
 *    And either comment or uncomment the referred section as requested
 * 2. Execute running the following command at the root of the "ui" submodule:
 *    mvn clean compile exec:java
 *
 * TO DEBUG
 * mvn clean compile exec:exec -Dexec.executable="java" -Dexec.args="-classpath %classpath -Xdebug
 * -Xrunjdwp:transport=dt_socket,server=y,suspend=n,address=5005 org.pentaho.big.data.kettle.plugins.hadoopcluster.ui
 * .dialog.wizard.NamedClusterDialog"
 */
public class NamedClusterDialog extends Wizard {

  private String dialogState;
  private boolean isEditMode;
  private boolean isDuplicating;
  private ClusterSettingsPage clusterSettingsPage;
  private SecuritySettingsPage securitySettingsPage;
  private
KerberosSettingsPage kerberosSettingsPage; private KnoxSettingsPage knoxSettingsPage; private ReportPage reportPage; private TestResultsPage testResultsPage; private final HadoopClusterManager hadoopClusterManager; private ThinNameClusterModel thinNameClusterModel; private boolean isDevMode = false; private final RuntimeTester runtimeTester; private final VariableSpace variableSpace; private final Supplier spoonSupplier = Spoon::getInstance; private static final Class PKG = ClusterSettingsPage.class; private static final LogChannelInterface log = KettleLogStore.getLogChannelInterfaceFactory().create( "NamedClusterDialog" ); public NamedClusterDialog( NamedClusterService namedClusterService, IMetaStore metastore, VariableSpace variables, RuntimeTester tester, Map params, String dialogState ) { setWindowTitle( BaseMessages.getString( PKG, "NamedClusterDialog.newCluster" ) ); variableSpace = variables; runtimeTester = tester; hadoopClusterManager = new HadoopClusterManager( spoonSupplier.get(), namedClusterService, metastore, "" ); String namedClusterNameParam = params.get( "name" ); isEditMode = namedClusterNameParam != null; thinNameClusterModel = createModel( hadoopClusterManager.getNamedCluster( namedClusterNameParam ) ); String duplicateNamedClusterParam = params.get( "duplicateName" ); if ( duplicateNamedClusterParam != null ) { thinNameClusterModel.setOldName( thinNameClusterModel.getName() ); thinNameClusterModel.setName( "copy-of-" + thinNameClusterModel.getName() ); isEditMode = false; isDuplicating = true; } this.dialogState = dialogState; } public boolean isConnectedToRepo() { boolean isConnectedToRepo = false; if ( spoonSupplier.get() != null ) { Repository repo = spoonSupplier.get().getRepository(); isConnectedToRepo = repo != null && repo.getUri().isPresent(); } return isConnectedToRepo; } public String getShimIdentifier() { return hadoopClusterManager.getShimIdentifier(); } private ThinNameClusterModel createModel( ThinNameClusterModel model ) { boolean isCreatingCluster = false; if ( model == null ) { model = new ThinNameClusterModel(); isCreatingCluster = true; } model.setName( model.getName() == null ? "" : model.getName() ); model.setShimIdentifier( model.getShimIdentifier() == null ? "" : model.getShimIdentifier() ); model.setHdfsHost( model.getHdfsHost() == null ? "" : model.getHdfsHost() ); model.setHdfsPort( model.getHdfsPort() == null && isCreatingCluster? "8020" : model.getHdfsPort() == null ? "" : model.getHdfsPort() ); model.setHdfsUsername( model.getHdfsUsername() == null ? "" : model.getHdfsUsername() ); model.setHdfsPassword( model.getHdfsPassword() == null ? "" : model.getHdfsPassword() ); model.setJobTrackerHost( model.getJobTrackerHost() == null ? "" : model.getJobTrackerHost() ); model.setJobTrackerPort( model.getJobTrackerPort() == null && isCreatingCluster? "8032" : model.getJobTrackerPort() == null ? "" : model.getJobTrackerPort() ); model.setZooKeeperHost( model.getZooKeeperHost() == null ? "" : model.getZooKeeperHost() ); model.setZooKeeperPort( model.getZooKeeperPort() == null && isCreatingCluster? "2181" : model.getZooKeeperPort() == null ? "" : model.getZooKeeperPort() ); model.setOozieUrl( model.getOozieUrl() == null ? "" : model.getOozieUrl() ); model.setKafkaBootstrapServers( model.getKafkaBootstrapServers() == null ? "" : model.getKafkaBootstrapServers() ); model.setOldName( model.getName() ); model.setSecurityType( model.getSecurityType() == null ? "None" : model.getSecurityType() ); model.setKerberosSubType( model.getKerberosSubType() == null ? 
"Password" : model.getKerberosSubType() ); model.setKerberosAuthenticationUsername( model.getKerberosAuthenticationUsername() == null ? "" : model.getKerberosAuthenticationUsername() ); model.setKerberosAuthenticationPassword( model.getKerberosAuthenticationPassword() == null ? "" : model.getKerberosAuthenticationPassword() ); model.setKerberosImpersonationUsername( model.getKerberosImpersonationUsername() == null ? "" : model.getKerberosImpersonationUsername() ); model.setKerberosImpersonationPassword( model.getKerberosImpersonationPassword() == null ? "" : model.getKerberosImpersonationPassword() ); model.setGatewayUrl( model.getGatewayUrl() == null ? "" : model.getGatewayUrl() ); model.setGatewayUsername( model.getGatewayUsername() == null ? "" : model.getGatewayUsername() ); model.setGatewayPassword( model.getGatewayPassword() == null ? "" : model.getGatewayPassword() ); model.setKeytabImpFile( model.getKeytabImpFile() == null ? "" : model.getKeytabImpFile() ); model.setKeytabAuthFile( model.getKeytabAuthFile() == null ? BaseMessages.getString( PKG, "NamedClusterDialog.noFileSelected" ) : model.getKeytabAuthFile() ); model.setSiteFiles( model.getSiteFiles() == null ? new ArrayList<>() : model.getSiteFiles() ); return model; } public void initialize( ThinNameClusterModel model ) { if ( !dialogState.equals( "testing" ) ) { thinNameClusterModel = model == null ? createModel( null ) : createModel( model ); clusterSettingsPage.initialize( thinNameClusterModel ); securitySettingsPage.initialize( thinNameClusterModel ); knoxSettingsPage.initialize( thinNameClusterModel ); kerberosSettingsPage.initialize( thinNameClusterModel ); reportPage.initialize( thinNameClusterModel ); testResultsPage.initialize( thinNameClusterModel ); } else { try { testResultsPage.initialize( model ); testResultsPage.setTestResults( getTestResults() ); } catch ( KettleException e ) { log.logError( e.getMessage() ); } } } public void addPages() { if ( !dialogState.equals( "testing" ) ) { clusterSettingsPage = new ClusterSettingsPage( variableSpace, thinNameClusterModel ); addPage( clusterSettingsPage ); securitySettingsPage = new SecuritySettingsPage( thinNameClusterModel ); addPage( securitySettingsPage ); knoxSettingsPage = new KnoxSettingsPage( variableSpace, thinNameClusterModel ); addPage( knoxSettingsPage ); kerberosSettingsPage = new KerberosSettingsPage( variableSpace, thinNameClusterModel ); addPage( kerberosSettingsPage ); reportPage = new ReportPage( thinNameClusterModel ); addPage( reportPage ); testResultsPage = new TestResultsPage( variableSpace, thinNameClusterModel ); addPage( testResultsPage ); } else { testResultsPage = new TestResultsPage( variableSpace, thinNameClusterModel ); addPage( testResultsPage ); } } public void editCluster() { dialogState = "new-edit"; ThinNameClusterModel model = hadoopClusterManager.getNamedCluster( thinNameClusterModel.getName() ); if ( model != null ) { isEditMode = true; isDuplicating = false; initialize( model ); } else { isEditMode = false; isDuplicating = false; } getContainer().showPage( getPage( ClusterSettingsPage.class.getSimpleName() ) ); } public void createNewCluster() { isEditMode = false; isDuplicating = false; initialize( null ); getContainer().showPage( getPage( ClusterSettingsPage.class.getSimpleName() ) ); } public boolean performFinish() { boolean finish = false; // We are about to either create or edit hadoop cluster. 
Send shim identifier to the currently loaded driver String shimIdentifier = getShimIdentifier(); if ( shimIdentifier != null ) { thinNameClusterModel.setShimIdentifier( shimIdentifier ); } String currentPage = super.getContainer().getCurrentPage().getName(); if ( reportPage != null && !currentPage.equals( reportPage.getClass().getSimpleName() ) && !currentPage.equals( testResultsPage.getClass().getSimpleName() ) ) { if ( isEditMode || isDuplicating ) { saveEditedNamedCluster(); } else { saveNewNamedCluster(); } } else { finish = true; } if ( spoonSupplier.get() != null ) { spoonSupplier.get().refreshTree( BaseMessages.getString( PKG, "HadoopClusterTree.Title" ) ); } return finish; } private void saveNewNamedCluster() { try { hadoopClusterManager.saveNewNamedCluster( thinNameClusterModel, dialogState ); reportPage.setTestResults( getTestResults() ); } catch ( BadSiteFilesException e ) { reportPage.setTestResult( BaseMessages.getString( PKG, "NamedClusterDialog.test.importFailed" ) ); } catch ( IOException | KettleException e ) { log.logError( e.getMessage() ); } getContainer().showPage( reportPage ); } private void saveEditedNamedCluster() { try { hadoopClusterManager.saveEditedNamedCluster( thinNameClusterModel, isEditMode ); reportPage.setTestResults( getTestResults() ); } catch ( BadSiteFilesException e ) { reportPage.setTestResult( BaseMessages.getString( PKG, "NamedClusterDialog.test.importFailed" ) ); } catch ( IOException | KettleException e ) { log.logError( e.getMessage() ); } getContainer().showPage( reportPage ); } private Object[] getTestResults() throws KettleException { NamedCluster namedCluster = hadoopClusterManager.getNamedClusterByName( thinNameClusterModel.getName() ); if ( isDevMode() ) { if ( !dialogState.equals( "testing" ) ) { return (Object[]) hadoopClusterManager.runTests( runtimeTester, thinNameClusterModel.getName() ); } else { return new Object[] {}; } } else { RuntimeTestStatus runtimeTestStatus = ClusterTestDialog.create( spoonSupplier.get().getShell(), namedCluster, runtimeTester ).open(); return hadoopClusterManager.produceTestCategories( runtimeTestStatus, namedCluster ); } } public boolean canFinish() { // Hack to style the CustomWizardDialog. ( (CustomWizardDialog) getContainer() ).style(); // Couldn't be done elsewhere because the "TestResultsPage" was not initialized by the wizard. String currentPage = super.getContainer().getCurrentPage().getName(); if ( !dialogState.equals( "testing" ) ) { if ( currentPage.equals( clusterSettingsPage.getClass().getSimpleName() ) ) { ( (CustomWizardDialog) getContainer() ).enableCancelButton( true ); } return ( currentPage.equals( securitySettingsPage.getClass().getSimpleName() ) && securitySettingsPage.getSecurityType() .equals( NONE ) ) || ( currentPage.equals( kerberosSettingsPage.getClass().getSimpleName() ) && kerberosSettingsPage.isPageComplete() || ( currentPage.equals( knoxSettingsPage.getClass().getSimpleName() ) && knoxSettingsPage.isPageComplete() ) || currentPage.equals( reportPage.getClass().getSimpleName() ) || currentPage.equals( testResultsPage.getClass().getSimpleName() ) ); } else { // Set to Initialize "TestResultsPage" when "dialogState" is "testing" and disable its "Finish" button. // Couldn't be done elsewhere because the "TestResultsPage" was not initialized by the wizard. 
initialize( thinNameClusterModel ); return true; } } public String getDialogState() { return dialogState; } public boolean clusterNameExists( String clusterName ) { return hadoopClusterManager.getNamedCluster( clusterName ) != null; } public void setDevMode( boolean devMode ) { this.isDevMode = devMode; } public boolean isDevMode() { return isDevMode; } public boolean isEditMode() { return isEditMode; } public static void main( String[] args ) { try { PluginRegistry.addPluginType( TwoWayPasswordEncoderPluginType.getInstance() ); PluginRegistry.init( false ); Encr.init( "Kettle" ); KettleLogStore.init(); Display display = new Display(); Shell shell = new Shell( display ); PropsUI.init( display, Props.TYPE_PROPERTIES_SPOON ); NamedClusterDialog namedClusterDialog = new NamedClusterDialog( NamedClusterManager.getInstance(), MetaStoreConst.openLocalPentahoMetaStore(), new Variables(), RuntimeTesterImpl.getInstance(), new HashMap(), "new-edit" ); namedClusterDialog.setDevMode( true ); CustomWizardDialog namedClusterWizardDialog = new CustomWizardDialog( shell, namedClusterDialog ); namedClusterWizardDialog.open(); } catch ( Exception e ) { log.logError( e.getMessage() ); } } } ================================================ FILE: kettle-plugins/hadoop-cluster/ui/src/main/java/org/pentaho/big/data/kettle/plugins/hadoopcluster/ui/dialog/wizard/pages/ClusterSettingsPage.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.hadoopcluster.ui.dialog.wizard.pages; import org.pentaho.di.core.util.StringUtil; import org.eclipse.jface.wizard.IWizardPage; import org.eclipse.jface.wizard.WizardPage; import org.eclipse.swt.SWT; import org.eclipse.swt.custom.ScrolledComposite; import org.eclipse.swt.layout.GridData; import org.eclipse.swt.layout.GridLayout; import org.eclipse.swt.widgets.Button; import org.eclipse.swt.widgets.Composite; import org.eclipse.swt.widgets.FileDialog; import org.eclipse.swt.widgets.Group; import org.eclipse.swt.widgets.Label; import org.eclipse.swt.widgets.Listener; import org.eclipse.swt.widgets.MessageBox; import org.eclipse.swt.widgets.Table; import org.eclipse.swt.widgets.TableColumn; import org.eclipse.swt.widgets.TableItem; import org.eclipse.swt.widgets.Text; import org.pentaho.big.data.kettle.plugins.hadoopcluster.ui.dialog.wizard.NamedClusterDialog; import org.pentaho.big.data.kettle.plugins.hadoopcluster.ui.model.ThinNameClusterModel; import org.pentaho.di.core.Const; import org.pentaho.di.core.variables.VariableSpace; import org.pentaho.di.i18n.BaseMessages; import org.pentaho.di.ui.core.PropsUI; import org.pentaho.di.ui.core.widget.TextVar; import org.pentaho.di.ui.util.HelpUtils; import java.io.File; import java.util.AbstractMap.SimpleImmutableEntry; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import static org.pentaho.big.data.kettle.plugins.hadoopcluster.ui.dialog.wizard.util.NamedClusterHelper.ONE_COLUMN; import static org.pentaho.big.data.kettle.plugins.hadoopcluster.ui.dialog.wizard.util.NamedClusterHelper.TWO_COLUMNS; import static 
org.pentaho.big.data.kettle.plugins.hadoopcluster.ui.dialog.wizard.util.NamedClusterHelper.createLabel; import static org.pentaho.big.data.kettle.plugins.hadoopcluster.ui.dialog.wizard.util.NamedClusterHelper.createText; import static org.pentaho.big.data.kettle.plugins.hadoopcluster.ui.dialog.wizard.util.NamedClusterHelper.decodePassword; import static org.pentaho.big.data.kettle.plugins.hadoopcluster.ui.dialog.wizard.util.NamedClusterHelper.getVersionForDriver; import static org.pentaho.big.data.kettle.plugins.hadoopcluster.ui.dialog.wizard.util.NamedClusterHelper.getVendorForDriver; import static org.pentaho.di.ui.core.PropsUI.getDisplay; public class ClusterSettingsPage extends WizardPage { private PropsUI props; private Composite parent; private Composite mainPanel; private ScrolledComposite clusterScrollPanel; private TextVar hostNameTextFieldHdfsGroup; private TextVar portTextFieldHdfsGroup; private TextVar userNameTextFieldHdfsGroup; private TextVar passwordTextFieldHdfsGroup; private TextVar hostNameTextFieldJobTrackerGroup; private TextVar portTextFieldJobTrackerGroup; private TextVar hostNameTextFieldZooKeeperGroup; private TextVar portTextFieldZooKeeperGroup; private TextVar hostNameTextFieldOozieGroup; private TextVar hostNameTextFieldKafkaGroup; private Button deleteSiteFilesButton; private Text nameOfNamedCluster; private Table siteFilesTable; private Group hdfsGroup; private Group jobTrackerGroup; private Group zooKeeperGroup; private Group oozieGroup; private Group kafkaGroup; private Composite fillerComposite; private Map siteFilesPath; private ThinNameClusterModel thinNameClusterModel; private final Listener clusterListener = e -> validate(); private final VariableSpace variableSpace; private static final Class PKG = ClusterSettingsPage.class; private String loadedShimVendor = BaseMessages.getString( PKG, "NamedClusterDialog.noDriver" ); private String loadedShimVersion = ""; private String shimIdentifier; public ClusterSettingsPage( VariableSpace variables, ThinNameClusterModel model ) { super( ClusterSettingsPage.class.getSimpleName() ); variableSpace = variables; thinNameClusterModel = model; setPageComplete( false ); } public void createControl( Composite composite ) { parent = new Composite( composite, SWT.NONE ); props = PropsUI.getInstance(); props.setLook( parent ); GridLayout gridLayout = new GridLayout( ONE_COLUMN, false ); parent.setLayout( gridLayout ); Composite basePanel = new Composite( parent, SWT.NONE ); //START OF MAIN LAYOUT GridLayout basePanelGridLayout = new GridLayout( ONE_COLUMN, false ); basePanelGridLayout.marginWidth = 60; //TO CENTER CONTENTS basePanelGridLayout.marginTop = 10; //TO CENTER CONTENTS basePanelGridLayout.marginBottom = 30; basePanelGridLayout.marginLeft = 20; basePanel.setLayout( basePanelGridLayout ); GridData basePanelGridData = new GridData( SWT.FILL, SWT.FILL, false, false ); basePanel.setLayoutData( basePanelGridData ); props.setLook( basePanel ); //END OF MAIN LAYOUT //START OF HEADER Composite headerPanel = new Composite( basePanel, SWT.NONE ); headerPanel.setLayout( new GridLayout( ONE_COLUMN, false ) ); GridData headerPanelGridData = new GridData( SWT.FILL, SWT.FILL, false, false ); headerPanel.setLayoutData( headerPanelGridData ); props.setLook( headerPanel ); GridData clusterNameLabelGridData = new GridData(); clusterNameLabelGridData.widthHint = 400; // Label width createLabel( headerPanel, BaseMessages.getString( PKG, "NamedClusterDialog.clusterName" ), clusterNameLabelGridData, props ); GridData 
clusterNameTextFieldGridData = new GridData(); clusterNameTextFieldGridData.widthHint = Const.isLinux() ? 395 : 409; // TextField width nameOfNamedCluster = new Text( headerPanel, SWT.SINGLE | SWT.LEFT | SWT.BORDER ); nameOfNamedCluster.setText( "" ); nameOfNamedCluster.setLayoutData( clusterNameTextFieldGridData ); nameOfNamedCluster.addListener( SWT.CHANGED, clusterListener ); nameOfNamedCluster.addListener( SWT.MouseExit, clusterListener ); props.setLook( nameOfNamedCluster ); //END OF HEADER //START OF CLUSTER SCROLLABLE PANEL clusterScrollPanel = new ScrolledComposite( basePanel, SWT.V_SCROLL | SWT.NONE ); clusterScrollPanel.setExpandHorizontal( true ); clusterScrollPanel.setExpandVertical( true ); clusterScrollPanel.setLayout( new GridLayout( ONE_COLUMN, false ) ); GridData clusterScrollPanelGridData = new GridData( SWT.FILL, SWT.FILL, false, false ); clusterScrollPanelGridData.heightHint = 490; //Height of the scrollable panel (WILL NEED TO ADJUST) clusterScrollPanel.setLayoutData( clusterScrollPanelGridData ); props.setLook( clusterScrollPanel ); //START MAIN PANEL mainPanel = new Composite( clusterScrollPanel, SWT.NONE ); mainPanel.setLayout( new GridLayout( ONE_COLUMN, false ) ); GridData mainPanelGridData = new GridData( SWT.FILL, SWT.FILL, false, false ); mainPanel.setLayoutData( mainPanelGridData ); props.setLook( mainPanel ); //END MAIN PANEL createDriverGroup(); createSiteXMLFilesGroup(); //END OF CLUSTER SCROLLABLE PANEL clusterScrollPanel.setContent( mainPanel ); initialize( thinNameClusterModel ); setControl( parent ); } private void createDriverGroup() { try { loadedShimVendor = getLoadedDriverVendor(); loadedShimVersion = getLoadedDriverVersion(); } catch ( Exception e ) { // Do nothing go with defined loaded shim vendor and version } String loadedDriverText = loadedShimVendor + " " + loadedShimVersion; String originalDriverText; String shimIdentifier = thinNameClusterModel.getShimIdentifier(); if ( StringUtil.isEmpty( shimIdentifier ) ) { originalDriverText = BaseMessages.getString( PKG, "NamedClusterDialog.noDriver" ); } else { String vendor = getVendorForDriver( shimIdentifier ); String version = getVersionForDriver( shimIdentifier ); if ( StringUtil.isEmpty( vendor ) || StringUtil.isEmpty( version ) ) { originalDriverText = BaseMessages.getString( PKG, "NamedClusterDialog.noDriver" ); } else { originalDriverText = vendor + " " + version; } } Composite driverGroupPanel = new Composite( mainPanel, SWT.NONE ); GridLayout driverGroupGridLayout = new GridLayout( ONE_COLUMN, true ); driverGroupGridLayout.marginWidth = 0; driverGroupPanel.setLayout( driverGroupGridLayout ); GridData driverGroupPanelGridData = new GridData( SWT.FILL, SWT.FILL, false, false ); driverGroupPanel.setLayoutData( driverGroupPanelGridData ); props.setLook( driverGroupPanel ); GridData driverInfoGroupGridData = new GridData(); Label loadedDriverLabel = new Label( driverGroupPanel, SWT.NONE ); loadedDriverLabel.setText( BaseMessages.getString( PKG, "NamedClusterDialog.activeDriver" ) + " " + loadedDriverText ); loadedDriverLabel.setLayoutData( driverInfoGroupGridData ); if ( ( (NamedClusterDialog) getWizard() ).isEditMode() ) { Label originalDriverLabel = new Label( driverGroupPanel, SWT.NONE ); originalDriverLabel.setText( BaseMessages.getString( PKG, "NamedClusterDialog.originalDriver" ) + " " + originalDriverText ); originalDriverLabel.setLayoutData( driverInfoGroupGridData ); if ( !originalDriverText.equals( loadedDriverText ) ) { Label driverMismatchLabel = new Label( driverGroupPanel, SWT.NONE 
); driverMismatchLabel.setText( BaseMessages.getString( PKG, "NamedClusterDialog.mismatchedDriver" ) ); driverMismatchLabel.setForeground( getDisplay().getSystemColor( SWT.COLOR_RED ) ); driverMismatchLabel.setLayoutData( driverInfoGroupGridData ); } } } private String getLoadedDriverVersion() { if( shimIdentifier == null ) { NamedClusterDialog namedClusterDialog = (NamedClusterDialog) getWizard(); shimIdentifier = namedClusterDialog.getShimIdentifier(); } String version = ""; if( shimIdentifier != null ) { version = getVersionForDriver( shimIdentifier ); } return version; } private String getLoadedDriverVendor() { if( shimIdentifier == null ) { NamedClusterDialog namedClusterDialog = (NamedClusterDialog) getWizard(); shimIdentifier = namedClusterDialog.getShimIdentifier(); } String vendor = ""; if( shimIdentifier != null ) { vendor = getVendorForDriver( shimIdentifier ); } return vendor; } private void createSiteXMLFilesGroup() { Group siteXmlFilesGroup = new Group( mainPanel, SWT.NONE ); siteXmlFilesGroup.setText( BaseMessages.getString( PKG, "NamedClusterDialog.siteXmlFiles" ) ); siteXmlFilesGroup.setLayout( new GridLayout( ONE_COLUMN, false ) ); GridData jobTrackerGroupGridData = new GridData( SWT.FILL, SWT.FILL, false, false ); siteXmlFilesGroup.setLayoutData( jobTrackerGroupGridData ); props.setLook( siteXmlFilesGroup ); Composite buttonsPanel = new Composite( siteXmlFilesGroup, SWT.NONE ); GridLayout buttonsPanelGridLayout = new GridLayout( TWO_COLUMNS, false ); buttonsPanel.setLayout( buttonsPanelGridLayout ); GridData basePanelGridData = new GridData( SWT.FILL, SWT.FILL, false, false ); buttonsPanel.setLayoutData( basePanelGridData ); props.setLook( buttonsPanel ); Button browseButton = new Button( buttonsPanel, SWT.PUSH ); GridData browserButtonGridData = new GridData( SWT.BEGINNING, SWT.FILL, true, false ); browseButton.setLayoutData( browserButtonGridData ); browseButton.setText( BaseMessages.getString( PKG, "NamedClusterDialog.browseButton" ) ); Listener browseListener = e -> browse(); browseButton.addListener( SWT.Selection, browseListener ); props.setLook( browseButton ); deleteSiteFilesButton = new Button( buttonsPanel, SWT.PUSH ); deleteSiteFilesButton.setText( BaseMessages.getString( PKG, "NamedClusterDialog.remove" ) ); deleteSiteFilesButton.setToolTipText( BaseMessages.getString( PKG, "NamedClusterDialog.removeSiteFile" ) ); deleteSiteFilesButton.setEnabled( false ); GridData deleteButtonGridData = new GridData( SWT.END, SWT.FILL, true, false ); deleteSiteFilesButton.setLayoutData( deleteButtonGridData ); Listener removeSiteFileListener = e -> removeSelectedSiteFiles(); deleteSiteFilesButton.addListener( SWT.Selection, removeSiteFileListener ); props.setLook( deleteSiteFilesButton ); siteFilesTable = new Table( siteXmlFilesGroup, SWT.BORDER | SWT.CHECK ); siteFilesTable.setHeaderVisible( true ); siteFilesTable.setLinesVisible( true ); GridData data = new GridData( SWT.FILL, SWT.FILL, true, true ); data.heightHint = 100; siteFilesTable.setLayoutData( data ); Listener tableListener = e -> processTableSelection(); siteFilesTable.addListener( SWT.Selection, tableListener ); props.setLook( siteFilesTable ); TableColumn fileNameColumn = new TableColumn( siteFilesTable, SWT.NONE ); fileNameColumn.setText( BaseMessages.getString( PKG, "NamedClusterDialog.file" ) ); fileNameColumn.setWidth( 330 ); fileNameColumn.setResizable( false ); } private void processTableSelection() { List selectedSiteFiles = getSelectedSiteFiles(); deleteSiteFilesButton.setEnabled( 
!selectedSiteFiles.isEmpty() ); } private void removeSelectedSiteFiles() { MessageBox warning = new MessageBox( mainPanel.getShell(), SWT.YES | SWT.NO ); warning.setMessage( BaseMessages.getString( PKG, "NamedClusterDialog.siteFileAlert" ) ); int buttonClicked = warning.open(); if ( buttonClicked == SWT.YES ) { List selectedSiteFiles = getSelectedSiteFiles(); for ( TableItem selectedSiteFile : selectedSiteFiles ) { siteFilesPath.remove( selectedSiteFile.getText() ); siteFilesTable.remove( siteFilesTable.indexOf( selectedSiteFile ) ); } deleteSiteFilesButton.setEnabled( false ); validate(); } } private List getSelectedSiteFiles() { List selectedSiteFiles = new ArrayList<>(); for ( TableItem siteFile : siteFilesTable.getItems() ) { if ( siteFile.getChecked() ) { selectedSiteFiles.add( siteFile ); } } return selectedSiteFiles; } private void createHdfsGroup() { hdfsGroup = new Group( mainPanel, SWT.NONE ); hdfsGroup.setText( BaseMessages.getString( PKG, "NamedClusterDialog.hdfs" ) ); hdfsGroup.setLayout( new GridLayout( ONE_COLUMN, false ) ); GridData hdfsGroupGridData = new GridData( SWT.FILL, SWT.FILL, false, false ); hdfsGroup.setLayoutData( hdfsGroupGridData ); props.setLook( hdfsGroup ); if ( ( (NamedClusterDialog) getWizard() ).getDialogState().equals( "new-edit" ) ) { GridData hostNameLabelHdfsGroupGridData = new GridData(); hostNameLabelHdfsGroupGridData.widthHint = 400; // Label width createLabel( hdfsGroup, BaseMessages.getString( PKG, "NamedClusterDialog.hostname" ), hostNameLabelHdfsGroupGridData, props ); GridData hostNameTextFieldHdfsGroupdGridData = new GridData(); hostNameTextFieldHdfsGroupdGridData.widthHint = 400; // TextField width hostNameTextFieldHdfsGroup = createText( hdfsGroup, "", hostNameTextFieldHdfsGroupdGridData, props, variableSpace, clusterListener ); GridData portLabelHdfsGroupGridData = new GridData(); portLabelHdfsGroupGridData.widthHint = 400; // Label width createLabel( hdfsGroup, BaseMessages.getString( PKG, "NamedClusterDialog.port" ), portLabelHdfsGroupGridData, props ); GridData portTextFieldHdfsGroupGridData = new GridData(); portTextFieldHdfsGroupGridData.widthHint = 400; // TextField width portTextFieldHdfsGroup = createText( hdfsGroup, "", portTextFieldHdfsGroupGridData, props, variableSpace, clusterListener ); } Composite userPasswordHdfsGroupPanel = new Composite( hdfsGroup, SWT.NONE ); GridLayout userPasswordHdfsGroupGridLayout = new GridLayout( TWO_COLUMNS, true ); userPasswordHdfsGroupGridLayout.marginWidth = 0; userPasswordHdfsGroupPanel.setLayout( userPasswordHdfsGroupGridLayout ); GridData userPasswordHdfsGroupPanelGridData = new GridData( SWT.FILL, SWT.FILL, false, false ); userPasswordHdfsGroupPanel.setLayoutData( userPasswordHdfsGroupPanelGridData ); props.setLook( userPasswordHdfsGroupPanel ); GridData userNameLabelHdfsGroupGridData = new GridData( SWT.BEGINNING, SWT.FILL, true, false ); createLabel( userPasswordHdfsGroupPanel, BaseMessages.getString( PKG, "NamedClusterDialog.username" ), userNameLabelHdfsGroupGridData, props ); GridData passwordLabelHdfsGroupGridData = new GridData(); createLabel( userPasswordHdfsGroupPanel, BaseMessages.getString( PKG, "NamedClusterDialog.password" ), passwordLabelHdfsGroupGridData, props ); GridData userNameTextFieldHdfsGroupGridData = new GridData(); userNameTextFieldHdfsGroupGridData.widthHint = 197; // TextField width userNameTextFieldHdfsGroup = createText( userPasswordHdfsGroupPanel, "", userNameTextFieldHdfsGroupGridData, props, variableSpace, clusterListener ); GridData 
passwordTextFieldHdfsGroupGridData = new GridData(); passwordTextFieldHdfsGroupGridData.widthHint = 197; // TextField width passwordTextFieldHdfsGroup = createText( userPasswordHdfsGroupPanel, "", passwordTextFieldHdfsGroupGridData, props, variableSpace, clusterListener ); passwordTextFieldHdfsGroup.setEchoChar( '*' ); mainPanel.pack(); } private void createJobTrackerGroup() { jobTrackerGroup = new Group( mainPanel, SWT.NONE ); jobTrackerGroup.setText( BaseMessages.getString( PKG, "NamedClusterDialog.jobTracker" ) ); jobTrackerGroup.setLayout( new GridLayout( ONE_COLUMN, false ) ); GridData jobTrackerGroupGridData = new GridData( SWT.FILL, SWT.FILL, false, false ); jobTrackerGroup.setLayoutData( jobTrackerGroupGridData ); props.setLook( jobTrackerGroup ); GridData hostNameLabelJobTrackerGroupGridData = new GridData(); hostNameLabelJobTrackerGroupGridData.widthHint = 400; // Label width createLabel( jobTrackerGroup, BaseMessages.getString( PKG, "NamedClusterDialog.hostname" ), hostNameLabelJobTrackerGroupGridData, props ); GridData hostNameTextFieldJobTrackerGroupdGridData = new GridData(); hostNameTextFieldJobTrackerGroupdGridData.widthHint = 400; // TextField width hostNameTextFieldJobTrackerGroup = createText( jobTrackerGroup, "", hostNameTextFieldJobTrackerGroupdGridData, props, variableSpace, clusterListener ); GridData portLabelJobTrackerGroupGridData = new GridData(); portLabelJobTrackerGroupGridData.widthHint = 400; // Label width createLabel( jobTrackerGroup, BaseMessages.getString( PKG, "NamedClusterDialog.port" ), portLabelJobTrackerGroupGridData, props ); GridData portTextFieldJobTrackerGroupGridData = new GridData(); portTextFieldJobTrackerGroupGridData.widthHint = 400; // TextField width portTextFieldJobTrackerGroup = createText( jobTrackerGroup, "", portTextFieldJobTrackerGroupGridData, props, variableSpace, clusterListener ); mainPanel.pack(); } private void createZooKeeperGroup() { zooKeeperGroup = new Group( mainPanel, SWT.NONE ); zooKeeperGroup.setText( BaseMessages.getString( PKG, "NamedClusterDialog.zooKeeper" ) ); zooKeeperGroup.setLayout( new GridLayout( ONE_COLUMN, false ) ); GridData zooKeeperGroupGridData = new GridData( SWT.FILL, SWT.FILL, false, false ); zooKeeperGroup.setLayoutData( zooKeeperGroupGridData ); props.setLook( zooKeeperGroup ); GridData hostNameLabelZooKeeperGroupGridData = new GridData(); hostNameLabelZooKeeperGroupGridData.widthHint = 400; // Label width createLabel( zooKeeperGroup, BaseMessages.getString( PKG, "NamedClusterDialog.hostname" ), hostNameLabelZooKeeperGroupGridData, props ); GridData hostNameTextFieldZooKeeperGroupdGridData = new GridData(); hostNameTextFieldZooKeeperGroupdGridData.widthHint = 400; // TextField width hostNameTextFieldZooKeeperGroup = createText( zooKeeperGroup, "", hostNameTextFieldZooKeeperGroupdGridData, props, variableSpace, clusterListener ); GridData portLabelZooKeeperGroupGridData = new GridData(); portLabelZooKeeperGroupGridData.widthHint = 400; // Label width createLabel( zooKeeperGroup, BaseMessages.getString( PKG, "NamedClusterDialog.port" ), portLabelZooKeeperGroupGridData, props ); GridData portTextFieldZooKeeperGroupGridData = new GridData(); portTextFieldZooKeeperGroupGridData.widthHint = 400; // TextField width portTextFieldZooKeeperGroup = createText( zooKeeperGroup, "", portTextFieldZooKeeperGroupGridData, props, variableSpace, clusterListener ); mainPanel.pack(); } private void createOozieGroup() { oozieGroup = new Group( mainPanel, SWT.NONE ); oozieGroup.setText( BaseMessages.getString( PKG, 
"NamedClusterDialog.oozie" ) ); oozieGroup.setLayout( new GridLayout( ONE_COLUMN, false ) ); GridData oozieGroupGridData = new GridData( SWT.FILL, SWT.FILL, false, false ); oozieGroup.setLayoutData( oozieGroupGridData ); props.setLook( oozieGroup ); GridData hostNameLabelOozieGroupGridData = new GridData(); hostNameLabelOozieGroupGridData.widthHint = 400; // Label width createLabel( oozieGroup, BaseMessages.getString( PKG, "NamedClusterDialog.hostname" ), hostNameLabelOozieGroupGridData, props ); GridData hostNameTextFieldOozieGroupdGridData = new GridData(); hostNameTextFieldOozieGroupdGridData.widthHint = 400; // TextField width hostNameTextFieldOozieGroup = createText( oozieGroup, "", hostNameTextFieldOozieGroupdGridData, props, variableSpace, clusterListener ); mainPanel.pack(); } private void createKafkaGroup() { kafkaGroup = new Group( mainPanel, SWT.NONE ); kafkaGroup.setText( BaseMessages.getString( PKG, "NamedClusterDialog.kafka" ) ); kafkaGroup.setLayout( new GridLayout( ONE_COLUMN, false ) ); GridData kafkaGroupGridData = new GridData( SWT.FILL, SWT.FILL, false, false ); kafkaGroup.setLayoutData( kafkaGroupGridData ); props.setLook( kafkaGroup ); GridData hostNameLabelKafkaGroupGridData = new GridData(); hostNameLabelKafkaGroupGridData.widthHint = 400; // Label width createLabel( kafkaGroup, BaseMessages.getString( PKG, "NamedClusterDialog.bootstrapServers" ), hostNameLabelKafkaGroupGridData, props ); GridData hostNameTextFieldKafkaGroupdGridData = new GridData(); hostNameTextFieldKafkaGroupdGridData.widthHint = 400; // TextField width hostNameTextFieldKafkaGroup = createText( kafkaGroup, "", hostNameTextFieldKafkaGroupdGridData, props, variableSpace, clusterListener ); mainPanel.pack(); } private void createFiller() { fillerComposite = new Composite( mainPanel, SWT.NONE ); props.setLook( fillerComposite ); mainPanel.pack(); } private void browse() { FileDialog dialog = new FileDialog( mainPanel.getShell(), SWT.MULTI ); dialog.open(); for ( String fileName : dialog.getFileNames() ) { addSiteFileToTable( fileName ); siteFilesPath.put( fileName, dialog.getFilterPath() + File.separator ); } if ( dialog.getFileNames().length > 0 ) { validate(); } } private void validate() { thinNameClusterModel.setName( nameOfNamedCluster.getText() ); thinNameClusterModel.setHdfsUsername( userNameTextFieldHdfsGroup.getText() ); thinNameClusterModel.setHdfsPassword( passwordTextFieldHdfsGroup.getText() ); thinNameClusterModel.setSiteFiles( getTableItems( siteFilesTable.getItems() ) ); if ( ( (NamedClusterDialog) getWizard() ).getDialogState().equals( "new-edit" ) ) { thinNameClusterModel.setHdfsHost( hostNameTextFieldHdfsGroup.getText() ); thinNameClusterModel.setHdfsPort( portTextFieldHdfsGroup.getText() ); thinNameClusterModel.setJobTrackerPort( portTextFieldJobTrackerGroup.getText() ); thinNameClusterModel.setZooKeeperPort( portTextFieldZooKeeperGroup.getText() ); thinNameClusterModel.setJobTrackerHost( hostNameTextFieldJobTrackerGroup.getText() ); thinNameClusterModel.setZooKeeperHost( hostNameTextFieldZooKeeperGroup.getText() ); thinNameClusterModel.setOozieUrl( hostNameTextFieldOozieGroup.getText() ); thinNameClusterModel.setKafkaBootstrapServers( hostNameTextFieldKafkaGroup.getText() ); setPageComplete( !thinNameClusterModel.getName().isBlank() && !thinNameClusterModel.getHdfsHost().isBlank() && thinNameClusterModel.getName().matches( "^[a-zA-Z0-9-]+$" ) ); } if ( ( (NamedClusterDialog) getWizard() ).getDialogState().equals( "import" ) ) { setPageComplete( 
!thinNameClusterModel.getName().isBlank() && !thinNameClusterModel.getSiteFiles().isEmpty() && thinNameClusterModel.getName().matches( "^[a-zA-Z0-9-]+$" ) ); } } public IWizardPage getNextPage() { boolean nextButtonPressed = "nextPressed".equalsIgnoreCase( Thread.currentThread().getStackTrace()[ 2 ].getMethodName() ); boolean clusterNameExists = ( (NamedClusterDialog) getWizard() ).clusterNameExists( thinNameClusterModel.getName() ); boolean notEditingUsingSameName = !( ( (NamedClusterDialog) getWizard() ).isEditMode() && thinNameClusterModel.getName() .equals( thinNameClusterModel.getOldName() ) ); if ( nextButtonPressed && clusterNameExists && notEditingUsingSameName ) { MessageBox box = new MessageBox( mainPanel.getShell(), SWT.YES | SWT.NO | SWT.ICON_QUESTION ); box.setText( BaseMessages.getString( PKG, "NamedClusterDialog.clusterOverwriteTitle" ) ); box.setMessage( BaseMessages.getString( PKG, "NamedClusterDialog.clusterOverwrite", thinNameClusterModel.getName() ) ); int result = box.open(); if ( result != SWT.YES ) { return null; } } SecuritySettingsPage securitySettingsPage = (SecuritySettingsPage) getWizard().getPage( SecuritySettingsPage.class.getSimpleName() ); securitySettingsPage.initialize( thinNameClusterModel ); return securitySettingsPage; } private boolean isConnectedToRepo() { NamedClusterDialog namedClusterDialog = (NamedClusterDialog) getWizard(); boolean isConnectedToRepo = namedClusterDialog.isConnectedToRepo(); if ( isDevMode() ) { isConnectedToRepo = true; } return isConnectedToRepo; } public void initialize( ThinNameClusterModel model ) { setTitle( ( (NamedClusterDialog) getWizard() ).isEditMode() ? BaseMessages.getString( PKG, "NamedClusterDialog.editCluster.title" ) : ( (NamedClusterDialog) getWizard() ).getDialogState().equals( "import" ) ? 
BaseMessages.getString( PKG, "NamedClusterDialog.importCluster.title" ) : BaseMessages.getString( PKG, "NamedClusterDialog.newCluster.title" ) ); if ( isConnectedToRepo() ) { setDescription( BaseMessages.getString( PKG, "NamedClusterDialog.repositoryNotification" ) ); } thinNameClusterModel = model; siteFilesPath = new HashMap<>(); nameOfNamedCluster.setText( model.getName() ); setTableItems( model.getSiteFiles() ); disposeComponents(); createHdfsGroup(); userNameTextFieldHdfsGroup.setText( model.getHdfsUsername() ); passwordTextFieldHdfsGroup.setText( decodePassword( model.getHdfsPassword() ) ); if ( ( (NamedClusterDialog) getWizard() ).getDialogState().equals( "new-edit" ) ) { createJobTrackerGroup(); createZooKeeperGroup(); createOozieGroup(); createKafkaGroup(); createFiller(); hostNameTextFieldHdfsGroup.setText( model.getHdfsHost() ); portTextFieldHdfsGroup.setText( model.getHdfsPort() ); portTextFieldJobTrackerGroup.setText( model.getJobTrackerPort() ); portTextFieldZooKeeperGroup.setText( model.getZooKeeperPort() ); hostNameTextFieldJobTrackerGroup.setText( model.getJobTrackerHost() ); hostNameTextFieldZooKeeperGroup.setText( model.getZooKeeperHost() ); hostNameTextFieldOozieGroup.setText( model.getOozieUrl() ); hostNameTextFieldKafkaGroup.setText( model.getKafkaBootstrapServers() ); } clusterScrollPanel.setMinSize( mainPanel.computeSize( SWT.DEFAULT, SWT.DEFAULT ) ); mainPanel.pack(); validate(); } private void disposeComponents() { if ( hdfsGroup != null ) { hdfsGroup.dispose(); hdfsGroup = null; } if ( jobTrackerGroup != null ) { jobTrackerGroup.dispose(); jobTrackerGroup = null; } if ( zooKeeperGroup != null ) { zooKeeperGroup.dispose(); zooKeeperGroup = null; } if ( oozieGroup != null ) { oozieGroup.dispose(); oozieGroup = null; } if ( kafkaGroup != null ) { kafkaGroup.dispose(); kafkaGroup = null; } if ( fillerComposite != null ) { fillerComposite.dispose(); fillerComposite = null; } mainPanel.pack(); } private List> getTableItems( TableItem[] tableItems ) { List> siteFiles = new ArrayList<>(); for ( TableItem tableItem : tableItems ) { String path = siteFilesPath.get( tableItem.getText() ); path = path == null ? "" : path; siteFiles.add( new SimpleImmutableEntry<>( path, tableItem.getText() ) ); } return siteFiles; } public IWizardPage getPreviousPage() { return null; } public void performHelp() { HelpUtils.openHelpDialog( parent.getShell(), "", BaseMessages.getString( PKG, "NamedClusterDialog.help" ), "" ); } private void setTableItems( List> siteFiles ) { siteFilesTable.removeAll(); for ( SimpleImmutableEntry siteFile : siteFiles ) { addSiteFileToTable( siteFile.getValue() ); } } private void addSiteFileToTable( String fileName ) { TableItem item = new TableItem( siteFilesTable, SWT.NONE ); item.setText( 0, fileName ); } private boolean isDevMode() { NamedClusterDialog namedClusterDialog = (NamedClusterDialog) getWizard(); return namedClusterDialog.isDevMode(); } } ================================================ FILE: kettle-plugins/hadoop-cluster/ui/src/main/java/org/pentaho/big/data/kettle/plugins/hadoopcluster/ui/dialog/wizard/pages/KerberosSettingsPage.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. 
* * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.hadoopcluster.ui.dialog.wizard.pages; import org.eclipse.jface.wizard.IWizardPage; import org.eclipse.jface.wizard.WizardPage; import org.eclipse.swt.SWT; import org.eclipse.swt.custom.CCombo; import org.eclipse.swt.layout.GridData; import org.eclipse.swt.layout.GridLayout; import org.eclipse.swt.widgets.Button; import org.eclipse.swt.widgets.Composite; import org.eclipse.swt.widgets.FileDialog; import org.eclipse.swt.widgets.Listener; import org.eclipse.swt.widgets.Text; import org.pentaho.big.data.kettle.plugins.hadoopcluster.ui.dialog.wizard.NamedClusterDialog; import org.pentaho.big.data.kettle.plugins.hadoopcluster.ui.model.ThinNameClusterModel; import org.pentaho.di.core.Const; import org.pentaho.di.core.variables.VariableSpace; import org.pentaho.di.i18n.BaseMessages; import org.pentaho.di.ui.core.PropsUI; import org.pentaho.di.ui.core.widget.TextVar; import org.pentaho.di.ui.util.HelpUtils; import java.io.File; import java.util.AbstractMap.SimpleImmutableEntry; import java.util.List; import java.util.stream.Collectors; import static org.pentaho.big.data.kettle.plugins.hadoopcluster.ui.dialog.wizard.util.NamedClusterHelper.ONE_COLUMN; import static org.pentaho.big.data.kettle.plugins.hadoopcluster.ui.dialog.wizard.util.NamedClusterHelper.TWO_COLUMNS; import static org.pentaho.big.data.kettle.plugins.hadoopcluster.ui.dialog.wizard.util.NamedClusterHelper.createLabel; import static org.pentaho.big.data.kettle.plugins.hadoopcluster.ui.dialog.wizard.util.NamedClusterHelper.createText; import static org.pentaho.big.data.kettle.plugins.hadoopcluster.ui.dialog.wizard.util.NamedClusterHelper.decodePassword; public class KerberosSettingsPage extends WizardPage { private PropsUI props; private Composite parent; private Composite mainPanel; private Composite passwordAuthenticationPanel; private Composite keytabAuthenticationPanel; private CCombo securityMethodCombo; private TextVar authenticationUserNameTextField; private TextVar authenticationPasswordTextField; private Text authenticationKeytabText; private TextVar impersonationUserNameTextField; private TextVar impersonationPasswordTextField; private Text impersonationKeytabText; private ThinNameClusterModel thinNameClusterModel; private final Listener clusterListener = e -> validate(); private final VariableSpace variableSpace; private final String password = "Password"; private final String keytab = "Keytab"; private final String NO_FILE_SELECTED = BaseMessages.getString( PKG, "NamedClusterDialog.noFileSelected" ); private final String fileSeparator = System.getProperty( "file.separator" ); private static final Class PKG = KerberosSettingsPage.class; public KerberosSettingsPage( VariableSpace variables, ThinNameClusterModel model ) { super( KerberosSettingsPage.class.getSimpleName() ); variableSpace = variables; thinNameClusterModel = model; setPageComplete( false ); } public void createControl( Composite composite ) { parent = new Composite( composite, SWT.NONE ); props = PropsUI.getInstance(); props.setLook( parent ); GridLayout gridLayout = new GridLayout( ONE_COLUMN, false ); parent.setLayout( gridLayout ); Composite basePanel = new Composite( parent, SWT.NONE ); //START OF MAIN LAYOUT GridLayout baseGridLayout = new GridLayout( ONE_COLUMN, false ); baseGridLayout.marginWidth = 60; //TO CENTER CONTENTS baseGridLayout.marginTop = 10; //TO CENTER CONTENTS baseGridLayout.marginBottom = 30; 
baseGridLayout.marginLeft = 20; basePanel.setLayout( baseGridLayout ); GridData basePanelGridData = new GridData( SWT.FILL, SWT.FILL, false, false ); basePanel.setLayoutData( basePanelGridData ); props.setLook( basePanel ); //END OF MAIN LAYOUT mainPanel = new Composite( basePanel, SWT.NONE ); mainPanel.setLayout( new GridLayout( ONE_COLUMN, false ) ); GridData mainPanelGridData = new GridData( SWT.FILL, SWT.FILL, false, false ); mainPanelGridData.heightHint = 510; //Height of the panel (WILL NEED TO ADJUST) mainPanel.setLayoutData( mainPanelGridData ); props.setLook( mainPanel ); GridData securityMethodLableGridData = new GridData(); securityMethodLableGridData.widthHint = 400; // Label width createLabel( mainPanel, BaseMessages.getString( PKG, "NamedClusterDialog.securityMethod" ), securityMethodLableGridData, props ); GridData securityMethodComboGridData = new GridData(); securityMethodComboGridData.widthHint = 400; // TextField width securityMethodCombo = new CCombo( mainPanel, SWT.SINGLE | SWT.READ_ONLY | SWT.BORDER ); securityMethodCombo.setLayoutData( securityMethodComboGridData ); securityMethodCombo.add( password ); securityMethodCombo.add( keytab ); Listener securityMethodComboListener = e -> displaySecurityMethodFields(); securityMethodCombo.addListener( SWT.Selection, securityMethodComboListener ); props.setLook( securityMethodCombo ); setControl( parent ); initialize( thinNameClusterModel ); } private void displaySecurityMethodFields() { if ( securityMethodCombo.getText().equals( password ) ) { createPasswordAuthenticationFields(); updatePasswordFields( thinNameClusterModel ); } if ( securityMethodCombo.getText().equals( keytab ) ) { createKeytabAuthenticationFields(); updateKeytabFields( thinNameClusterModel ); } validate(); } private void disposeComponents() { if ( passwordAuthenticationPanel != null ) { passwordAuthenticationPanel.dispose(); passwordAuthenticationPanel = null; } if ( keytabAuthenticationPanel != null ) { keytabAuthenticationPanel.dispose(); keytabAuthenticationPanel = null; } mainPanel.pack(); } private void createPasswordAuthenticationFields() { disposeComponents(); passwordAuthenticationPanel = new Composite( mainPanel, SWT.NONE ); GridLayout authenticationPanelGridLayout = new GridLayout( TWO_COLUMNS, true ); authenticationPanelGridLayout.marginWidth = 0; passwordAuthenticationPanel.setLayout( authenticationPanelGridLayout ); GridData authenticationPanelGridData = new GridData( SWT.FILL, SWT.FILL, false, false ); passwordAuthenticationPanel.setLayoutData( authenticationPanelGridData ); props.setLook( passwordAuthenticationPanel ); GridData authenticationUsernameGridData = new GridData( SWT.BEGINNING, SWT.FILL, true, false ); createLabel( passwordAuthenticationPanel, BaseMessages.getString( PKG, "NamedClusterDialog.authenticationUsername" ), authenticationUsernameGridData, props ); GridData authenticationPasswordGridData = new GridData(); createLabel( passwordAuthenticationPanel, BaseMessages.getString( PKG, "NamedClusterDialog.password" ), authenticationPasswordGridData, props ); GridData authenticationUserNameTextFieldGridData = new GridData(); authenticationUserNameTextFieldGridData.widthHint = Const.isLinux() ? 197 : 200; // TextField width authenticationUserNameTextField = createText( passwordAuthenticationPanel, "", authenticationUserNameTextFieldGridData, props, variableSpace, clusterListener ); GridData authenticationPasswordTextFieldGroupGridData = new GridData(); authenticationPasswordTextFieldGroupGridData.widthHint = Const.isLinux() ? 
197 : 200; // TextField width authenticationPasswordTextField = createText( passwordAuthenticationPanel, "", authenticationPasswordTextFieldGroupGridData, props, variableSpace, clusterListener ); authenticationPasswordTextField.setEchoChar( '*' ); if ( isConnectedToRepo() ) { GridData impersonationUsernameGridData = new GridData( SWT.BEGINNING, SWT.FILL, true, false ); createLabel( passwordAuthenticationPanel, BaseMessages.getString( PKG, "NamedClusterDialog.impersonationUsername" ), impersonationUsernameGridData, props ); GridData impersonationPasswordGridData = new GridData(); createLabel( passwordAuthenticationPanel, BaseMessages.getString( PKG, "NamedClusterDialog.password" ), impersonationPasswordGridData, props ); GridData impersonationUserNameTextFieldGridData = new GridData(); impersonationUserNameTextFieldGridData.widthHint = Const.isLinux() ? 197 : 200; // TextField width impersonationUserNameTextField = createText( passwordAuthenticationPanel, "", impersonationUserNameTextFieldGridData, props, variableSpace, clusterListener ); GridData impersonationPasswordTextFieldGroupGridData = new GridData(); impersonationPasswordTextFieldGroupGridData.widthHint = Const.isLinux() ? 197 : 200; // TextField width impersonationPasswordTextField = createText( passwordAuthenticationPanel, "", impersonationPasswordTextFieldGroupGridData, props, variableSpace, clusterListener ); impersonationPasswordTextField.setEchoChar( '*' ); } mainPanel.pack(); } private void createKeytabAuthenticationFields() { disposeComponents(); keytabAuthenticationPanel = new Composite( mainPanel, SWT.NONE ); GridLayout authenticationPanelGridLayout = new GridLayout( ONE_COLUMN, true ); authenticationPanelGridLayout.marginWidth = 0; keytabAuthenticationPanel.setLayout( authenticationPanelGridLayout ); GridData authenticationPanelGridData = new GridData( SWT.FILL, SWT.FILL, false, false ); keytabAuthenticationPanel.setLayoutData( authenticationPanelGridData ); props.setLook( keytabAuthenticationPanel ); GridData authenticationUsernameGridData = new GridData( SWT.BEGINNING, SWT.FILL, true, false ); createLabel( keytabAuthenticationPanel, BaseMessages.getString( PKG, "NamedClusterDialog.authenticationUsername" ), authenticationUsernameGridData, props ); GridData authenticationUserNameTextFieldGridData = new GridData(); authenticationUserNameTextFieldGridData.widthHint = Const.isLinux() ? 
400 : 405; // TextField width authenticationUserNameTextField = createText( keytabAuthenticationPanel, "", authenticationUserNameTextFieldGridData, props, variableSpace, clusterListener ); GridData authenticationPasswordGridData = new GridData(); createLabel( keytabAuthenticationPanel, BaseMessages.getString( PKG, "NamedClusterDialog.authenticationKeytab" ), authenticationPasswordGridData, props ); Composite authenticationKeytabPanel = new Composite( keytabAuthenticationPanel, SWT.NONE ); GridLayout authenticationKeytabPanelGridLayout = new GridLayout( TWO_COLUMNS, false ); authenticationKeytabPanelGridLayout.marginWidth = 0; authenticationKeytabPanel.setLayout( authenticationKeytabPanelGridLayout ); GridData authenticationKeytabPanelGridData = new GridData( SWT.FILL, SWT.FILL, false, false ); authenticationKeytabPanel.setLayoutData( authenticationKeytabPanelGridData ); props.setLook( authenticationKeytabPanel ); authenticationKeytabText = new Text( authenticationKeytabPanel, SWT.BORDER ); authenticationKeytabText.setEditable( false ); GridData authenticationKeytabTextGridData = new GridData(); authenticationKeytabTextGridData.widthHint = Const.isLinux() ? 310 : 341; authenticationKeytabText.setLayoutData( authenticationKeytabTextGridData ); props.setLook( authenticationKeytabText ); Button authenticationBrowseButton = new Button( authenticationKeytabPanel, SWT.PUSH ); authenticationBrowseButton.setText( BaseMessages.getString( PKG, "NamedClusterDialog.browse" ) ); props.setLook( authenticationBrowseButton ); Listener authenticationBrowseListener = e -> authenticationBrowse(); authenticationBrowseButton.addListener( SWT.Selection, authenticationBrowseListener ); if ( isConnectedToRepo() ) { GridData impersonationUsernameGridData = new GridData( SWT.BEGINNING, SWT.FILL, true, false ); createLabel( keytabAuthenticationPanel, BaseMessages.getString( PKG, "NamedClusterDialog.impersonationUsername" ), impersonationUsernameGridData, props ); GridData impersonationUserNameTextFieldGridData = new GridData(); impersonationUserNameTextFieldGridData.widthHint = Const.isLinux() ? 400 : 405; // TextField width impersonationUserNameTextField = createText( keytabAuthenticationPanel, "", impersonationUserNameTextFieldGridData, props, variableSpace, clusterListener ); GridData impersonationPasswordGridData = new GridData(); createLabel( keytabAuthenticationPanel, BaseMessages.getString( PKG, "NamedClusterDialog.impersonationKeytab" ), impersonationPasswordGridData, props ); Composite impersonationKeytabPanel = new Composite( keytabAuthenticationPanel, SWT.NONE ); GridLayout impersonationKeytabPanelGridLayout = new GridLayout( TWO_COLUMNS, false ); impersonationKeytabPanelGridLayout.marginWidth = 0; impersonationKeytabPanel.setLayout( impersonationKeytabPanelGridLayout ); GridData impersonationKeytabPanelGridData = new GridData( SWT.FILL, SWT.FILL, false, false ); impersonationKeytabPanel.setLayoutData( impersonationKeytabPanelGridData ); props.setLook( impersonationKeytabPanel ); impersonationKeytabText = new Text( impersonationKeytabPanel, SWT.BORDER ); impersonationKeytabText.setEditable( false ); GridData impersonationKeytabTextGridData = new GridData(); impersonationKeytabTextGridData.widthHint = Const.isLinux() ? 
310 : 341; impersonationKeytabText.setLayoutData( impersonationKeytabTextGridData ); props.setLook( impersonationKeytabText ); Button impersonationBrowseButton = new Button( impersonationKeytabPanel, SWT.PUSH ); impersonationBrowseButton.setText( BaseMessages.getString( PKG, "NamedClusterDialog.browse" ) ); props.setLook( impersonationBrowseButton ); Listener impersonationBrowseListener = e -> impersonationBrowse(); impersonationBrowseButton.addListener( SWT.Selection, impersonationBrowseListener ); Button clearImpersonationButton = new Button( impersonationKeytabPanel, SWT.PUSH ); clearImpersonationButton.setText( BaseMessages.getString( PKG, "NamedClusterDialog.clear" ) ); props.setLook( clearImpersonationButton ); Listener clearImpersonationListener = e -> clearImpersonation(); clearImpersonationButton.addListener( SWT.Selection, clearImpersonationListener ); } mainPanel.pack(); } private void validate() { if ( securityMethodCombo.getText().equals( password ) ) { thinNameClusterModel.setKerberosSubType( password ); thinNameClusterModel.setKerberosAuthenticationUsername( authenticationUserNameTextField.getText() ); thinNameClusterModel.setKerberosAuthenticationPassword( authenticationPasswordTextField.getText() ); if ( isConnectedToRepo() ) { thinNameClusterModel.setKerberosImpersonationUsername( impersonationUserNameTextField.getText() ); thinNameClusterModel.setKerberosImpersonationPassword( impersonationPasswordTextField.getText() ); setPageComplete( ( !thinNameClusterModel.getKerberosAuthenticationUsername().isBlank() && !thinNameClusterModel.getKerberosAuthenticationPassword().isBlank() ) || ( !thinNameClusterModel.getKerberosImpersonationUsername().isBlank() && !thinNameClusterModel.getKerberosImpersonationPassword().isBlank() ) ); } else { setPageComplete( !thinNameClusterModel.getKerberosAuthenticationUsername().isBlank() && !thinNameClusterModel.getKerberosAuthenticationPassword().isBlank() ); } } if ( securityMethodCombo.getText().equals( keytab ) ) { thinNameClusterModel.setKerberosSubType( keytab ); thinNameClusterModel.setKerberosAuthenticationUsername( authenticationUserNameTextField.getText() ); thinNameClusterModel.setKeytabAuthFile( authenticationKeytabText.getData() .equals( NO_FILE_SELECTED ) ? "" : (String) authenticationKeytabText.getData() ); if ( !thinNameClusterModel.getKeytabAuthFile().isBlank() ) { List<SimpleImmutableEntry<String, String>> siteFiles = thinNameClusterModel.getSiteFiles(); List<SimpleImmutableEntry<String, String>> result = siteFiles.stream().filter( siteFile -> siteFile.getValue().equals( "keytabAuthFile" ) ).collect( Collectors.toList() ); if ( !result.isEmpty() ) { siteFiles.remove( result.get( 0 ) ); } siteFiles.add( new SimpleImmutableEntry<>( thinNameClusterModel.getKeytabAuthFile(), "keytabAuthFile" ) ); } if ( isConnectedToRepo() ) { thinNameClusterModel.setKerberosImpersonationUsername( impersonationUserNameTextField.getText() ); thinNameClusterModel.setKeytabImpFile( impersonationKeytabText.getData() .equals( NO_FILE_SELECTED ) ?
"" : (String) impersonationKeytabText.getData() ); List<SimpleImmutableEntry<String, String>> siteFiles = thinNameClusterModel.getSiteFiles(); List<SimpleImmutableEntry<String, String>> result = siteFiles.stream().filter( siteFile -> siteFile.getValue().equals( "keytabImpFile" ) ).collect( Collectors.toList() ); if ( !result.isEmpty() ) { siteFiles.remove( result.get( 0 ) ); } if ( !thinNameClusterModel.getKeytabImpFile().isBlank() ) { siteFiles.add( new SimpleImmutableEntry<>( thinNameClusterModel.getKeytabImpFile(), "keytabImpFile" ) ); } setPageComplete( !thinNameClusterModel.getKeytabAuthFile().isBlank() ); } else { setPageComplete( !thinNameClusterModel.getKeytabAuthFile().isBlank() ); } } } // FOR DEV MODE ONLY private boolean isDevMode() { NamedClusterDialog namedClusterDialog = (NamedClusterDialog) getWizard(); return namedClusterDialog.isDevMode(); } // FOR DEV MODE ONLY private boolean isConnectedToRepo() { NamedClusterDialog namedClusterDialog = (NamedClusterDialog) getWizard(); boolean isConnectedToRepo = namedClusterDialog.isConnectedToRepo(); if ( isDevMode() ) { isConnectedToRepo = true; } return isConnectedToRepo; } public void initialize( ThinNameClusterModel model ) { setTitle( ( (NamedClusterDialog) getWizard() ).isEditMode() ? BaseMessages.getString( PKG, "NamedClusterDialog.editCluster.title" ) : ( (NamedClusterDialog) getWizard() ).getDialogState().equals( "import" ) ? BaseMessages.getString( PKG, "NamedClusterDialog.importCluster.title" ) : BaseMessages.getString( PKG, "NamedClusterDialog.newCluster.title" ) ); if ( isConnectedToRepo() ) { setDescription( BaseMessages.getString( PKG, "NamedClusterDialog.repositoryNotification" ) ); } thinNameClusterModel = model; securityMethodCombo.setText( model.getKerberosSubType() ); if ( securityMethodCombo.getText().equals( password ) ) { createPasswordAuthenticationFields(); updatePasswordFields( model ); } if ( securityMethodCombo.getText().equals( keytab ) ) { createKeytabAuthenticationFields(); updateKeytabFields( model ); } validate(); } private void updatePasswordFields( ThinNameClusterModel model ) { authenticationUserNameTextField.setText( model.getKerberosAuthenticationUsername() ); authenticationPasswordTextField.setText( decodePassword( model.getKerberosAuthenticationPassword() ) ); if ( isConnectedToRepo() ) { impersonationUserNameTextField.setText( model.getKerberosImpersonationUsername() ); impersonationPasswordTextField.setText( decodePassword( model.getKerberosImpersonationPassword() ) ); } } private void updateKeytabFields( ThinNameClusterModel model ) { authenticationKeytabText.setText( model.getKeytabAuthFile().isBlank() ? NO_FILE_SELECTED : model.getKeytabAuthFile().substring( model.getKeytabAuthFile().lastIndexOf( fileSeparator ) + 1 ) ); authenticationKeytabText.setData( model.getKeytabAuthFile().isBlank() ? NO_FILE_SELECTED : model.getKeytabAuthFile() ); authenticationUserNameTextField.setText( model.getKerberosAuthenticationUsername() ); if ( isConnectedToRepo() ) { impersonationKeytabText.setText( model.getKeytabImpFile().isBlank() ? NO_FILE_SELECTED : model.getKeytabImpFile().substring( model.getKeytabImpFile().lastIndexOf( fileSeparator ) + 1 ) ); impersonationKeytabText.setData( model.getKeytabImpFile().isBlank() ?
NO_FILE_SELECTED : model.getKeytabImpFile() ); impersonationUserNameTextField.setText( model.getKerberosImpersonationUsername() ); } } private void authenticationBrowse() { FileDialog dialog = new FileDialog( mainPanel.getShell(), SWT.OPEN ); String path = dialog.open(); if ( path != null ) { File file = new File( path ); if ( file.isFile() ) { authenticationKeytabText.setText( file.toString() ); authenticationKeytabText.setData( file.toString() ); validate(); } } } private void impersonationBrowse() { FileDialog dialog = new FileDialog( mainPanel.getShell(), SWT.OPEN ); String path = dialog.open(); if ( path != null ) { File file = new File( path ); if ( file.isFile() ) { impersonationKeytabText.setText( file.toString() ); impersonationKeytabText.setData( file.toString() ); validate(); } } } private void clearImpersonation() { impersonationKeytabText.setText( "" ); impersonationKeytabText.setData( NO_FILE_SELECTED ); validate(); } public IWizardPage getNextPage() { return null; } public void performHelp() { HelpUtils.openHelpDialog( parent.getShell(), "", BaseMessages.getString( PKG, "NamedClusterDialog.help" ), "" ); } } ================================================ FILE: kettle-plugins/hadoop-cluster/ui/src/main/java/org/pentaho/big/data/kettle/plugins/hadoopcluster/ui/dialog/wizard/pages/KnoxSettingsPage.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.hadoopcluster.ui.dialog.wizard.pages; import org.eclipse.jface.wizard.IWizardPage; import org.eclipse.jface.wizard.WizardPage; import org.eclipse.swt.SWT; import org.eclipse.swt.layout.GridData; import org.eclipse.swt.layout.GridLayout; import org.eclipse.swt.widgets.Composite; import org.eclipse.swt.widgets.Listener; import org.eclipse.swt.widgets.Text; import org.pentaho.big.data.kettle.plugins.hadoopcluster.ui.dialog.wizard.NamedClusterDialog; import org.pentaho.big.data.kettle.plugins.hadoopcluster.ui.model.ThinNameClusterModel; import org.pentaho.di.core.Const; import org.pentaho.di.core.variables.VariableSpace; import org.pentaho.di.i18n.BaseMessages; import org.pentaho.di.ui.core.PropsUI; import org.pentaho.di.ui.core.widget.TextVar; import org.pentaho.di.ui.util.HelpUtils; import static org.pentaho.big.data.kettle.plugins.hadoopcluster.ui.dialog.wizard.util.NamedClusterHelper.ONE_COLUMN; import static org.pentaho.big.data.kettle.plugins.hadoopcluster.ui.dialog.wizard.util.NamedClusterHelper.TWO_COLUMNS; import static org.pentaho.big.data.kettle.plugins.hadoopcluster.ui.dialog.wizard.util.NamedClusterHelper.createLabel; import static org.pentaho.big.data.kettle.plugins.hadoopcluster.ui.dialog.wizard.util.NamedClusterHelper.createText; import static org.pentaho.big.data.kettle.plugins.hadoopcluster.ui.dialog.wizard.util.NamedClusterHelper.decodePassword; public class KnoxSettingsPage extends WizardPage { private PropsUI props; private Composite basePanel; private Composite parent; private Composite mainPanel; private Text gatewayURLTextField; private TextVar gatewayUsernameTextfield; private TextVar gatewayPasswordTextField; private final VariableSpace variableSpace; private final ThinNameClusterModel 
thinNameClusterModel; private final Listener clusterListener = e -> validate(); private static final Class PKG = KnoxSettingsPage.class; public KnoxSettingsPage( VariableSpace variables, ThinNameClusterModel model ) { super( KnoxSettingsPage.class.getSimpleName() ); thinNameClusterModel = model; variableSpace = variables; } public void createControl( Composite composite ) { parent = new Composite( composite, SWT.NONE ); props = PropsUI.getInstance(); props.setLook( parent ); GridLayout gridLayout = new GridLayout( ONE_COLUMN, false ); parent.setLayout( gridLayout ); basePanel = new Composite( parent, SWT.NONE ); //START OF MAIN LAYOUT GridLayout baseGridLayout = new GridLayout( ONE_COLUMN, false ); baseGridLayout.marginWidth = 60; //TO CENTER CONTENTS baseGridLayout.marginTop = 10; //TO CENTER CONTENTS baseGridLayout.marginBottom = 30; baseGridLayout.marginLeft = 20; basePanel.setLayout( baseGridLayout ); GridData basePanelGridData = new GridData( SWT.FILL, SWT.FILL, false, false ); basePanel.setLayoutData( basePanelGridData ); props.setLook( basePanel ); //END OF MAIN LAYOUT mainPanel = new Composite( basePanel, SWT.NONE ); mainPanel.setLayout( new GridLayout( ONE_COLUMN, false ) ); GridData mainPanelGridData = new GridData( SWT.FILL, SWT.FILL, false, false ); mainPanelGridData.heightHint = 510; //Height of the panel (WILL NEED TO ADJUST) mainPanel.setLayoutData( mainPanelGridData ); props.setLook( mainPanel ); GridData gatewayUrlLabelGridData = new GridData(); gatewayUrlLabelGridData.widthHint = 400; // Label width createLabel( mainPanel, BaseMessages.getString( PKG, "NamedClusterDialog.gatewayURL" ), gatewayUrlLabelGridData, props ); GridData gatewayUrlTextfieldGridData = new GridData(); gatewayUrlTextfieldGridData.widthHint = Const.isLinux() ? 380 : 390; // TextField width gatewayURLTextField = new Text( mainPanel, SWT.SINGLE | SWT.LEFT | SWT.BORDER | SWT.PASSWORD ); gatewayURLTextField.setText( "" ); gatewayURLTextField.setLayoutData( gatewayUrlTextfieldGridData ); gatewayURLTextField.addListener( SWT.CHANGED, clusterListener ); gatewayURLTextField.addListener( SWT.MouseExit, clusterListener ); props.setLook( gatewayURLTextField ); Composite gatewayAuthenticationPanel = new Composite( mainPanel, SWT.NONE ); GridLayout authenticationPanelGridLayout = new GridLayout( TWO_COLUMNS, true ); authenticationPanelGridLayout.marginWidth = 0; gatewayAuthenticationPanel.setLayout( authenticationPanelGridLayout ); GridData authenticationPanelGridData = new GridData( SWT.FILL, SWT.FILL, false, false ); gatewayAuthenticationPanel.setLayoutData( authenticationPanelGridData ); props.setLook( gatewayAuthenticationPanel ); GridData gatewayUsernameLabel = new GridData( SWT.BEGINNING, SWT.FILL, true, false ); createLabel( gatewayAuthenticationPanel, BaseMessages.getString( PKG, "NamedClusterDialog.gatewayUsername" ), gatewayUsernameLabel, props ); GridData gatewayPasswordLabel = new GridData(); createLabel( gatewayAuthenticationPanel, BaseMessages.getString( PKG, "NamedClusterDialog.gatewayPassword" ), gatewayPasswordLabel, props ); GridData gatewayUsernameTextFieldGridData = new GridData(); gatewayUsernameTextFieldGridData.widthHint = Const.isLinux() ? 197 : 200; // TextField width gatewayUsernameTextfield = createText( gatewayAuthenticationPanel, "", gatewayUsernameTextFieldGridData, props, variableSpace, clusterListener ); GridData gatewayPasswordTextFieldGridData = new GridData(); gatewayPasswordTextFieldGridData.widthHint = Const.isLinux() ? 
197 : 200; // TextField width gatewayPasswordTextField = createText( gatewayAuthenticationPanel, "", gatewayPasswordTextFieldGridData, props, variableSpace, clusterListener ); gatewayPasswordTextField.setEchoChar( '*' ); setControl( parent ); initialize( thinNameClusterModel ); } // FOR DEV MODE ONLY private boolean isDevMode() { NamedClusterDialog namedClusterDialog = (NamedClusterDialog) getWizard(); return namedClusterDialog.isDevMode(); } // FOR DEV MODE ONLY private boolean isConnectedToRepo() { NamedClusterDialog namedClusterDialog = (NamedClusterDialog) getWizard(); boolean isConnectedToRepo = namedClusterDialog.isConnectedToRepo(); if ( isDevMode() ) { isConnectedToRepo = true; } return isConnectedToRepo; } public void initialize( ThinNameClusterModel model ) { setTitle( ( (NamedClusterDialog) getWizard() ).isEditMode() ? BaseMessages.getString( PKG, "NamedClusterDialog.editCluster.title" ) : ( (NamedClusterDialog) getWizard() ).getDialogState().equals( "import" ) ? BaseMessages.getString( PKG, "NamedClusterDialog.importCluster.title" ) : BaseMessages.getString( PKG, "NamedClusterDialog.newCluster.title" ) ); if ( isConnectedToRepo() ) { setDescription( BaseMessages.getString( PKG, "NamedClusterDialog.repositoryNotification" ) ); } gatewayURLTextField.setText( decodePassword( model.getGatewayUrl() ) ); gatewayUsernameTextfield.setText( model.getGatewayUsername() ); gatewayPasswordTextField.setText( decodePassword( model.getGatewayPassword() ) ); validate(); } private void validate() { thinNameClusterModel.setGatewayUrl( gatewayURLTextField.getText() ); thinNameClusterModel.setGatewayUsername( gatewayUsernameTextfield.getText() ); thinNameClusterModel.setGatewayPassword( gatewayPasswordTextField.getText() ); setPageComplete( !thinNameClusterModel.getGatewayUrl().isBlank() && !thinNameClusterModel.getGatewayUsername().isBlank() && !thinNameClusterModel.getGatewayPassword().isBlank() ); } public IWizardPage getNextPage() { return null; } public void performHelp() { HelpUtils.openHelpDialog( parent.getShell(), "", BaseMessages.getString( PKG, "NamedClusterDialog.help" ), "" ); } } ================================================ FILE: kettle-plugins/hadoop-cluster/ui/src/main/java/org/pentaho/big/data/kettle/plugins/hadoopcluster/ui/dialog/wizard/pages/ReportPage.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. 
* * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.hadoopcluster.ui.dialog.wizard.pages; import org.eclipse.jface.wizard.IWizardPage; import org.eclipse.jface.wizard.WizardPage; import org.eclipse.swt.SWT; import org.eclipse.swt.graphics.Font; import org.eclipse.swt.graphics.FontData; import org.eclipse.swt.layout.GridData; import org.eclipse.swt.layout.GridLayout; import org.eclipse.swt.widgets.Button; import org.eclipse.swt.widgets.Composite; import org.eclipse.swt.widgets.Label; import org.eclipse.swt.widgets.Listener; import org.pentaho.big.data.kettle.plugins.hadoopcluster.ui.dialog.wizard.NamedClusterDialog; import org.pentaho.big.data.kettle.plugins.hadoopcluster.ui.endpoints.TestCategory; import org.pentaho.big.data.kettle.plugins.hadoopcluster.ui.model.ThinNameClusterModel; import org.pentaho.di.i18n.BaseMessages; import org.pentaho.di.ui.core.PropsUI; import org.pentaho.di.ui.core.gui.GUIResource; import org.pentaho.di.ui.util.HelpUtils; import static org.pentaho.big.data.kettle.plugins.hadoopcluster.ui.dialog.wizard.util.NamedClusterHelper.ONE_COLUMN; import static org.pentaho.big.data.kettle.plugins.hadoopcluster.ui.dialog.wizard.util.NamedClusterHelper.createLabel; import static org.pentaho.big.data.kettle.plugins.hadoopcluster.ui.dialog.wizard.util.NamedClusterHelper.createLabelWithStyle; public class ReportPage extends WizardPage { private PropsUI props; private Composite basePanel; private Composite parent; private Composite mainPanel; private Label statusLabel; private Label statusDescriptionLabel; private Label iconLabel; private Button viewTestResultsButton; private Object[] testResults; private ThinNameClusterModel thinNameClusterModel; private static final Class PKG = ReportPage.class; private static final String SUCCESS_IMG = "images/success.svg"; private static final String FAIL_IMG = "images/fail.svg"; public ReportPage( ThinNameClusterModel model ) { super( ReportPage.class.getSimpleName() ); thinNameClusterModel = model; } public void createControl( Composite composite ) { parent = new Composite( composite, SWT.NONE ); props = PropsUI.getInstance(); props.setLook( parent ); GridLayout gridLayout = new GridLayout( ONE_COLUMN, false ); parent.setLayout( gridLayout ); basePanel = new Composite( parent, SWT.NONE ); //START OF MAIN LAYOUT GridLayout baseGridLayout = new GridLayout( ONE_COLUMN, false ); baseGridLayout.marginWidth = 60; //TO CENTER CONTENTS baseGridLayout.marginTop = 10; //TO CENTER CONTENTS baseGridLayout.marginBottom = 30; baseGridLayout.marginLeft = 20; basePanel.setLayout( baseGridLayout ); GridData basePanelGridData = new GridData( SWT.FILL, SWT.FILL, false, false ); basePanel.setLayoutData( basePanelGridData ); props.setLook( basePanel ); //END OF MAIN LAYOUT mainPanel = new Composite( basePanel, SWT.NONE ); mainPanel.setLayout( new GridLayout( ONE_COLUMN, false ) ); GridData mainPanelGridData = new GridData( SWT.FILL, SWT.FILL, false, false ); mainPanelGridData.heightHint = 510; //Height of the panel (WILL NEED TO ADJUST) mainPanel.setLayoutData( mainPanelGridData ); props.setLook( mainPanel ); GridData iconGridData = new GridData(); iconGridData.widthHint = 400; // Label width iconGridData.heightHint = 100; // Label height iconLabel = createLabelWithStyle( mainPanel, "", iconGridData, props, SWT.NONE ); iconLabel.setAlignment( SWT.CENTER ); GridData statusGridData = new GridData(); statusGridData.widthHint = 400; // Label width 
statusGridData.heightHint = 50; // Label height statusLabel = createLabelWithStyle( mainPanel, "", statusGridData, props, SWT.NONE ); statusLabel.setFont( new Font( statusLabel.getDisplay(), new FontData( "Arial", 20, SWT.NONE ) ) ); statusLabel.setAlignment( SWT.CENTER ); GridData statusDescriptionGridData = new GridData(); statusDescriptionGridData.widthHint = 400; // Label width statusDescriptionGridData.heightHint = 100; // Label height statusDescriptionLabel = createLabelWithStyle( mainPanel, "", statusDescriptionGridData, props, SWT.WRAP ); statusDescriptionLabel.setAlignment( SWT.CENTER ); GridData questionLabelGridData = new GridData(); questionLabelGridData.widthHint = 400; // Label width questionLabelGridData.heightHint = 50; // Label height createLabel( mainPanel, BaseMessages.getString( PKG, "NamedClusterDialog.question" ), questionLabelGridData, props ).setAlignment( SWT.CENTER ); Button editClusterButton = new Button( mainPanel, SWT.PUSH ); GridData editButtonGridData = new GridData( SWT.BEGINNING, SWT.FILL, true, false ); editButtonGridData.widthHint = 155; editButtonGridData.horizontalAlignment = SWT.CENTER; editClusterButton.setLayoutData( editButtonGridData ); editClusterButton.setText( BaseMessages.getString( PKG, "NamedClusterDialog.editCluster" ) ); Listener editClusterListener = e -> editCluster(); editClusterButton.addListener( SWT.Selection, editClusterListener ); props.setLook( editClusterButton ); Button newClusterButton = new Button( mainPanel, SWT.PUSH ); GridData newButtonGridData = new GridData( SWT.BEGINNING, SWT.FILL, true, false ); newButtonGridData.widthHint = 155; newButtonGridData.horizontalAlignment = SWT.CENTER; newClusterButton.setLayoutData( newButtonGridData ); newClusterButton.setText( ( (NamedClusterDialog) getWizard() ).getDialogState().equals( "import" ) ?
BaseMessages.getString( PKG, "NamedClusterDialog.importNewCluster" ) : BaseMessages.getString( PKG, "NamedClusterDialog.createNewCluster" ) ); Listener newClusterListener = e -> createNewCluster(); newClusterButton.addListener( SWT.Selection, newClusterListener ); props.setLook( newClusterButton ); viewTestResultsButton = new Button( mainPanel, SWT.PUSH ); GridData viewTestResultsButtonGridData = new GridData( SWT.BEGINNING, SWT.FILL, true, false ); viewTestResultsButtonGridData.widthHint = 155; viewTestResultsButtonGridData.horizontalAlignment = SWT.CENTER; viewTestResultsButton.setLayoutData( viewTestResultsButtonGridData ); viewTestResultsButton.setText( BaseMessages.getString( PKG, "NamedClusterDialog.viewTestResults" ) ); Listener viewTestResultsListener = e -> viewTestResults(); viewTestResultsButton.addListener( SWT.Selection, viewTestResultsListener ); props.setLook( viewTestResultsButton ); setControl( parent ); initialize( thinNameClusterModel ); } public void setTestResult( String status ) { if ( status.equals( BaseMessages.getString( PKG, "NamedClusterDialog.test.pass" ) ) ) { statusLabel.setText( BaseMessages.getString( PKG, "NamedClusterDialog.pass" ) ); statusDescriptionLabel.setText( BaseMessages.getString( PKG, "NamedClusterDialog.description.pass" ) ); iconLabel.setImage( GUIResource.getInstance().getImage( SUCCESS_IMG, getClass().getClassLoader(), 70, 70 ) ); viewTestResultsButton.setVisible( true ); } else if ( status.equals( BaseMessages.getString( PKG, "NamedClusterDialog.test.importFailed" ) ) ) { statusLabel.setText( BaseMessages.getString( PKG, "NamedClusterDialog.import.fail" ) ); statusDescriptionLabel.setText( BaseMessages.getString( PKG, "NamedClusterDialog.import.fail.description" ) ); iconLabel.setImage( GUIResource.getInstance().getImage( FAIL_IMG, getClass().getClassLoader(), 70, 70 ) ); viewTestResultsButton.setVisible( false ); } else { statusLabel.setText( BaseMessages.getString( PKG, "NamedClusterDialog.fail" ) ); statusDescriptionLabel.setText( BaseMessages.getString( PKG, "NamedClusterDialog.fail.description" ) ); iconLabel.setImage( GUIResource.getInstance().getImage( FAIL_IMG, getClass().getClassLoader(), 70, 70 ) ); viewTestResultsButton.setVisible( true ); } mainPanel.pack(); } public void setTestResults( Object[] categories ) { testResults = categories; String status = BaseMessages.getString( PKG, "NamedClusterDialog.test.pass" ); for ( Object category : testResults ) { TestCategory testCategory = (TestCategory) category; if ( !testCategory.getCategoryStatus().equals( status ) && testCategory.isCategoryActive() ) { status = testCategory.getCategoryStatus(); break; } } setTestResult( status ); } // FOR DEV MODE ONLY private boolean isDevMode() { NamedClusterDialog namedClusterDialog = (NamedClusterDialog) getWizard(); return namedClusterDialog.isDevMode(); } // FOR DEV MODE ONLY private boolean isConnectedToRepo() { NamedClusterDialog namedClusterDialog = (NamedClusterDialog) getWizard(); boolean isConnectedToRepo = namedClusterDialog.isConnectedToRepo(); if ( isDevMode() ) { isConnectedToRepo = true; } return isConnectedToRepo; } public void initialize( ThinNameClusterModel model ) { setTitle( ( (NamedClusterDialog) getWizard() ).isEditMode() ? BaseMessages.getString( PKG, "NamedClusterDialog.editCluster.title" ) : ( (NamedClusterDialog) getWizard() ).getDialogState().equals( "import" ) ? 
BaseMessages.getString( PKG, "NamedClusterDialog.importCluster.title" ) : BaseMessages.getString( PKG, "NamedClusterDialog.newCluster.title" ) ); if ( isConnectedToRepo() ) { setDescription( BaseMessages.getString( PKG, "NamedClusterDialog.repositoryNotification" ) ); } thinNameClusterModel = model; } private void viewTestResults() { TestResultsPage testResultsPage = (TestResultsPage) getWizard().getPage( TestResultsPage.class.getSimpleName() ); testResultsPage.setTestResults( testResults ); getContainer().showPage( testResultsPage ); } private void editCluster() { NamedClusterDialog namedClusterDialog = (NamedClusterDialog) getWizard(); namedClusterDialog.editCluster(); } private void createNewCluster() { NamedClusterDialog namedClusterDialog = (NamedClusterDialog) getWizard(); namedClusterDialog.createNewCluster(); } public IWizardPage getPreviousPage() { return null; } public IWizardPage getNextPage() { return null; } public void performHelp() { HelpUtils.openHelpDialog( parent.getShell(), "", BaseMessages.getString( PKG, "NamedClusterDialog.help" ), "" ); } } ================================================ FILE: kettle-plugins/hadoop-cluster/ui/src/main/java/org/pentaho/big/data/kettle/plugins/hadoopcluster/ui/dialog/wizard/pages/SecuritySettingsPage.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.hadoopcluster.ui.dialog.wizard.pages; import org.eclipse.jface.wizard.IWizardPage; import org.eclipse.jface.wizard.WizardPage; import org.eclipse.swt.SWT; import org.eclipse.swt.layout.GridData; import org.eclipse.swt.layout.GridLayout; import org.eclipse.swt.widgets.Button; import org.eclipse.swt.widgets.Composite; import org.eclipse.swt.widgets.Listener; import org.pentaho.big.data.kettle.plugins.hadoopcluster.ui.dialog.wizard.NamedClusterDialog; import org.pentaho.big.data.kettle.plugins.hadoopcluster.ui.model.ThinNameClusterModel; import org.pentaho.di.i18n.BaseMessages; import org.pentaho.di.ui.core.PropsUI; import org.pentaho.di.ui.util.HelpUtils; import static org.pentaho.big.data.kettle.plugins.hadoopcluster.ui.dialog.wizard.util.NamedClusterHelper.ONE_COLUMN; import static org.pentaho.big.data.kettle.plugins.hadoopcluster.ui.dialog.wizard.util.NamedClusterHelper.createLabel; public class SecuritySettingsPage extends WizardPage { private PropsUI props; private Button noneButton; private Button kerberosButton; private Button knoxButton; private Composite basePanel; private Composite parent; private Composite mainPanel; private ThinNameClusterModel thinNameClusterModel; private NamedClusterSecurityType securityType; private final Listener securityTypeListener = e -> setSecurityType(); public enum NamedClusterSecurityType {NONE, KERBEROS, KNOX} private static final Class PKG = SecuritySettingsPage.class; public SecuritySettingsPage( ThinNameClusterModel model ) { super( SecuritySettingsPage.class.getSimpleName() ); securityType = NamedClusterSecurityType.NONE; thinNameClusterModel = model; } public void createControl( Composite composite ) { parent = new Composite( composite, SWT.NONE ); props = PropsUI.getInstance(); props.setLook( parent ); GridLayout gridLayout = 
new GridLayout( ONE_COLUMN, false ); parent.setLayout( gridLayout ); basePanel = new Composite( parent, SWT.NONE ); //START OF MAIN LAYOUT GridLayout baseGridLayout = new GridLayout( ONE_COLUMN, false ); baseGridLayout.marginWidth = 60; //TO CENTER CONTENTS baseGridLayout.marginTop = 10; //TO CENTER CONTENTS baseGridLayout.marginBottom = 30; baseGridLayout.marginLeft = 20; basePanel.setLayout( baseGridLayout ); GridData basePanelGridData = new GridData( SWT.FILL, SWT.FILL, false, false ); basePanel.setLayoutData( basePanelGridData ); props.setLook( basePanel ); //END OF MAIN LAYOUT mainPanel = new Composite( basePanel, SWT.NONE ); mainPanel.setLayout( new GridLayout( ONE_COLUMN, false ) ); GridData mainPanelGridData = new GridData( SWT.FILL, SWT.FILL, false, false ); mainPanelGridData.heightHint = 510; //Height of the panel (WILL NEED TO ADJUST) mainPanel.setLayoutData( mainPanelGridData ); props.setLook( mainPanel ); GridData clusterNameLabelGridData = new GridData(); clusterNameLabelGridData.widthHint = 400; // Label width createLabel( mainPanel, BaseMessages.getString( PKG, "NamedClusterDialog.security" ), clusterNameLabelGridData, props ); noneButton = new Button( mainPanel, SWT.RADIO ); noneButton.setText( BaseMessages.getString( PKG, "NamedClusterDialog.none" ) ); noneButton.addListener( SWT.Selection, securityTypeListener ); props.setLook( noneButton ); kerberosButton = new Button( mainPanel, SWT.RADIO ); kerberosButton.setText( BaseMessages.getString( PKG, "NamedClusterDialog.kerberos" ) ); kerberosButton.addListener( SWT.Selection, securityTypeListener ); props.setLook( kerberosButton ); knoxButton = new Button( mainPanel, SWT.RADIO ); knoxButton.setText( BaseMessages.getString( PKG, "NamedClusterDialog.knox" ) ); knoxButton.addListener( SWT.Selection, securityTypeListener ); props.setLook( knoxButton ); setControl( parent ); initialize( thinNameClusterModel ); } private void setSecurityType() { if ( noneButton.getSelection() ) { securityType = NamedClusterSecurityType.NONE; thinNameClusterModel.setSecurityType( "None" ); getContainer().updateButtons(); } if ( kerberosButton.getSelection() ) { securityType = NamedClusterSecurityType.KERBEROS; thinNameClusterModel.setSecurityType( "Kerberos" ); getContainer().updateButtons(); } if ( knoxButton.getSelection() ) { securityType = NamedClusterSecurityType.KNOX; thinNameClusterModel.setSecurityType( "Knox" ); getContainer().updateButtons(); } } // FOR DEV MODE ONLY private boolean isDevMode() { NamedClusterDialog namedClusterDialog = (NamedClusterDialog) getWizard(); return namedClusterDialog.isDevMode(); } // FOR DEV MODE ONLY private boolean isConnectedToRepo() { NamedClusterDialog namedClusterDialog = (NamedClusterDialog) getWizard(); boolean isConnectedToRepo = namedClusterDialog.isConnectedToRepo(); if ( isDevMode() ) { isConnectedToRepo = true; } return isConnectedToRepo; } public void initialize( ThinNameClusterModel model ) { setTitle( ( (NamedClusterDialog) getWizard() ).isEditMode() ? BaseMessages.getString( PKG, "NamedClusterDialog.editCluster.title" ) : ( (NamedClusterDialog) getWizard() ).getDialogState().equals( "import" ) ? 
BaseMessages.getString( PKG, "NamedClusterDialog.importCluster.title" ) : BaseMessages.getString( PKG, "NamedClusterDialog.newCluster.title" ) ); if ( isConnectedToRepo() ) { setDescription( BaseMessages.getString( PKG, "NamedClusterDialog.repositoryNotification" ) ); } thinNameClusterModel = model; noneButton.setSelection( model.getSecurityType().equals( "None" ) ); kerberosButton.setSelection( model.getSecurityType().equals( "Kerberos" ) ); knoxButton.setSelection( model.getSecurityType().equals( "Knox" ) ); if ( noneButton.getSelection() ) { securityType = NamedClusterSecurityType.NONE; } if ( kerberosButton.getSelection() ) { securityType = NamedClusterSecurityType.KERBEROS; } if ( knoxButton.getSelection() ) { securityType = NamedClusterSecurityType.KNOX; } String shimIdentifier = model.getShimIdentifier(); if( shimIdentifier == null || shimIdentifier.isEmpty() ) { NamedClusterDialog namedClusterDialog = (NamedClusterDialog) getWizard(); shimIdentifier = namedClusterDialog.getShimIdentifier(); } knoxButton.setVisible( shimIdentifier.equals( "cdpdc71" ) || shimIdentifier.equals( "hdp31" ) ); } public NamedClusterSecurityType getSecurityType() { return securityType; } public IWizardPage getNextPage() { IWizardPage nextPage = null; if ( getSecurityType().equals( NamedClusterSecurityType.KERBEROS ) ) { nextPage = getWizard().getPage( KerberosSettingsPage.class.getSimpleName() ); } if ( getSecurityType().equals( NamedClusterSecurityType.KNOX ) ) { nextPage = getWizard().getPage( KnoxSettingsPage.class.getSimpleName() ); } return nextPage; } public void performHelp() { HelpUtils.openHelpDialog( parent.getShell(), "", BaseMessages.getString( PKG, "NamedClusterDialog.help" ), "" ); } } ================================================ FILE: kettle-plugins/hadoop-cluster/ui/src/main/java/org/pentaho/big/data/kettle/plugins/hadoopcluster/ui/dialog/wizard/pages/TestResultsPage.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. 
* * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.hadoopcluster.ui.dialog.wizard.pages; import org.eclipse.jface.wizard.WizardPage; import org.eclipse.swt.SWT; import org.eclipse.swt.custom.CLabel; import org.eclipse.swt.graphics.Font; import org.eclipse.swt.graphics.FontData; import org.eclipse.swt.layout.GridData; import org.eclipse.swt.layout.GridLayout; import org.eclipse.swt.widgets.Composite; import org.eclipse.swt.widgets.ExpandBar; import org.eclipse.swt.widgets.ExpandItem; import org.eclipse.swt.widgets.Label; import org.pentaho.big.data.kettle.plugins.hadoopcluster.ui.endpoints.Test; import org.pentaho.big.data.kettle.plugins.hadoopcluster.ui.endpoints.TestCategory; import org.pentaho.big.data.kettle.plugins.hadoopcluster.ui.model.ThinNameClusterModel; import org.pentaho.di.core.Const; import org.pentaho.di.core.variables.VariableSpace; import org.pentaho.di.i18n.BaseMessages; import org.pentaho.di.ui.core.PropsUI; import org.pentaho.di.ui.core.gui.GUIResource; import org.pentaho.di.ui.util.HelpUtils; import java.util.ArrayList; import java.util.List; import static org.pentaho.big.data.kettle.plugins.hadoopcluster.ui.dialog.wizard.util.NamedClusterHelper.ONE_COLUMN; import static org.pentaho.big.data.kettle.plugins.hadoopcluster.ui.dialog.wizard.util.NamedClusterHelper.createLabelWithStyle; public class TestResultsPage extends WizardPage { private PropsUI props; private Composite basePanel; private Composite parent; private Composite mainPanel; private ExpandBar testResultsExpandBar; private ThinNameClusterModel thinNameClusterModel; private static final Class PKG = TestResultsPage.class; private static final String WARNING = BaseMessages.getString( PKG, "NamedClusterDialog.test.warning" ); private static final String FAIL = BaseMessages.getString( PKG, "NamedClusterDialog.test.fail" ); private static final String PASS = BaseMessages.getString( PKG, "NamedClusterDialog.test.pass" ); private static final String WARNING_IMG = "images/warning_category.svg"; private static final String FAIL_IMG = "images/fail_category.svg"; private static final String PASS_IMG = "images/success_category.svg"; public TestResultsPage( VariableSpace variables, ThinNameClusterModel model ) { super( TestResultsPage.class.getSimpleName() ); thinNameClusterModel = model; } public void createControl( Composite composite ) { parent = new Composite( composite, SWT.NONE ); props = PropsUI.getInstance(); props.setLook( parent ); GridLayout gridLayout = new GridLayout( ONE_COLUMN, false ); parent.setLayout( gridLayout ); basePanel = new Composite( parent, SWT.NONE ); //START OF MAIN LAYOUT GridLayout baseGridLayout = new GridLayout( ONE_COLUMN, false ); baseGridLayout.marginWidth = 60; //TO CENTER CONTENTS baseGridLayout.marginTop = 10; //TO CENTER CONTENTS baseGridLayout.marginBottom = 30; baseGridLayout.marginLeft = 20; basePanel.setLayout( baseGridLayout ); GridData basePanelGridData = new GridData( SWT.FILL, SWT.FILL, false, false ); basePanel.setLayoutData( basePanelGridData ); props.setLook( basePanel ); //END OF MAIN LAYOUT mainPanel = new Composite( basePanel, SWT.NONE ); mainPanel.setLayout( new GridLayout( ONE_COLUMN, false ) ); GridData mainPanelGridData = new GridData( SWT.FILL, SWT.FILL, false, false ); mainPanelGridData.heightHint = 510; //Height of the panel (WILL NEED TO ADJUST) mainPanel.setLayoutData( mainPanelGridData ); props.setLook( mainPanel ); GridData statusGridData = new GridData(); 
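// Header for the test-results page: the 400x50 hints set below size the centered, 20pt "Test results" title that sits above the expandable list of test categories.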
statusGridData.widthHint = 400; // Label width statusGridData.heightHint = 50; // Label height Label statusLabel = createLabelWithStyle( mainPanel, BaseMessages.getString( PKG, "NamedClusterDialog.testResults" ), statusGridData, props, SWT.NONE ); statusLabel.setFont( new Font( statusLabel.getDisplay(), new FontData( "Arial", 20, SWT.NONE ) ) ); statusLabel.setAlignment( SWT.CENTER ); setControl( parent ); initialize( thinNameClusterModel ); } private List<TestCategory> setTestResultsOrder( Object[] categories ) { List<TestCategory> testCategories = new ArrayList<>(); String[] categoryNames = new String[ 5 ]; if ( Const.isWindows() || Const.isOSX() ) { categoryNames[ 0 ] = "Kafka"; categoryNames[ 1 ] = "Oozie"; categoryNames[ 2 ] = "Job"; categoryNames[ 3 ] = "Zookeeper"; categoryNames[ 4 ] = "Hadoop"; } else { categoryNames[ 0 ] = "Hadoop"; categoryNames[ 1 ] = "Zookeeper"; categoryNames[ 2 ] = "Job"; categoryNames[ 3 ] = "Oozie"; categoryNames[ 4 ] = "Kafka"; } for ( String categoryName : categoryNames ) { TestCategory category = getTestCategory( categoryName, categories ); if ( category != null ) { testCategories.add( category ); } } return testCategories; } private TestCategory getTestCategory( String categoryName, Object[] categories ) { TestCategory testCategory = null; for ( Object category : categories ) { if ( ( (TestCategory) category ).getCategoryName().startsWith( categoryName ) ) { testCategory = (TestCategory) category; } } return testCategory; } public void setTestResults( Object[] categories ) { if ( testResultsExpandBar != null ) { testResultsExpandBar.dispose(); mainPanel.pack(); } testResultsExpandBar = new ExpandBar( mainPanel, SWT.V_SCROLL ); GridData testResultsExpandBarLayoutData = new GridData( SWT.FILL, SWT.FILL, false, false ); testResultsExpandBarLayoutData.heightHint = 400; //Height of the panel (WILL NEED TO ADJUST) testResultsExpandBarLayoutData.widthHint = 400; //Width of the panel (WILL NEED TO ADJUST) testResultsExpandBar.setLayoutData( testResultsExpandBarLayoutData ); testResultsExpandBar.setSpacing( 8 ); props.setLook( testResultsExpandBar ); mainPanel.pack(); List<TestCategory> testCategories = setTestResultsOrder( categories ); for ( TestCategory testCategory : testCategories ) { ExpandItem categoryItem = new ExpandItem( testResultsExpandBar, SWT.NONE, 0 ); categoryItem.setText( testCategory.getCategoryName() ); if ( testCategory.getCategoryStatus().equals( FAIL ) ) { categoryItem.setImage( GUIResource.getInstance().getImage( FAIL_IMG, getClass().getClassLoader(), 16, 16 ) ); } else if ( testCategory.getCategoryStatus().isEmpty() ) { categoryItem.setImage( GUIResource.getInstance().getImage( WARNING_IMG, getClass().getClassLoader(), 16, 16 ) ); categoryItem.setText( testCategory.getCategoryName() + " (skipped)" ); } else if ( testCategory.getCategoryStatus().equals( WARNING ) ) { categoryItem.setImage( GUIResource.getInstance().getImage( WARNING_IMG, getClass().getClassLoader(), 16, 16 ) ); } else if ( testCategory.getCategoryStatus().equals( PASS ) ) { categoryItem.setImage( GUIResource.getInstance().getImage( PASS_IMG, getClass().getClassLoader(), 16, 16 ) ); } List<Test> tests = testCategory.getTests(); Composite testComposite = new Composite( testResultsExpandBar, SWT.NONE ); props.setLook( testComposite ); for ( Test test : tests ) { GridLayout testLayout = new GridLayout(); testLayout.marginLeft = testLayout.marginTop = testLayout.marginRight = testLayout.marginBottom = 10; testLayout.verticalSpacing = 10; testComposite.setLayout( testLayout ); GridData testLayoutData = new GridData();
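// Fixed 400x400 area for this category's tests; each test below is rendered as a CLabel showing a 16x16 status icon (pass/warning/fail) and the test name.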
testLayoutData.widthHint = 400; testLayoutData.heightHint = 400; testComposite.setLayoutData( testLayoutData ); CLabel testLabel = new CLabel( testComposite, SWT.NONE ); if ( test.getTestStatus().equals( WARNING ) ) { testLabel.setImage( GUIResource.getInstance().getImage( WARNING_IMG, getClass().getClassLoader(), 16, 16 ) ); } else if ( test.getTestStatus().equals( FAIL ) ) { testLabel.setImage( GUIResource.getInstance().getImage( FAIL_IMG, getClass().getClassLoader(), 16, 16 ) ); } else if ( test.getTestStatus().equals( PASS ) ) { testLabel.setImage( GUIResource.getInstance().getImage( PASS_IMG, getClass().getClassLoader(), 16, 16 ) ); } testLabel.setText( test.getTestName() ); props.setLook( testLabel ); } categoryItem.setHeight( testComposite.computeSize( SWT.DEFAULT, SWT.DEFAULT ).y ); categoryItem.setControl( testComposite ); mainPanel.pack(); } mainPanel.pack(); } public void initialize( ThinNameClusterModel model ) { thinNameClusterModel = model; } public void performHelp() { HelpUtils.openHelpDialog( parent.getShell(), "", "https://docs.pentaho.com/pdia-11.0-install/use-hadoop-with-pentaho/big-data-issues", "" ); } } ================================================ FILE: kettle-plugins/hadoop-cluster/ui/src/main/java/org/pentaho/big/data/kettle/plugins/hadoopcluster/ui/dialog/wizard/util/BadSiteFilesException.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.hadoopcluster.ui.dialog.wizard.util; public class BadSiteFilesException extends Exception { } ================================================ FILE: kettle-plugins/hadoop-cluster/ui/src/main/java/org/pentaho/big/data/kettle/plugins/hadoopcluster/ui/dialog/wizard/util/CustomWizardDialog.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. 
* * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.hadoopcluster.ui.dialog.wizard.util; import org.eclipse.jface.dialogs.IDialogConstants; import org.eclipse.jface.wizard.IWizard; import org.eclipse.jface.wizard.WizardDialog; import org.eclipse.swt.SWT; import org.eclipse.swt.graphics.Point; import org.eclipse.swt.graphics.Rectangle; import org.eclipse.swt.widgets.Button; import org.eclipse.swt.widgets.Shell; import org.pentaho.di.ui.core.PropsUI; import org.pentaho.di.ui.core.gui.GUIResource; public class CustomWizardDialog extends WizardDialog { public CustomWizardDialog( Shell parentShell, IWizard newWizard ) { super( parentShell, newWizard ); setDefaultImage( GUIResource.getInstance().getImageWizard() ); setHelpAvailable( true ); setShellStyle( SWT.CLOSE | SWT.TITLE | SWT.BORDER | SWT.APPLICATION_MODAL | getDefaultOrientation() ); create(); Rectangle shellBounds = getParentShell().getBounds(); Point dialogSize = getShell().getSize(); getShell().setLocation( shellBounds.x + ( shellBounds.width - dialogSize.x ) / 2, shellBounds.y + ( shellBounds.height - dialogSize.y ) / 2 ); } public void style() { PropsUI propsUI = PropsUI.getInstance(); propsUI.setLook( getButtonBar() ); propsUI.setLook( getDialogArea() ); } public void enableCancelButton( boolean isEnabled ) { Button cancelButton = getButton( IDialogConstants.CANCEL_ID ); cancelButton.setEnabled( isEnabled ); } } ================================================ FILE: kettle-plugins/hadoop-cluster/ui/src/main/java/org/pentaho/big/data/kettle/plugins/hadoopcluster/ui/dialog/wizard/util/NamedClusterHelper.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. 
* * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.hadoopcluster.ui.dialog.wizard.util; import org.eclipse.swt.SWT; import org.eclipse.swt.layout.GridData; import org.eclipse.swt.widgets.Composite; import org.eclipse.swt.widgets.Label; import org.eclipse.swt.widgets.Listener; import org.pentaho.big.data.kettle.plugins.hadoopcluster.ui.endpoints.CachedFileItemStream; import org.pentaho.big.data.kettle.plugins.hadoopcluster.ui.endpoints.HadoopClusterManager; import org.pentaho.big.data.kettle.plugins.hadoopcluster.ui.model.ThinNameClusterModel; import org.pentaho.di.core.encryption.Encr; import org.pentaho.di.core.util.StringUtil; import org.pentaho.di.core.variables.VariableSpace; import org.pentaho.di.repository.Repository; import org.pentaho.di.ui.core.PropsUI; import org.pentaho.di.ui.core.widget.TextVar; import org.pentaho.di.ui.spoon.Spoon; import org.pentaho.metastore.api.security.Base64TwoWayPasswordEncoder; import org.pentaho.metastore.api.security.ITwoWayPasswordEncoder; import java.io.File; import java.io.FileInputStream; import java.io.FileNotFoundException; import java.io.IOException; import java.io.InputStream; import java.util.AbstractMap; import java.util.ArrayList; import java.util.Date; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.function.Supplier; import java.util.zip.ZipEntry; import java.util.zip.ZipInputStream; public abstract class NamedClusterHelper { public static final int ONE_COLUMN = 1; public static final int TWO_COLUMNS = 2; public static final String USERNAME = "USERNAME"; public static final String PASSWORD = "PASSWORD"; private static final Supplier<Spoon> spoonSupplier = Spoon::getInstance; private static final ITwoWayPasswordEncoder passwordEncoder = new Base64TwoWayPasswordEncoder(); /** * Data structure to hold driver configuration information */ public static class DriverInfo { private final String id; private final String vendor; private final String version; public DriverInfo( String id, String vendor, String version ) { this.id = id; this.vendor = vendor; this.version = version; } public String getId() { return id; } public String getVendor() { return vendor; } public String getVersion() { return version; } } private static final Map<String, DriverInfo> DRIVER_INFO_MAP = new HashMap<>(); static { DRIVER_INFO_MAP.put( "apachevanilla", new DriverInfo( "apachevanilla", "ApacheVanilla", "3.4.0" ) ); DRIVER_INFO_MAP.put( "cdpdc71", new DriverInfo( "cdpdc71", "Cloudera", "7.1" ) ); DRIVER_INFO_MAP.put( "dataproc1421", new DriverInfo( "dataproc1421", "Google Dataproc", "1.4" ) ); DRIVER_INFO_MAP.put( "dataproc23", new DriverInfo( "dataproc23", "Google Dataproc", "2.3" ) ); DRIVER_INFO_MAP.put( "emr770", new DriverInfo( "emr770", "EMR", "7.7" ) ); DRIVER_INFO_MAP.put( "hdi40", new DriverInfo( "hdi40", "HDInsight", "4.0" ) ); DRIVER_INFO_MAP.put( "apache", new DriverInfo( "apache", "Apache", "3.4" ) ); } public enum FileType { CONFIGURATION( "configuration" ), DRIVER( ".kar" ); private final String val; FileType( String val ) { this.val = val; } String getValue() { return this.val; } } public static Label createLabel( Composite parent, String text, GridData gd, PropsUI props ) { Label label = new Label( parent, SWT.NONE ); label.setText( text ); label.setLayoutData( gd ); props.setLook( label ); return label; } public static Label createLabelWithStyle( Composite parent, String text, GridData gd, PropsUI props, int style ) { Label label = new
Label( parent, style ); label.setText( text ); label.setLayoutData( gd ); props.setLook( label ); return label; } public static TextVar createText( Composite parent, String text, GridData gd, PropsUI props, VariableSpace variableSpace ) { return createText( parent, text, gd, props, variableSpace, null ); } public static TextVar createText( Composite parent, String text, GridData gd, PropsUI props, VariableSpace variableSpace, Listener listener ) { TextVar textVar = new TextVar( variableSpace, parent, SWT.SINGLE | SWT.LEFT | SWT.BORDER ); textVar.setText( text ); textVar.setLayoutData( gd ); if ( listener != null ) { textVar.getTextWidget().addListener( SWT.CHANGED, listener ); textVar.getTextWidget().addListener( SWT.MouseExit, listener ); } props.setLook( textVar ); return textVar; } public static Map<String, CachedFileItemStream> processSiteFiles( ThinNameClusterModel model, HadoopClusterManager manager ) throws BadSiteFilesException, IOException { Map<String, CachedFileItemStream> siteFiles = new HashMap<>(); List<AbstractMap.SimpleImmutableEntry<String, String>> files = model.getSiteFiles(); for ( AbstractMap.SimpleImmutableEntry<String, String> file : files ) { File siteFile = null; String fileName = null; if ( file.getValue().equals( "keytabAuthFile" ) || file.getValue().equals( "keytabImpFile" ) ) { siteFile = new File( file.getKey() ); fileName = file.getValue(); } else { siteFile = new File( file.getKey() + file.getValue() ); fileName = siteFile.getName(); } InputStream fileInputStream = null; try { fileInputStream = new FileInputStream( siteFile ); } catch ( FileNotFoundException e ) { if ( file.getKey().isEmpty() ) { if ( manager.getNamedClusterByName( model.getName() ) != null ) { fileInputStream = manager.getSiteFileInputStream( model.getName(), file.getValue() ); } else { fileInputStream = manager.getSiteFileInputStream( model.getOldName(), file.getValue() ); } } else { if( !( file.getKey().contains( model.getName() ) || file.getKey().contains( model.getOldName() ) ) ) { throw new BadSiteFilesException(); } else { continue; } } } List<CachedFileItemStream> fileItemStreams = copyAndUnzip( fileInputStream, FileType.CONFIGURATION, siteFile.getName(), fileName, manager ); for ( CachedFileItemStream cachedFileItemStream : fileItemStreams ) { siteFiles.put( cachedFileItemStream.getFieldName(), cachedFileItemStream ); } } return siteFiles; } public static List<CachedFileItemStream> copyAndUnzip( InputStream fileInputStream, FileType fileType, String fileName, String realFileName, HadoopClusterManager manager ) throws IOException { List<CachedFileItemStream> unzippedFileItemStreams = new ArrayList<>(); if ( realFileName.endsWith( ".zip" ) ) { try ( ZipInputStream zis = new ZipInputStream( fileInputStream ) ) { for ( ZipEntry zipEntry = zis.getNextEntry(); zipEntry != null; zipEntry = zis.getNextEntry() ) { if ( !zipEntry.isDirectory() ) { // Remove all directory structure from the zip file names and only unzip the files String[] split = zipEntry.getName().split( "/" ); //zip files always use forward slash String unzippedFileName = split[ split.length - 1 ]; if ( isValidUpload( unzippedFileName, fileType, manager ) ) { CachedFileItemStream unzippedFileItemStream = new CachedFileItemStream( zis, unzippedFileName, unzippedFileName ); unzippedFileItemStream.setLastModified( zipEntry.getLastModifiedTime().toMillis() ); unzippedFileItemStreams.add( unzippedFileItemStream ); } } } } } else { // File is not zipped if ( isValidUpload( realFileName, fileType, manager ) ) { unzippedFileItemStreams.add( new CachedFileItemStream( fileInputStream, fileName, realFileName ) ); } } return unzippedFileItemStreams; } public static boolean isValidUpload( String fileName, FileType fileType,
HadoopClusterManager manager ) { boolean valid = ( fileType.equals( FileType.CONFIGURATION ) && manager.isValidConfigurationFile( fileName ) ) || ( fileType.equals( FileType.DRIVER ) && fileName.endsWith( FileType.DRIVER.getValue() ) ); return valid; } public static boolean isConnectedToRepo() { Spoon supplier = spoonSupplier.get(); if ( supplier != null ) { Repository repo = supplier.getRepository(); return repo != null && repo.getUri().isPresent(); } else { return false; } } public static String getEndpointURL( String endpoint ) { double cacheBust = Math.round( new Date().getTime() / 1000 ) + Math.random(); return spoonSupplier.get().getRepository().getUri() .orElseThrow( () -> new IllegalStateException( "Repo URI not defined" ) ) .toString() + "/plugin/pentaho-hadoop-cluster-plugin/api/" + endpoint + "?v=" + cacheBust; } public static Map getSecurityCredentials() { Repository repo = spoonSupplier.get().getRepository(); String userName = repo.getUserInfo().getLogin(); String password = repo.getUserInfo().getPassword(); Map credentials = new HashMap<>(); credentials.put( USERNAME, userName ); credentials.put( PASSWORD, password ); return credentials; } public static String encodePassword( String password ) { if ( password != null && !password.startsWith( Encr.PASSWORD_ENCRYPTED_PREFIX ) ) { password = Encr.encryptPasswordIfNotUsingVariables( password ); } return password; } public static String decodePassword( String password ) { if ( password == null || password.startsWith( Encr.PASSWORD_ENCRYPTED_PREFIX ) ) { return Encr.decryptPasswordOptionallyEncrypted( password ); } else { //Password is likely stored encrypted with legacy Base64TwoWayPasswordEncoder if ( !StringUtil.isVariable( password ) ) { return passwordEncoder.decode( password ); } } return password; } /** * Get the vendor name for a given driver ID * @param driverId The driver identifier * @return The vendor name, or null if not found */ public static String getVendorForDriver( String driverId ) { DriverInfo driverInfo = DRIVER_INFO_MAP.get( driverId ); return driverInfo != null ? driverInfo.getVendor() : null; } /** * Get the version for a given driver ID * @param driverId The driver identifier * @return The version, or null if not found */ public static String getVersionForDriver( String driverId ) { DriverInfo driverInfo = DRIVER_INFO_MAP.get( driverId ); return driverInfo != null ? driverInfo.getVersion() : null; } /** * Get the complete DriverInfo for a given driver ID * @param driverId The driver identifier * @return The DriverInfo object, or null if not found */ public static DriverInfo getDriverInfo( String driverId ) { return DRIVER_INFO_MAP.get( driverId ); } } ================================================ FILE: kettle-plugins/hadoop-cluster/ui/src/main/java/org/pentaho/big/data/kettle/plugins/hadoopcluster/ui/endpoints/CachedFileItemStream.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. 
* * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.hadoopcluster.ui.endpoints; import org.apache.commons.fileupload2.core.FileItemInput; import org.apache.commons.io.IOUtils; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.IOException; import java.io.InputStream; public class CachedFileItemStream { private ByteArrayOutputStream outputStream = new ByteArrayOutputStream(); private String name; private String fieldName; private long lastModified; //optional file last modified date public CachedFileItemStream( FileItemInput fileItemStream ) throws IOException { this( fileItemStream.getInputStream(), fileItemStream.getName(), fileItemStream.getFieldName() ); } public CachedFileItemStream( InputStream inputStream, String name, String fieldName ) throws IOException { IOUtils.copy( inputStream, this.outputStream ); this.name = name; this.fieldName = fieldName; } public ByteArrayOutputStream getCachedOutputStream() { return this.outputStream; } public ByteArrayInputStream getCachedInputStream() { return new ByteArrayInputStream( this.outputStream.toByteArray() ); } public String getName() { return this.name; } public String getFieldName() { return this.fieldName; } public long getLastModified() { return lastModified; } public void setLastModified( long lastModified ) { this.lastModified = lastModified; } } ================================================ FILE: kettle-plugins/hadoop-cluster/ui/src/main/java/org/pentaho/big/data/kettle/plugins/hadoopcluster/ui/endpoints/Category.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.hadoopcluster.ui.endpoints; import java.util.List; public interface Category { public List getTests(); public String getCategoryName(); public void setCategoryName( String categoryName ); public void setTests( List tests ); public String getCategoryStatus(); public void setCategoryStatus( String categoryStatus ); public boolean isCategoryActive(); public void setCategoryActive( boolean categoryActive ); public void addTest( Test test ); } ================================================ FILE: kettle-plugins/hadoop-cluster/ui/src/main/java/org/pentaho/big/data/kettle/plugins/hadoopcluster/ui/endpoints/HadoopClusterManager.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. 
* * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.hadoopcluster.ui.endpoints; import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.annotations.VisibleForTesting; import org.apache.commons.configuration.ConfigurationException; import org.apache.commons.configuration.PropertiesConfiguration; import org.apache.commons.fileupload2.core.FileItemInput; import org.apache.commons.io.FileUtils; import org.apache.http.HttpEntity; import org.apache.http.HttpHost; import org.apache.http.auth.AuthScope; import org.apache.http.auth.UsernamePasswordCredentials; import org.apache.http.client.AuthCache; import org.apache.http.client.methods.CloseableHttpResponse; import org.apache.http.client.methods.HttpGet; import org.apache.http.client.methods.HttpPost; import org.apache.http.client.protocol.HttpClientContext; import org.apache.http.entity.ContentType; import org.apache.http.entity.mime.MultipartEntityBuilder; import org.apache.http.impl.auth.BasicScheme; import org.apache.http.impl.client.BasicAuthCache; import org.apache.http.impl.client.BasicCredentialsProvider; import org.apache.http.impl.client.CloseableHttpClient; import org.apache.http.impl.client.HttpClients; import org.apache.http.util.EntityUtils; import org.json.simple.JSONObject; import org.json.simple.parser.JSONParser; import org.json.simple.parser.ParseException; import org.pentaho.big.data.api.services.BigDataServicesHelper; import org.pentaho.big.data.kettle.plugins.hadoopcluster.ui.dialog.HadoopClusterDialog; import org.pentaho.big.data.kettle.plugins.hadoopcluster.ui.dialog.wizard.util.BadSiteFilesException; import org.pentaho.big.data.kettle.plugins.hadoopcluster.ui.dialog.wizard.util.NamedClusterHelper; import org.pentaho.big.data.kettle.plugins.hadoopcluster.ui.model.ThinNameClusterModel; import org.pentaho.big.data.plugins.common.ui.HadoopClusterDelegateImpl; import org.pentaho.di.base.AbstractMeta; import org.pentaho.di.core.Const; import org.pentaho.di.core.exception.KettleXMLException; import org.pentaho.di.core.logging.KettleLogStore; import org.pentaho.di.core.logging.LogChannelInterface; import org.pentaho.di.core.osgi.api.NamedClusterSiteFile; import org.pentaho.di.core.osgi.impl.NamedClusterSiteFileImpl; import org.pentaho.di.core.util.StringUtil; import org.pentaho.di.core.variables.VariableSpace; import org.pentaho.di.core.variables.Variables; import org.pentaho.di.core.xml.XMLHandler; import org.pentaho.di.i18n.BaseMessages; import org.pentaho.di.ui.spoon.Spoon; import org.pentaho.hadoop.shim.api.cluster.NamedCluster; import org.pentaho.hadoop.shim.api.cluster.NamedClusterService; import org.pentaho.metastore.api.IMetaStore; import org.pentaho.metastore.api.exceptions.MetaStoreException; import org.pentaho.runtime.test.RuntimeTest; import org.pentaho.runtime.test.RuntimeTestProgressCallback; import org.pentaho.runtime.test.RuntimeTestStatus; import org.pentaho.runtime.test.RuntimeTester; import org.pentaho.runtime.test.module.RuntimeTestModuleResults; import org.pentaho.runtime.test.result.RuntimeTestResult; import org.pentaho.runtime.test.result.RuntimeTestResultEntry; import org.w3c.dom.Document; import org.w3c.dom.NodeList; import javax.xml.xpath.XPath; import javax.xml.xpath.XPathConstants; import javax.xml.xpath.XPathExpression; import javax.xml.xpath.XPathExpressionException; import javax.xml.xpath.XPathFactory; import java.io.BufferedReader; import java.io.ByteArrayOutputStream; import 
java.io.File; import java.io.FileInputStream; import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; import java.io.OutputStream; import java.net.URI; import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.Paths; import java.nio.file.StandardCopyOption; import java.util.AbstractMap.SimpleImmutableEntry; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.HashMap; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.Optional; import java.util.stream.Collectors; import java.util.stream.Stream; import static org.pentaho.big.data.impl.cluster.tests.Constants.HADOOP_FILE_SYSTEM; import static org.pentaho.big.data.impl.cluster.tests.Constants.OOZIE; import static org.pentaho.big.data.impl.cluster.tests.Constants.KAFKA; import static org.pentaho.big.data.impl.cluster.tests.Constants.ZOOKEEPER; import static org.pentaho.big.data.impl.cluster.tests.Constants.MAP_REDUCE; import static org.pentaho.big.data.kettle.plugins.hadoopcluster.ui.dialog.wizard.util.NamedClusterHelper.isConnectedToRepo; import static org.pentaho.big.data.kettle.plugins.hadoopcluster.ui.dialog.wizard.util.NamedClusterHelper.processSiteFiles; import static org.pentaho.big.data.kettle.plugins.hadoopcluster.ui.model.ThinNameClusterModel.NAME_KEY; import static org.pentaho.big.data.kettle.plugins.hadoopcluster.ui.dialog.wizard.util.NamedClusterHelper.encodePassword; public class HadoopClusterManager implements RuntimeTestProgressCallback { private static final Class PKG = HadoopClusterDialog.class; public static final String STRING_NAMED_CLUSTERS = BaseMessages.getString( PKG, "HadoopClusterTree.Title" ); public static final String PLACEHOLDER_VALUE = "[object Object]"; private final String fileSeparator = System.getProperty( "file.separator" ); private static final String PASS = "Pass"; private static final String WARNING = "Warning"; private static final String FAIL = "Fail"; private static final String NAMED_CLUSTER = "namedCluster"; private static final String INSTALLED = "installed"; private static final String CONFIG_PROPERTIES = "config.properties"; private static final String KEYTAB_AUTH_FILE = "keytabAuthFile"; private static final String KEYTAB_IMPL_FILE = "keytabImpFile"; public static final String MAPR_SHIM = "Map-R"; public static final String MAPRFS_SCHEME = "maprfs"; private static final LogChannelInterface log = KettleLogStore.getLogChannelInterfaceFactory().create( "HadoopClusterManager" ); private final String internalShim; private enum KERBEROS_SUBTYPE { PASSWORD( "Password" ), KEYTAB( "Keytab" ); private String val; KERBEROS_SUBTYPE( String val ) { this.val = val; } public String getValue() { return this.val; } } private enum SECURITY_TYPE { NONE( "None" ), KERBEROS( "Kerberos" ), KNOX( "Knox" ); private String val; SECURITY_TYPE( String val ) { this.val = val; } public String getValue() { return this.val; } } private enum IMPERSONATION_TYPE { SIMPLE( "simple" ), DISABLED( "disabled" ); private String val; IMPERSONATION_TYPE( String val ) { this.val = val; } public String getValue() { return this.val; } } private static final String KERBEROS_AUTHENTICATION_USERNAME = "pentaho.authentication.default.kerberos.principal"; private static final String KERBEROS_AUTHENTICATION_PASS = "pentaho.authentication.default.kerberos.password"; private static final String KERBEROS_IMPERSONATION_USERNAME = 
"pentaho.authentication.default.mapping.server.credentials.kerberos.principal"; private static final String KERBEROS_IMPERSONATION_PASS = "pentaho.authentication.default.mapping.server.credentials.kerberos.password"; private static final String IMPERSONATION = "pentaho.authentication.default.mapping.impersonation.type"; private static final String KEYTAB_AUTHENTICATION_LOCATION = "pentaho.authentication.default.kerberos.keytabLocation"; private static final String KEYTAB_IMPERSONATION_LOCATION = "pentaho.authentication.default.mapping.server.credentials.kerberos.keytabLocation"; private final Spoon spoon; private final NamedClusterService namedClusterService; private final IMetaStore metaStore; private final VariableSpace variableSpace; private RuntimeTestStatus runtimeTestStatus = null; public HadoopClusterManager( Spoon spoon, NamedClusterService namedClusterService, IMetaStore metaStore, String internalShim ) { this.spoon = spoon; this.namedClusterService = namedClusterService; this.metaStore = metaStore != null ? metaStore : spoon.getMetaStore(); this.variableSpace = spoon == null ? new Variables() : (AbstractMeta) spoon.getActiveMeta(); this.internalShim = internalShim; } public HadoopClusterManager( NamedClusterService namedClusterService, IMetaStore metaStore, String internalShim ) { this( null, namedClusterService, metaStore, internalShim); } public JSONObject importNamedCluster( ThinNameClusterModel model, Map siteFilesSource ) { JSONObject response = new JSONObject(); response.put( NAMED_CLUSTER, "" ); try { // Create and initialize template. NamedCluster nc = namedClusterService.getClusterTemplate(); nc.setHdfsHost( "" ); nc.setHdfsPort( "" ); nc.setJobTrackerHost( "" ); nc.setJobTrackerPort( "" ); nc.setZooKeeperHost( "" ); nc.setZooKeeperPort( "" ); nc.setOozieUrl( "" ); nc.setName( model.getName() ); nc.setHdfsUsername( model.getHdfsUsername() ); nc.setHdfsPassword( encodePassword( model.getHdfsPassword() ) ); if ( variableSpace != null ) { nc.shareVariablesWith( variableSpace ); } else { nc.initializeVariablesFrom( null ); } boolean isConfigurationSet = configureNamedCluster( siteFilesSource, nc); if ( isConfigurationSet ) { deleteNamedClusterSchemaOnly( model ); setupKnoxSecurity( nc, model ); deleteConfigFolder( nc.getName() ); installSiteFiles( siteFilesSource, nc ); namedClusterService.create( nc, metaStore ); createConfigProperties( nc ); setupKerberosSecurity( model, siteFilesSource, "", "" ); response.put( NAMED_CLUSTER, nc.getName() ); } } catch ( Exception e ) { log.logError( e.getMessage() ); } return response; } private void deleteNamedClusterSchemaOnly( ThinNameClusterModel model ) throws MetaStoreException { List existingNcNames = namedClusterService.listNames( metaStore ); for ( String existingNcName : existingNcNames ) { if ( existingNcName.equalsIgnoreCase( model.getName() ) ) { namedClusterService.delete( existingNcName, metaStore ); } } } private NamedCluster convertToNamedCluster( ThinNameClusterModel model ) { NamedCluster nc = namedClusterService.getClusterTemplate(); nc.setName( model.getName() ); nc.setHdfsHost( model.getHdfsHost() ); nc.setHdfsPort( model.getHdfsPort() ); nc.setHdfsUsername( model.getHdfsUsername() ); nc.setHdfsPassword( encodePassword( model.getHdfsPassword() ) ); nc.setJobTrackerHost( model.getJobTrackerHost() ); nc.setJobTrackerPort( model.getJobTrackerPort() ); nc.setZooKeeperHost( model.getZooKeeperHost() ); nc.setZooKeeperPort( model.getZooKeeperPort() ); nc.setOozieUrl( model.getOozieUrl() ); nc.setKafkaBootstrapServers( 
model.getKafkaBootstrapServers() ); resolveShimIdentifier( nc ); setupKnoxSecurity( nc, model ); if ( variableSpace != null ) { nc.shareVariablesWith( variableSpace ); } else { nc.initializeVariablesFrom( null ); } return nc; } public boolean deleteConfigFolder( String configFolderName ) throws IOException { File configFolder = new File( getNamedClusterConfigsRootDir() ); File[] files = configFolder.listFiles(); if ( files != null ) { for ( File file : files ) { if ( file.isDirectory() && file.getName().equalsIgnoreCase( configFolderName ) ) { FileUtils.deleteDirectory( file ); break; } } } return true; } public JSONObject createNamedCluster( ThinNameClusterModel model, Map siteFilesSource ) { return createNamedCluster( model, siteFilesSource, "", "" ); } @VisibleForTesting public JSONObject createNamedCluster( ThinNameClusterModel model, Map siteFilesSource, String keytabAuthenticationLocation, String keytabImpersonationLocation ) { JSONObject response = new JSONObject(); response.put( NAMED_CLUSTER, "" ); try { NamedCluster nc = convertToNamedCluster( model ); deleteConfigFolder( nc.getName() ); installSiteFiles( siteFilesSource, nc ); namedClusterService.create( nc, metaStore ); createConfigProperties( nc ); setupKerberosSecurity( model, siteFilesSource, keytabAuthenticationLocation, keytabImpersonationLocation ); response.put( NAMED_CLUSTER, nc.getName() ); } catch ( Exception e ) { log.logError( e.getMessage() ); } return response; } public JSONObject editNamedCluster( ThinNameClusterModel model, boolean isEditMode, Map siteFilesSource ) { JSONObject response = new JSONObject(); response.put( NAMED_CLUSTER, "" ); try { final NamedCluster newNc = namedClusterService.getNamedClusterByName( model.getName(), metaStore ); final NamedCluster oldNc = namedClusterService.getNamedClusterByName( model.getOldName(), metaStore ); // Must get the current shim identifier before the creation of the Named Cluster xml schema for later comparison. String shimId = null; List existingSiteFiles = new ArrayList<>(); if ( oldNc != null ) { shimId = oldNc.getShimIdentifier(); existingSiteFiles = oldNc.getSiteFiles(); } NamedCluster nc = convertToNamedCluster( model ); nc.setSiteFiles( getIntersectionSiteFiles( model, existingSiteFiles ) ); installSiteFiles( siteFilesSource, nc ); if ( newNc != null ) { namedClusterService.update( nc, metaStore ); //new cluster name exists } else { namedClusterService.create( nc, metaStore ); //new cluster does not exist. Use creation logic } File oldConfigFolder = new File( getNamedClusterConfigsRootDir() + fileSeparator + model.getOldName() ); File newConfigFolder = new File( getNamedClusterConfigsRootDir() + fileSeparator + nc.getName() ); // Copy all files from the old config folder to the new config folder. if ( !oldConfigFolder.getName().equalsIgnoreCase( newConfigFolder.getName() ) ) { FileUtils.copyDirectory( oldConfigFolder, newConfigFolder ); } else { boolean success = oldConfigFolder.renameTo( newConfigFolder ); if ( !success ) { log.logError( "Renaming Named Cluster configuration folder failed." ); } } // If the user changed the shim, create a new config.properties file that corresponds to that shim // in the new config folder. Also save the keytab locations to set them again in the new config.properties // unless the kerberos subtype is Password. 
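// Illustrative sketch only (the property keys come from the constants declared above; the cluster and
// file names are hypothetical): when the Kerberos subtype is not Password, the existing
// config.properties may already hold keytab locations such as
//   pentaho.authentication.default.kerberos.keytabLocation=/home/user/.pentaho/metastore/pentaho/NamedCluster/Configs/myCluster/auth.keytab
//   pentaho.authentication.default.mapping.server.credentials.kerberos.keytabLocation=/home/user/.pentaho/metastore/pentaho/NamedCluster/Configs/myCluster/imp.keytab
// The block below reads those values back with PropertiesConfiguration#getProperty so that
// setupKerberosSecurity can re-apply them after the shim or folder change.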
String keytabAuthenticationLocation = ""; String keytabImpersonationLocation = ""; String kerberosSubType = model.getKerberosSubType(); if ( !kerberosSubType.equals( KERBEROS_SUBTYPE.PASSWORD.getValue() ) ) { String configFile = getNamedClusterConfigsRootDir() + fileSeparator + nc.getName() + fileSeparator + CONFIG_PROPERTIES; PropertiesConfiguration config = new PropertiesConfiguration( new File( configFile ) ); keytabAuthenticationLocation = (String) config.getProperty( KEYTAB_AUTHENTICATION_LOCATION ); keytabImpersonationLocation = (String) config.getProperty( KEYTAB_IMPERSONATION_LOCATION ); } if ( nc.getShimIdentifier() != null && !nc.getShimIdentifier().equals( shimId ) ) { createConfigProperties( nc ); } setupKerberosSecurity( model, siteFilesSource, keytabAuthenticationLocation, keytabImpersonationLocation ); // Delete old config folder. if ( isEditMode && !oldConfigFolder.getName().equalsIgnoreCase( newConfigFolder.getName() ) ) { deleteNamedCluster( metaStore, model.getOldName(), false ); } response.put( NAMED_CLUSTER, nc.getName() ); } catch ( Exception e ) { log.logError( e.getMessage() ); } return response; } public InputStream getSiteFileInputStream( String namedCluster, String siteFile ) { NamedCluster nc = namedClusterService.getNamedClusterByName( namedCluster, this.metaStore ); return nc.getSiteFileInputStream( siteFile ); } public ThinNameClusterModel getNamedCluster( String namedCluster ) { ThinNameClusterModel model = null; try { List namedClusters = namedClusterService.list( metaStore ); for ( NamedCluster nc : namedClusters ) { if ( nc.getName().equalsIgnoreCase( namedCluster ) ) { model = new ThinNameClusterModel(); model.setName( nc.getName() ); model.setShimIdentifier( nc.getShimIdentifier()); model.setHdfsHost( nc.getHdfsHost() ); model.setHdfsUsername( nc.getHdfsUsername() ); model.setHdfsPassword( nc.getHdfsPassword() ); model.setHdfsPort( nc.getHdfsPort() ); model.setJobTrackerHost( nc.getJobTrackerHost() ); model.setJobTrackerPort( nc.getJobTrackerPort() ); model.setKafkaBootstrapServers( nc.getKafkaBootstrapServers() ); model.setOozieUrl( nc.getOozieUrl() ); model.setZooKeeperPort( nc.getZooKeeperPort() ); model.setZooKeeperHost( nc.getZooKeeperHost() ); model.setGatewayPassword( nc.getGatewayPassword() ); model.setGatewayUrl( nc.getGatewayUrl() ); model.setGatewayUsername( nc.getGatewayUsername() ); model.setSecurityType( SECURITY_TYPE.NONE.getValue() ); if ( nc.isUseGateway() ) { model.setSecurityType( SECURITY_TYPE.KNOX.getValue() ); } else { resolveKerberosSecurity( model, nc ); } model.setSiteFiles( nc.getSiteFiles().stream() .map( sf -> new SimpleImmutableEntry<>( NAME_KEY, sf.getSiteFileName() ) ) .collect( Collectors.toList() ) ); break; } } } catch ( MetaStoreException e ) { log.logError( e.getMessage() ); } return model; } private boolean configureNamedCluster( Map siteFilesSource, NamedCluster nc ) { resolveShimIdentifier( nc ); String oozieBaseUrl = "oozie.base.url"; Map properties = new HashMap(); extractProperties( siteFilesSource, "core-site.xml", properties, new String[] { "fs.defaultFS" } ); extractProperties( siteFilesSource, "yarn-site.xml", properties, new String[] { "yarn.resourcemanager.address", "yarn.resourcemanager.hostname" } ); extractProperties( siteFilesSource, "hive-site.xml", properties, new String[] { "hive.zookeeper.quorum", "hive.zookeeper.client.port" } ); extractProperties( siteFilesSource, "oozie-site.xml", properties, new String[] { oozieBaseUrl } ); if ( properties.get( oozieBaseUrl ) == null ) { extractProperties( 
siteFilesSource, "oozie-default.xml", properties, new String[] { oozieBaseUrl } ); } boolean isConfigurationSet = false; /* * Address taken from * fs.defaultFS * in * core-site.xml * */ String hdfsAddress = properties.get( "fs.defaultFS" ); if ( hdfsAddress != null ) { URI hdfsURL = URI.create( hdfsAddress ); nc.setHdfsHost( hdfsURL.getHost() ); nc.setHdfsPort( hdfsURL.getPort() != -1 ? hdfsURL.getPort() + "" : "" ); isConfigurationSet = true; } /* * Address taken from * yarn.resourcemanager.address * in * yarn-site.xml * * If address not available * Hostname taken from * yarn.resourcemanager.hostname * in * yarn-site.xml * */ String jobTrackerAddress = properties.get( "yarn.resourcemanager.address" ); String jobTrackerHostname = properties.get( "yarn.resourcemanager.hostname" ); if ( jobTrackerAddress != null ) { Map hostAndPort = extractHostAndPort( jobTrackerAddress ); nc.setJobTrackerHost( hostAndPort.get( "host" ) ); nc.setJobTrackerPort( hostAndPort.get( "port" ) ); isConfigurationSet = true; } else if ( jobTrackerHostname != null ) { nc.setJobTrackerHost( jobTrackerHostname ); isConfigurationSet = true; } /* * Address and port taken from * hive.zookeeper.quorum * hive.zookeeper.client.port * in * hive-site.xml * */ String zooKeeperAddress = properties.get( "hive.zookeeper.quorum" ); String zooKeeperPort = properties.get( "hive.zookeeper.client.port" ); if ( zooKeeperAddress != null ) { List addresses = Arrays.asList( zooKeeperAddress.split( "," ) ); List hostNames = addresses.stream().map( address -> extractHostAndPort( address ).get( "host" ) ).collect( Collectors.toList() ); zooKeeperAddress = String.join( ",", hostNames ); } if ( zooKeeperAddress != null && zooKeeperPort != null ) { nc.setZooKeeperHost( zooKeeperAddress ); nc.setZooKeeperPort( zooKeeperPort ); isConfigurationSet = true; } /* * Address and port taken from * oozie.base.url * in * oozie-site.xml * if it does not exist then it is taken from * oozie-default.xml * */ String oozieAddress = properties.get( oozieBaseUrl ); if ( oozieAddress != null ) { nc.setOozieUrl( oozieAddress ); isConfigurationSet = true; } return isConfigurationSet; } private void resolveShimIdentifier( NamedCluster nc ) { String shimIdentifier = getShimIdentifier(); if( shimIdentifier != null ) { nc.setShimIdentifier( shimIdentifier ); } } private void extractProperties( Map siteFilesSource, String fileName, Map properties, String[] keys ) { CachedFileItemStream siteFile = siteFilesSource.get( fileName ); if ( siteFile != null ) { Document document = parseSiteFileDocument( siteFile ); if ( document != null ) { XPathFactory xpathFactory = XPathFactory.newInstance(); XPath xpath = xpathFactory.newXPath(); for ( String key : keys ) { try { XPathExpression expr = xpath.compile( "/configuration/property[name[starts-with(.,'" + key + "')]]/value/text()" ); NodeList nodes = (NodeList) expr.evaluate( document, XPathConstants.NODESET ); if ( nodes.getLength() > 0 ) { properties.put( key, nodes.item( 0 ).getNodeValue() ); } } catch ( XPathExpressionException e ) { log.logMinimal( e.getMessage() ); } } } } } public JSONObject installDriver( FileItemInput driver ) { boolean success = false; if ( driver != null ) { String destination = Const.getShimDriverDeploymentLocation(); try ( final InputStream driverStream = driver.getInputStream() ) { FileUtils.copyInputStreamToFile( driverStream, new File( destination + fileSeparator + driver.getFieldName() ) ); success = true; } catch ( IOException e ) { log.logError( e.getMessage() ); } } JSONObject response = new 
JSONObject(); response.put( INSTALLED, success ); return response; } /** * Get Intersection of siteFiles * * @param model * @param existingSiteFiles * @return a list of siteFiles at the intersection of siteFiles in the model and the existingSiteFiles */ private List getIntersectionSiteFiles( ThinNameClusterModel model, List existingSiteFiles ) { List newSiteFileNames = Optional.ofNullable( model.getSiteFiles() ) .map( Collection::stream ) .orElseGet( Stream::empty ) .map( SimpleImmutableEntry::getValue ) .collect( Collectors.toList() ); return existingSiteFiles.stream() .filter( siteFile -> newSiteFileNames.contains( siteFile.getSiteFileName() ) ) .collect( Collectors.toList() ); } private void installSiteFiles( Map siteFileSource, NamedCluster nc ) throws IOException { for ( Map.Entry siteFile : siteFileSource.entrySet() ) { String name = siteFile.getValue().getFieldName(); if ( isValidConfigurationFile( name ) ) { if ( name.equals( KEYTAB_AUTH_FILE ) || name.equals( KEYTAB_IMPL_FILE ) || !name.endsWith( "-site.xml" ) ) { name = extractFileNameFromFullPath( siteFile.getValue().getName() ); addFileToConfigFolder( siteFile.getValue().getCachedOutputStream(), name, nc ); } else { addFileToNamedClusterSiteFiles( siteFile, name, nc ); } } } } private void addFileToConfigFolder( ByteArrayOutputStream outputStream, String fileName, NamedCluster nc ) throws IOException { File destination = new File( getNamedClusterConfigsRootDir() + fileSeparator + nc.getName() + fileSeparator + fileName ); destination.getParentFile().mkdirs(); try ( OutputStream fos = new FileOutputStream( destination ) ) { outputStream.writeTo( fos ); } } private void addFileToNamedClusterSiteFiles( Map.Entry cachedFileItemStreamMapEntry, String fileName, NamedCluster nc ) throws IOException { InputStream inputStream = cachedFileItemStreamMapEntry.getValue().getCachedInputStream(); InputStreamReader isReader = new InputStreamReader( inputStream ); BufferedReader reader = new BufferedReader( isReader ); StringBuilder sb = new StringBuilder(); String str; while ( ( str = reader.readLine() ) != null ) { sb.append( str ); } //skip placeholder site file contents because the site file content is in the NamedCluster already if ( !sb.toString().equals( PLACEHOLDER_VALUE ) ) { boolean nameExists = false; //replace the contents if the name exists for ( NamedClusterSiteFile siteFile : nc.getSiteFiles() ) { if ( siteFile.getSiteFileName().equals( fileName ) ) { siteFile.setSiteFileContents( sb.toString() ); nameExists = true; break; } } //Add the file if the name didn't exist if ( !nameExists ) { nc.addSiteFile( new NamedClusterSiteFileImpl( fileName, cachedFileItemStreamMapEntry.getValue().getLastModified(), sb.toString() ) ); } } } public boolean isValidConfigurationFile( String fileName ) { return fileName != null && ( fileName.endsWith( "-site.xml" ) || fileName.endsWith( "-default.xml" ) || fileName.equals( CONFIG_PROPERTIES ) || fileName.equals( KEYTAB_AUTH_FILE ) || fileName.equals( KEYTAB_IMPL_FILE ) || fileName.equals( "data" ) ); } private Document parseSiteFileDocument( CachedFileItemStream file ) { Document document = null; try { document = XMLHandler.loadXMLFile( file.getCachedInputStream() ); } catch ( KettleXMLException e ) { log.logMinimal( String.format( "Site file %s is not a well formed XML document", file.getName() ) ); } return document; } private void createConfigProperties( NamedCluster namedCluster ) throws IOException { Path clusterConfigDirPath = Paths.get( getNamedClusterConfigsRootDir() + fileSeparator + 
namedCluster.getName() ); Path configPropertiesPath = Paths.get( getNamedClusterConfigsRootDir() + fileSeparator + namedCluster.getName() + fileSeparator + CONFIG_PROPERTIES ); Files.createDirectories( clusterConfigDirPath ); String sampleConfigProperties = namedCluster.getShimIdentifier() + "sampleconfig.properties"; InputStream inputStream = HadoopClusterDelegateImpl.class.getClassLoader().getResourceAsStream( sampleConfigProperties ); if ( inputStream != null ) { Files.copy( inputStream, configPropertiesPath, StandardCopyOption.REPLACE_EXISTING ); } } private void setupKerberosSecurity( ThinNameClusterModel model, Map siteFilesSource, String keytabAuthenticationLocation, String keytabImpersonationLocation ) { Path configPropertiesPath = Paths .get( getNamedClusterConfigsRootDir() + fileSeparator + model.getName() + fileSeparator + CONFIG_PROPERTIES ); String securityType = model.getSecurityType(); if ( !StringUtil.isEmpty( securityType ) ) { resetKerberosSecurity( configPropertiesPath ); if ( securityType.equals( SECURITY_TYPE.KERBEROS.getValue() ) ) { String kerberosSubType = model.getKerberosSubType(); if ( kerberosSubType.equals( KERBEROS_SUBTYPE.PASSWORD.getValue() ) ) { setupKerberosPasswordSecurity( configPropertiesPath, model ); } if ( kerberosSubType.equals( KERBEROS_SUBTYPE.KEYTAB.getValue() ) ) { setupKeytabSecurity( model, configPropertiesPath, siteFilesSource, keytabAuthenticationLocation, keytabImpersonationLocation ); } } } } private void resetKerberosSecurity( Path configPropertiesPath ) { try { PropertiesConfiguration config = new PropertiesConfiguration( configPropertiesPath.toFile() ); config.setProperty( KEYTAB_AUTHENTICATION_LOCATION, "" ); config.setProperty( KEYTAB_IMPERSONATION_LOCATION, "" ); config.setProperty( IMPERSONATION, IMPERSONATION_TYPE.DISABLED.getValue() ); config.setProperty( KERBEROS_AUTHENTICATION_USERNAME, "" ); config.setProperty( KERBEROS_AUTHENTICATION_PASS, "" ); config.setProperty( KERBEROS_IMPERSONATION_USERNAME, "" ); config.setProperty( KERBEROS_IMPERSONATION_PASS, "" ); config.save(); } catch ( ConfigurationException e ) { log.logMinimal( e.getMessage() ); } } private void retrieveKerberosSecurity( ThinNameClusterModel model, NamedCluster nc ) { try { String endpointURL = NamedClusterHelper.getEndpointURL( "getNamedCluster" ); endpointURL = endpointURL + "&namedCluster=" + nc.getName(); String result = doGet( endpointURL ); JSONObject jsonObject = (JSONObject) new JSONParser().parse( result ); String securityType = (String) jsonObject.get( "securityType" ); String kerberosSubType = (String) jsonObject.get( "kerberosSubType" ); String kerberosAuthenticationUsername = (String) jsonObject.get( "kerberosAuthenticationUsername" ); String kerberosAuthenticationPassword = (String) jsonObject.get( "kerberosAuthenticationPassword" ); String kerberosImpersonationUsername = (String) jsonObject.get( "kerberosImpersonationUsername" ); String kerberosImpersonationPassword = (String) jsonObject.get( "kerberosImpersonationPassword" ); String keytabAuthFile = (String) jsonObject.get( "keytabAuthFile" ); String keytabImpFile = (String) jsonObject.get( "keytabImpFile" ); model.setSecurityType( securityType ); model.setKerberosSubType( kerberosSubType ); model.setKerberosAuthenticationUsername( kerberosAuthenticationUsername ); model.setKerberosAuthenticationPassword( kerberosAuthenticationPassword ); model.setKerberosImpersonationUsername( kerberosImpersonationUsername ); model.setKerberosImpersonationPassword( kerberosImpersonationPassword ); 
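// Hypothetical example of the payload parsed above (only the key names are taken from this method;
// the values are made up): the repository's getNamedCluster endpoint is expected to return JSON
// along the lines of
//   { "securityType": "Kerberos", "kerberosSubType": "Keytab",
//     "kerberosAuthenticationUsername": "devuser@EXAMPLE.COM", "keytabAuthFile": "auth.keytab" }
// Keys that are absent simply come back as null from JSONObject#get and are copied onto the model as-is.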
model.setKeytabAuthFile( keytabAuthFile ); model.setKeytabImpFile( keytabImpFile ); } catch ( ParseException e ) { log.logError( e.getMessage() ); } } private void resolveKerberosSecurity( ThinNameClusterModel model, NamedCluster nc ) { if ( NamedClusterHelper.isConnectedToRepo() ) { retrieveKerberosSecurity( model, nc ); } else { try { String configFile = getNamedClusterConfigsRootDir() + fileSeparator + nc.getName() + fileSeparator + CONFIG_PROPERTIES; PropertiesConfiguration config = new PropertiesConfiguration( new File( configFile ) ); model.setKerberosAuthenticationUsername( (String) config.getProperty( KERBEROS_AUTHENTICATION_USERNAME ) ); model.setKerberosAuthenticationPassword( (String) config.getProperty( KERBEROS_AUTHENTICATION_PASS ) ); model.setKerberosImpersonationUsername( (String) config.getProperty( KERBEROS_IMPERSONATION_USERNAME ) ); model.setKerberosImpersonationPassword( (String) config.getProperty( KERBEROS_IMPERSONATION_PASS ) ); String keytabAuthenticationLocation = (String) config.getProperty( KEYTAB_AUTHENTICATION_LOCATION ); String keytabImpersonationLocation = (String) config.getProperty( KEYTAB_IMPERSONATION_LOCATION ); // Resolve the keytab auth and impl files if set to be displayed in the UI. if ( !StringUtil.isEmpty( keytabAuthenticationLocation ) ) { model.setKeytabAuthFile( keytabAuthenticationLocation ); } if ( !StringUtil.isEmpty( keytabImpersonationLocation ) ) { model.setKeytabImpFile( keytabImpersonationLocation ); } // If Kerberos security properties are empty then security type is None else if at least one of them has a // value then the security type is Kerberos if ( StringUtil.isEmpty( keytabAuthenticationLocation ) && StringUtil.isEmpty( keytabImpersonationLocation ) && StringUtil.isEmpty( model.getKerberosAuthenticationPassword() ) && StringUtil.isEmpty( model.getKerberosImpersonationPassword() ) ) { model.setSecurityType( SECURITY_TYPE.NONE.getValue() ); } else { model.setSecurityType( SECURITY_TYPE.KERBEROS.getValue() ); } // If kerberos keytab impersonation and kerberos keytab impersonation location are empty then kerberos sub type // is Password else is Keytab if ( StringUtil.isEmpty( keytabAuthenticationLocation ) && StringUtil.isEmpty( keytabImpersonationLocation ) ) { model.setKerberosSubType( KERBEROS_SUBTYPE.PASSWORD.getValue() ); } else { model.setKerberosSubType( KERBEROS_SUBTYPE.KEYTAB.getValue() ); } } catch ( ConfigurationException e ) { log.logError( e.getMessage() ); } } } private void setupKerberosPasswordSecurity( Path configPropertiesPath, ThinNameClusterModel model ) { try { PropertiesConfiguration config = new PropertiesConfiguration( configPropertiesPath.toFile() ); config.setProperty( KERBEROS_AUTHENTICATION_USERNAME, model.getKerberosAuthenticationUsername() ); if ( !StringUtil.isEmpty( model.getKerberosAuthenticationPassword() ) ) { config.setProperty( KERBEROS_AUTHENTICATION_PASS, encodePassword( model.getKerberosAuthenticationPassword() ) ); } else { config.setProperty( KERBEROS_AUTHENTICATION_PASS, "" ); } config.setProperty( KERBEROS_IMPERSONATION_USERNAME, model.getKerberosImpersonationUsername() ); if ( !StringUtil.isEmpty( model.getKerberosImpersonationPassword() ) ) { config.setProperty( KERBEROS_IMPERSONATION_PASS, encodePassword( model.getKerberosImpersonationPassword() ) ); } else { config.setProperty( KERBEROS_IMPERSONATION_PASS, "" ); } if ( ( !StringUtil.isEmpty( model.getKerberosImpersonationUsername() ) && !StringUtil.isEmpty( model.getKerberosImpersonationPassword() ) ) || ( !StringUtil.isEmpty( 
model.getKerberosAuthenticationUsername() ) && !StringUtil.isEmpty( model.getKerberosAuthenticationPassword() ) ) ) { config.setProperty( IMPERSONATION, IMPERSONATION_TYPE.SIMPLE.getValue() ); } else { config.setProperty( IMPERSONATION, IMPERSONATION_TYPE.DISABLED.getValue() ); } config.save(); } catch ( ConfigurationException e ) { log.logMinimal( e.getMessage() ); } } private String extractFileNameFromFullPath( String fileName ) { /* * This method is necessary because a difference in upload functionality from Linux and Windows. * On Linux the file uploaded is provided with the name only. * On Windows the file uploaded is provided with the full path and we only need the name. * */ int lastIndex = fileName.lastIndexOf( '/' ) != -1 ? fileName.lastIndexOf( '/' ) : fileName.lastIndexOf( '\\' ); lastIndex = lastIndex == -1 ? 0 : lastIndex + 1; fileName = fileName.substring( lastIndex ); return fileName; } private void setupKeytabSecurity( ThinNameClusterModel model, Path configPropertiesPath, Map siteFilesSource, String keytabAuthenticationLocation, String keytabImpersonationLocation ) { String namedClusterName = model.getName(); CachedFileItemStream keytabImpFile = siteFilesSource.get( KEYTAB_IMPL_FILE ); // Process the keytabAuthenticationLocation in case the Named Cluster name changed. // If it didn't then the resulting value should be the same. // Required for deleting orphaned keytab files. if ( !keytabAuthenticationLocation.isEmpty() ) { String name = extractFileNameFromFullPath( keytabAuthenticationLocation ); keytabAuthenticationLocation = getNamedClusterConfigsRootDir() + fileSeparator + namedClusterName + fileSeparator + name; } // Process the keytabImpersonationLocation in case the Named Cluster name changed. // If it didn't then the resulting value should be the same. if ( !keytabImpersonationLocation.isEmpty() ) { String name = extractFileNameFromFullPath( keytabImpersonationLocation ); keytabImpersonationLocation = getNamedClusterConfigsRootDir() + fileSeparator + namedClusterName + fileSeparator + name; } String authenticationLocation = keytabAuthenticationLocation; String impersonationLocation = keytabImpersonationLocation; if ( model.getKeytabAuthFile() != null && !model.getKeytabAuthFile().isEmpty() ) { String name = extractFileNameFromFullPath( model.getKeytabAuthFile() ); authenticationLocation = getNamedClusterConfigsRootDir() + fileSeparator + namedClusterName + fileSeparator + name; } if ( model.getKeytabImpFile() != null && !model.getKeytabImpFile().isEmpty() ) { String name = extractFileNameFromFullPath( model.getKeytabImpFile() ); impersonationLocation = getNamedClusterConfigsRootDir() + fileSeparator + namedClusterName + fileSeparator + name; } try { PropertiesConfiguration config = new PropertiesConfiguration( configPropertiesPath.toFile() ); // Authentication config.setProperty( KERBEROS_AUTHENTICATION_USERNAME, model.getKerberosAuthenticationUsername() ); if ( !StringUtil.isEmpty( authenticationLocation ) ) { config.setProperty( KEYTAB_AUTHENTICATION_LOCATION, authenticationLocation ); } // Impersonation config.setProperty( KERBEROS_IMPERSONATION_USERNAME, model.getKerberosImpersonationUsername() ); if ( keytabImpFile == null && StringUtil.isEmpty( model.getKeytabImpFile() ) ) { config.setProperty( KEYTAB_IMPERSONATION_LOCATION, "" ); } else if ( !StringUtil.isEmpty( impersonationLocation ) ) { config.setProperty( KEYTAB_IMPERSONATION_LOCATION, impersonationLocation ); } // If the keytabAuthFile is not used anymore delete it. 
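// Example scenario (file names are illustrative): if the cluster previously used
// Configs/myCluster/old-auth.keytab and the user now supplies new-auth.keytab,
// keytabAuthenticationLocation still points at the old file while authenticationLocation points at
// the new one, so the orphaned old-auth.keytab is deleted below unless that same file is still
// referenced as the impersonation keytab.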
if ( !keytabAuthenticationLocation.isEmpty() ) { if ( !keytabAuthenticationLocation.equals( authenticationLocation ) ) { if ( !keytabAuthenticationLocation.equals( impersonationLocation ) ) { File toDelete = new File( keytabAuthenticationLocation ); if ( toDelete.exists() ) { toDelete.delete(); } } } } // If the keytabImpFile is not used anymore delete it. if ( !keytabImpersonationLocation.isEmpty() ) { if ( !keytabImpersonationLocation.equals( impersonationLocation ) || model.getKeytabImpFile().isEmpty() ) { if ( !keytabImpersonationLocation.equals( authenticationLocation ) ) { File toDelete = new File( keytabImpersonationLocation ); if ( toDelete.exists() ) { toDelete.delete(); } } } } if ( !StringUtil.isEmpty( (String) config.getProperty( KEYTAB_AUTHENTICATION_LOCATION ) ) || !StringUtil.isEmpty( (String) config.getProperty( KEYTAB_IMPERSONATION_LOCATION ) ) ) { config.setProperty( IMPERSONATION, IMPERSONATION_TYPE.SIMPLE.getValue() ); } else { config.setProperty( IMPERSONATION, IMPERSONATION_TYPE.DISABLED.getValue() ); } config.save(); } catch ( ConfigurationException e ) { log.logMinimal( e.getMessage() ); } } private void setupKnoxSecurity( NamedCluster nc, ThinNameClusterModel model ) { if ( model.getSecurityType() != null && model.getSecurityType().equals( SECURITY_TYPE.KNOX.getValue() ) ) { String userName = model.getGatewayUsername(); String url = model.getGatewayUrl(); String password = model.getGatewayPassword(); nc.setGatewayPassword( encodePassword( password ) ); nc.setGatewayUrl( encodePassword( url ) ); nc.setGatewayUsername( userName ); nc.setUseGateway( !StringUtil.isEmpty( userName ) && !StringUtil.isEmpty( url ) && !StringUtil.isEmpty( password ) ); } } /* * Extract Hostname and Port from a hostname/URL pattern */ private Map extractHostAndPort( String urlPattern ) { final String HTTP_PATTERN = "http://"; if ( !urlPattern.startsWith( HTTP_PATTERN ) ) { urlPattern = HTTP_PATTERN + urlPattern; } URI parsedURI = URI.create( urlPattern ); Map map = new HashMap<>(); map.put( "host", parsedURI.getHost() ); map.put( "port", parsedURI.getPort() != -1 ? 
parsedURI.getPort() + "" : "" ); return map; } public void deleteNamedCluster( IMetaStore metaStore, String namedCluster, boolean refreshTree ) { try { if ( namedClusterService.read( namedCluster, metaStore ) != null ) { namedClusterService.delete( namedCluster, metaStore ); if ( isConnectedToRepo() ) { String endpointURL = NamedClusterHelper.getEndpointURL( "deleteNamedCluster" ); endpointURL = endpointURL + "&namedCluster=" + namedCluster; doGet( endpointURL ); } else { deleteConfigFolder( namedCluster ); } } if ( refreshTree ) { refreshTree(); } } catch ( Exception e ) { log.logMinimal( e.getMessage() ); } } public String getShimIdentifier() { String shimIdentifier = null; if ( isConnectedToRepo() ) { String endpointURL = NamedClusterHelper.getEndpointURL( "getShimIdentifier" ); shimIdentifier = doGet( endpointURL ); } else { shimIdentifier = BigDataServicesHelper.getShimIdentifier(); } return shimIdentifier; } public NamedCluster getNamedClusterByName( String namedCluster) { return namedClusterService.getNamedClusterByName( namedCluster, this.metaStore ); } public Object runTests( RuntimeTester runtimeTester, String namedCluster ) { NamedCluster nc = namedClusterService.getNamedClusterByName( namedCluster, this.metaStore ); if ( nc != null ) { try { if ( runtimeTester != null ) { runtimeTestStatus = null; runtimeTester.runtimeTest( nc, this ); synchronized ( this ) { while ( runtimeTestStatus == null ) { wait(); } } } } catch ( Exception e ) { log.logMinimal( e.getLocalizedMessage() ); } return produceTestCategories( runtimeTestStatus, nc ); } else { return "[]"; } } public Object[] produceTestCategories( RuntimeTestStatus runtimeTestStatus, NamedCluster nc ) { LinkedHashMap categories = new LinkedHashMap<>(); if ( NamedClusterHelper.isConnectedToRepo() ) { String endpointURL = NamedClusterHelper.getEndpointURL( "runTests" ); endpointURL = endpointURL + "&namedCluster=" + nc.getName(); String result = doGet( endpointURL ); ObjectMapper mapper = new ObjectMapper(); try { return mapper.readValue( result, TestCategory[].class ); } catch ( Exception e ) { log.logError( e.getMessage() ); } } else { categories.put( HADOOP_FILE_SYSTEM, new TestCategory( "Hadoop file system" ) ); categories.put( ZOOKEEPER, new TestCategory( "Zookeeper connection" ) ); categories.put( MAP_REDUCE, new TestCategory( "Job tracker / resource manager" ) ); categories.put( OOZIE, new TestCategory( "Oozie host connection" ) ); categories.put( KAFKA, new TestCategory( "Kafka connection" ) ); if ( runtimeTestStatus != null && nc != null ) { for ( RuntimeTestModuleResults moduleResults : runtimeTestStatus.getModuleResults() ) { for ( RuntimeTestResult testResult : moduleResults.getRuntimeTestResults() ) { RuntimeTest runtimeTest = testResult.getRuntimeTest(); String name = runtimeTest.getName(); String status = getTestStatus( testResult.getOverallStatusEntry() ); String module = runtimeTest.getModule(); Category category = categories.get( module ); category.setCategoryActive( true ); if ( module.equals( HADOOP_FILE_SYSTEM ) ) { Test test = new Test( name ); test.setTestStatus( status ); test.setTestActive( true ); category.addTest( test ); configureHadoopFileSystemTestCategory( category, !StringUtil.isEmpty( nc.getHdfsHost() ), status ); } else if ( module.equals( OOZIE ) ) { configureTestCategories( category, !StringUtil.isEmpty( nc.getOozieUrl() ), status ); } else if ( module.equals( KAFKA ) ) { configureTestCategories( category, !StringUtil.isEmpty( nc.getKafkaBootstrapServers() ), status ); } else if ( 
module.equals( ZOOKEEPER ) ) { configureTestCategories( category, !StringUtil.isEmpty( nc.getZooKeeperHost() ), status ); } else if ( module.equals( MAP_REDUCE ) ) { configureTestCategories( category, !StringUtil.isEmpty( nc.getJobTrackerHost() ), status ); } } } } } return categories.values().toArray(); } private void configureHadoopFileSystemTestCategory( Category category, boolean isActive, String status ) { category.setCategoryActive( isActive ); if ( category.isCategoryActive() ) { String currentStatus = category.getCategoryStatus(); if ( status.equals( FAIL ) || ( status.equals( WARNING ) && !currentStatus.equals( FAIL ) ) || ( status.equals( PASS ) && StringUtil.isEmpty( currentStatus ) ) ) { category.setCategoryStatus( status ); } } } private void configureTestCategories( Category category, boolean isActive, String status ) { category.setCategoryActive( isActive ); if ( category.isCategoryActive() ) { category.setCategoryStatus( status ); } } private String getTestStatus( RuntimeTestResultEntry summary ) { String status = ""; switch ( summary.getSeverity() ) { case INFO: status = PASS; break; case SKIPPED: status = WARNING; break; case FATAL: status = FAIL; break; case ERROR: status = FAIL; break; case WARNING: status = FAIL; break; default: break; } return status; } public void onProgress( final RuntimeTestStatus clusterTestStatus ) { synchronized ( this ) { if ( clusterTestStatus.isDone() ) { runtimeTestStatus = clusterTestStatus; notifyAll(); } } } @VisibleForTesting void refreshTree() { if ( spoon != null && spoon.getShell() != null ) { spoon.getShell().getDisplay().asyncExec( () -> spoon.refreshTree( STRING_NAMED_CLUSTERS ) ); } } @VisibleForTesting String getNamedClusterConfigsRootDir() { return System.getProperty( "user.home" ) + File.separator + ".pentaho" + File.separator + "metastore" + File.separator + "pentaho" + File.separator + "NamedCluster" + File.separator + "Configs"; } public static String doGet( String endpointURL ) { String result = null; try { HttpGet httpGet = new HttpGet( endpointURL ); HttpHost targetHost = new HttpHost( httpGet.getURI().getHost(), httpGet.getURI().getPort(), httpGet.getURI().getScheme() ); BasicCredentialsProvider credsProvider = new BasicCredentialsProvider(); AuthScope authScope = new AuthScope( targetHost ); String userName = NamedClusterHelper.getSecurityCredentials().get( NamedClusterHelper.USERNAME ); String password = NamedClusterHelper.getSecurityCredentials().get( NamedClusterHelper.PASSWORD ); credsProvider.setCredentials( authScope, new UsernamePasswordCredentials( userName, password ) ); AuthCache authCache = new BasicAuthCache(); authCache.put( targetHost, new BasicScheme() ); HttpClientContext context = HttpClientContext.create(); context.setCredentialsProvider( credsProvider ); context.setAuthCache( authCache ); try ( CloseableHttpClient httpClient = createHttpClientAcceptingTlsIfNeeded( endpointURL ) ) { try ( CloseableHttpResponse response = httpClient.execute( httpGet, context ) ) { HttpEntity entity = response.getEntity(); if ( entity != null ) { result = EntityUtils.toString( entity ); } } } } catch ( Exception e ) { log.logError( e.getMessage() ); } return result; } private boolean doMultipartHttpPost( String endpoint, ThinNameClusterModel thinNameClusterModel, File driverFile ) throws BadSiteFilesException, IOException { boolean result; String endpointURL = NamedClusterHelper.getEndpointURL( endpoint ); HttpPost httpPost = new HttpPost( endpointURL ); HttpHost targetHost = new HttpHost( httpPost.getURI().getHost(), 
httpPost.getURI().getPort(), httpPost.getURI().getScheme() ); BasicCredentialsProvider credsProvider = new BasicCredentialsProvider(); AuthScope authScope = new AuthScope( targetHost ); String userName = NamedClusterHelper.getSecurityCredentials().get( NamedClusterHelper.USERNAME ); String password = NamedClusterHelper.getSecurityCredentials().get( NamedClusterHelper.PASSWORD ); credsProvider.setCredentials( authScope, new UsernamePasswordCredentials( userName, password ) ); AuthCache authCache = new BasicAuthCache(); authCache.put( targetHost, new BasicScheme() ); HttpClientContext context = HttpClientContext.create(); context.setCredentialsProvider( credsProvider ); context.setAuthCache( authCache ); MultipartEntityBuilder builder = MultipartEntityBuilder.create(); if ( driverFile != null ) { builder.addBinaryBody( driverFile.getName(), driverFile, ContentType.APPLICATION_OCTET_STREAM, driverFile.getName() ); } else { Map siteFileSource = NamedClusterHelper.processSiteFiles( thinNameClusterModel, this ); for ( Map.Entry siteFile : siteFileSource.entrySet() ) { String name = siteFile.getValue().getFieldName(); if ( isValidConfigurationFile( name ) ) { if ( name.equals( KEYTAB_AUTH_FILE ) || name.equals( KEYTAB_IMPL_FILE ) || !name.endsWith( "-site.xml" ) ) { builder.addBinaryBody( name, siteFile.getValue().getCachedInputStream(), ContentType.APPLICATION_OCTET_STREAM, siteFile.getValue().getName() ); } else { builder.addBinaryBody( siteFile.getValue().getName(), siteFile.getValue().getCachedInputStream(), ContentType.APPLICATION_OCTET_STREAM, siteFile.getValue().getName() ); } } } } if ( thinNameClusterModel != null ) { ObjectMapper mapper = new ObjectMapper(); String json = mapper.writeValueAsString( thinNameClusterModel ); builder.addTextBody( "data", json, ContentType.APPLICATION_JSON ); } // Use an HTTP client that can accept TLS or load certificates when needed. try { CloseableHttpClient httpClient = createHttpClientAcceptingTlsIfNeeded( endpointURL ); try ( CloseableHttpClient client = httpClient ) { HttpEntity multipart = builder.build(); httpPost.setEntity( multipart ); try ( CloseableHttpResponse response = client.execute( httpPost, context ) ) { result = response.getStatusLine().getStatusCode() == 200; } } } catch ( Exception e ) { throw new IOException( e ); } return result; } private static CloseableHttpClient createHttpClientAcceptingTlsIfNeeded( String endpointURL ) throws Exception { if ( endpointURL != null && endpointURL.toLowerCase().startsWith( "https" ) ) { // Create an SSLContext that trusts all certificates (useful for self-signed certs). // Warning: this disables certificate validation and hostname verification — use only if you understand the risks. 
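// A possible hardening sketch (not what this method does, and assuming HttpClient 4.5+): trust
// material can instead be loaded through SSLContextBuilder so that hostname verification stays on,
// for example
//   javax.net.ssl.SSLContext ctx = org.apache.http.ssl.SSLContexts.custom()
//       .loadTrustMaterial( (java.security.KeyStore) null, new org.apache.http.conn.ssl.TrustSelfSignedStrategy() )
//       .build();
//   return HttpClients.custom().setSSLContext( ctx ).build();
// The code below keeps the permissive trust-all behavior described in the warning above.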
javax.net.ssl.TrustManager[] trustAll = new javax.net.ssl.TrustManager[] { new javax.net.ssl.X509TrustManager() { public java.security.cert.X509Certificate[] getAcceptedIssuers() { return new java.security.cert.X509Certificate[0]; } public void checkClientTrusted( java.security.cert.X509Certificate[] certs, String authType ) { } public void checkServerTrusted( java.security.cert.X509Certificate[] certs, String authType ) { } } }; javax.net.ssl.SSLContext sslContext = javax.net.ssl.SSLContext.getInstance( "TLS" ); sslContext.init( null, trustAll, new java.security.SecureRandom() ); org.apache.http.conn.ssl.SSLConnectionSocketFactory sslsf = new org.apache.http.conn.ssl.SSLConnectionSocketFactory( sslContext, org.apache.http.conn.ssl.NoopHostnameVerifier.INSTANCE ); org.apache.http.config.Registry registry = org.apache.http.config.RegistryBuilder.create() .register( "https", sslsf ) .register( "http", new org.apache.http.conn.socket.PlainConnectionSocketFactory() ) .build(); org.apache.http.impl.conn.PoolingHttpClientConnectionManager cm = new org.apache.http.impl.conn.PoolingHttpClientConnectionManager( registry ); return HttpClients.custom().setConnectionManager( cm ).build(); } else { return HttpClients.createDefault(); } } public boolean processDriverFile( String driverFile, HadoopClusterManager manager ) throws Exception { boolean result = false; if ( NamedClusterHelper.isConnectedToRepo() ) { File file = new File( driverFile ); if ( NamedClusterHelper.isValidUpload( file.getName(), NamedClusterHelper.FileType.DRIVER, manager ) ) { result = doMultipartHttpPost( "installDriver", null, file ); } } else { File file = new File( driverFile ); FileInputStream driverStream = new FileInputStream( file ); if ( NamedClusterHelper.isValidUpload( file.getName(), NamedClusterHelper.FileType.DRIVER, manager ) ) { String destination = Const.getShimDriverDeploymentLocation(); FileUtils.copyInputStreamToFile( driverStream, new File( destination + File.separator + file.getName() ) ); result = true; } } return result; } public void saveNewNamedCluster( ThinNameClusterModel thinNameClusterModel, String dialogState ) throws IOException, BadSiteFilesException { if ( NamedClusterHelper.isConnectedToRepo() ) { if ( dialogState.equals( "new-edit" ) ) { doMultipartHttpPost( "createNamedCluster", thinNameClusterModel, null ); } if ( dialogState.equals( "import" ) ) { doMultipartHttpPost( "importNamedCluster", thinNameClusterModel, null ); } } else { Map siteFiles = processSiteFiles( thinNameClusterModel, this ); if ( dialogState.equals( "new-edit" ) ) { createNamedCluster( thinNameClusterModel, siteFiles ); } if ( dialogState.equals( "import" ) ) { importNamedCluster( thinNameClusterModel, siteFiles ); } } } public void saveEditedNamedCluster( ThinNameClusterModel thinNameClusterModel, boolean isEditMode ) throws IOException, BadSiteFilesException { if ( NamedClusterHelper.isConnectedToRepo() ) { if( isEditMode ) { doMultipartHttpPost( "editNamedCluster", thinNameClusterModel, null ); } else { doMultipartHttpPost( "duplicateNamedCluster", thinNameClusterModel, null ); } } else { Map siteFiles = processSiteFiles( thinNameClusterModel, this ); editNamedCluster( thinNameClusterModel, isEditMode, siteFiles ); } } } ================================================ FILE: kettle-plugins/hadoop-cluster/ui/src/main/java/org/pentaho/big/data/kettle/plugins/hadoopcluster/ui/endpoints/Test.java ================================================ /*! 
****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.hadoopcluster.ui.endpoints; public class Test { private String testName = ""; private String testStatus = ""; private boolean isTestActive = false; public Test() { } public Test( String name ) { setTestName( name ); } public String getTestName() { return testName; } public void setTestName( String testName ) { this.testName = testName; } public String getTestStatus() { return testStatus; } public void setTestStatus( String testStatus ) { this.testStatus = testStatus; } public boolean isTestActive() { return isTestActive; } public void setTestActive( boolean testActive ) { isTestActive = testActive; } } ================================================ FILE: kettle-plugins/hadoop-cluster/ui/src/main/java/org/pentaho/big/data/kettle/plugins/hadoopcluster/ui/endpoints/TestCategory.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.hadoopcluster.ui.endpoints; import java.util.ArrayList; import java.util.List; public class TestCategory implements Category { private List tests = new ArrayList<>(); private String categoryName = ""; private String categoryStatus = ""; private boolean isCategoryActive = false; public TestCategory() { } public TestCategory( String name ) { setCategoryName( name ); } public List getTests() { return tests; } public String getCategoryName() { return categoryName; } public void setCategoryName( String categoryName ) { this.categoryName = categoryName; } public void setTests( List tests ) { this.tests = tests; } public String getCategoryStatus() { return categoryStatus; } public void setCategoryStatus( String categoryStatus ) { this.categoryStatus = categoryStatus; } public boolean isCategoryActive() { return isCategoryActive; } public void setCategoryActive( boolean categoryActive ) { isCategoryActive = categoryActive; } public void addTest( Test test ) { this.tests.add( test ); } } ================================================ FILE: kettle-plugins/hadoop-cluster/ui/src/main/java/org/pentaho/big/data/kettle/plugins/hadoopcluster/ui/lifecycle/HadoopClusterLifecycleListener.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. 
* * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.hadoopcluster.ui.lifecycle; import org.pentaho.big.data.kettle.plugins.hadoopcluster.ui.tree.ThinHadoopClusterFolderProvider; import org.pentaho.di.core.annotations.LifecyclePlugin; import org.pentaho.di.core.lifecycle.LifeEventHandler; import org.pentaho.di.core.lifecycle.LifecycleException; import org.pentaho.di.core.lifecycle.LifecycleListener; import org.pentaho.di.ui.spoon.Spoon; import java.util.function.Supplier; @LifecyclePlugin( id = "HadoopClusterLifecycleListener" ) public class HadoopClusterLifecycleListener implements LifecycleListener { private Supplier<Spoon> spoonSupplier = Spoon::getInstance; @Override public void onStart( LifeEventHandler handler ) throws LifecycleException { Spoon spoon = spoonSupplier.get(); if ( spoon != null ) { spoon.getTreeManager().addTreeProvider( Spoon.STRING_CONFIGURATIONS, new ThinHadoopClusterFolderProvider( ) ); } } @Override public void onExit( LifeEventHandler handler ) throws LifecycleException { } } ================================================ FILE: kettle-plugins/hadoop-cluster/ui/src/main/java/org/pentaho/big/data/kettle/plugins/hadoopcluster/ui/model/ThinNameClusterModel.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.hadoopcluster.ui.model; import org.pentaho.big.data.kettle.plugins.hadoopcluster.ui.endpoints.CachedFileItemStream; import org.json.simple.JSONObject; import org.json.simple.parser.JSONParser; import org.pentaho.di.core.logging.KettleLogStore; import org.pentaho.di.core.logging.LogChannelInterface; import java.io.InputStreamReader; import java.util.AbstractMap.SimpleImmutableEntry; import java.util.List; import java.util.Map; import java.util.stream.Collectors; public class ThinNameClusterModel { private static final LogChannelInterface log = KettleLogStore.getLogChannelInterfaceFactory().create( "ThinNameClusterModel" ); public static final String NAME_KEY = "name"; private String name; private String shimIdentifier; private String hdfsHost; private String hdfsPort; private String hdfsUsername; private String hdfsPassword; private String jobTrackerHost; private String jobTrackerPort; private String zooKeeperHost; private String zooKeeperPort; private String oozieUrl; private String kafkaBootstrapServers; private String oldName; private String securityType; private String kerberosSubType; private String kerberosAuthenticationUsername; private String kerberosAuthenticationPassword; private String kerberosImpersonationUsername; private String kerberosImpersonationPassword; private String gatewayUrl; private String gatewayUsername; private String gatewayPassword; private String keytabAuthFile; private String keytabImpFile; private List<SimpleImmutableEntry<String, String>> siteFiles; public void setShimIdentifier( String shimIdentifier ) { this.shimIdentifier = shimIdentifier; } public String getShimIdentifier() { return shimIdentifier; } public String getHdfsHost() { return hdfsHost; } public void setHdfsHost( String hdfsHost ) { this.hdfsHost = hdfsHost; } public String getHdfsPort() { return
hdfsPort; } public void setHdfsPort( String hdfsPort ) { this.hdfsPort = hdfsPort; } public String getHdfsUsername() { return hdfsUsername; } public void setHdfsUsername( String hdfsUsername ) { this.hdfsUsername = hdfsUsername; } public String getHdfsPassword() { return hdfsPassword; } public void setHdfsPassword( String hdfsPassword ) { this.hdfsPassword = hdfsPassword; } public String getJobTrackerHost() { return jobTrackerHost; } public void setJobTrackerHost( String jobTrackerHost ) { this.jobTrackerHost = jobTrackerHost; } public String getJobTrackerPort() { return jobTrackerPort; } public void setJobTrackerPort( String jobTrackerPort ) { this.jobTrackerPort = jobTrackerPort; } public String getZooKeeperHost() { return zooKeeperHost; } public void setZooKeeperHost( String zooKeeperHost ) { this.zooKeeperHost = zooKeeperHost; } public String getZooKeeperPort() { return zooKeeperPort; } public void setZooKeeperPort( String zooKeeperPort ) { this.zooKeeperPort = zooKeeperPort; } public String getKafkaBootstrapServers() { return kafkaBootstrapServers; } public void setKafkaBootstrapServers( String kafkaBootstrapServers ) { this.kafkaBootstrapServers = kafkaBootstrapServers; } public String getName() { return name; } public void setName( String name ) { this.name = name; } public String getOozieUrl() { return oozieUrl; } public void setOozieUrl( String oozieUrl ) { this.oozieUrl = oozieUrl; } public String getOldName() { return oldName; } public void setOldName( String oldName ) { this.oldName = oldName; } public String getSecurityType() { return securityType; } public void setSecurityType( String securityType ) { this.securityType = securityType; } public String getKerberosSubType() { return kerberosSubType; } public void setKerberosSubType( String kerberosSubType ) { this.kerberosSubType = kerberosSubType; } public String getKerberosAuthenticationUsername() { return kerberosAuthenticationUsername; } public void setKerberosAuthenticationUsername( String kerberosAuthenticationUsername ) { this.kerberosAuthenticationUsername = kerberosAuthenticationUsername; } public String getKerberosAuthenticationPassword() { return kerberosAuthenticationPassword; } public void setKerberosAuthenticationPassword( String kerberosAuthenticationPassword ) { this.kerberosAuthenticationPassword = kerberosAuthenticationPassword; } public String getKerberosImpersonationUsername() { return kerberosImpersonationUsername; } public void setKerberosImpersonationUsername( String kerberosImpersonationUsername ) { this.kerberosImpersonationUsername = kerberosImpersonationUsername; } public String getKerberosImpersonationPassword() { return kerberosImpersonationPassword; } public void setKerberosImpersonationPassword( String kerberosImpersonationPassword ) { this.kerberosImpersonationPassword = kerberosImpersonationPassword; } public String getGatewayUrl() { return gatewayUrl; } public void setGatewayUrl( String gatewayUrl ) { this.gatewayUrl = gatewayUrl; } public String getGatewayUsername() { return gatewayUsername; } public void setGatewayUsername( String gatewayUsername ) { this.gatewayUsername = gatewayUsername; } public String getGatewayPassword() { return gatewayPassword; } public void setGatewayPassword( String gatewayPassword ) { this.gatewayPassword = gatewayPassword; } public String getKeytabAuthFile() { return keytabAuthFile; } public void setKeytabAuthFile( String keytabAuthFile ) { this.keytabAuthFile = keytabAuthFile; } public String getKeytabImpFile() { return keytabImpFile; } public void 
setKeytabImpFile( String keytabImpFile ) { this.keytabImpFile = keytabImpFile; } public List<SimpleImmutableEntry<String, String>> getSiteFiles() { return siteFiles; } public void setSiteFiles( List<SimpleImmutableEntry<String, String>> siteFiles ) { this.siteFiles = siteFiles; } public static ThinNameClusterModel unmarshall( Map<String, CachedFileItemStream> siteFilesSource ) { ThinNameClusterModel model = new ThinNameClusterModel(); try { final CachedFileItemStream fileItemStream = siteFilesSource.remove( "data" ); InputStreamReader inputStreamReader = new InputStreamReader( fileItemStream.getCachedInputStream() ); JSONParser parser = new JSONParser(); JSONObject json = (JSONObject) parser.parse( inputStreamReader ); model.setName( (String) json.get( "name" ) ); model.setShimIdentifier( (String) json.get( "shimIdentifier" ) ); model.setHdfsHost( (String) json.get( "hdfsHost" ) ); model.setHdfsPort( (String) json.get( "hdfsPort" ) ); model.setHdfsUsername( (String) json.get( "hdfsUsername" ) ); model.setHdfsPassword( (String) json.get( "hdfsPassword" ) ); model.setJobTrackerHost( (String) json.get( "jobTrackerHost" ) ); model.setJobTrackerPort( (String) json.get( "jobTrackerPort" ) ); model.setZooKeeperHost( (String) json.get( "zooKeeperHost" ) ); model.setZooKeeperPort( (String) json.get( "zooKeeperPort" ) ); model.setOozieUrl( (String) json.get( "oozieUrl" ) ); model.setKafkaBootstrapServers( (String) json.get( "kafkaBootstrapServers" ) ); model.setOldName( (String) json.get( "oldName" ) ); model.setSecurityType( (String) json.get( "securityType" ) ); model.setKerberosSubType( (String) json.get( "kerberosSubType" ) ); model.setKerberosAuthenticationUsername( (String) json.get( "kerberosAuthenticationUsername" ) ); model.setKerberosAuthenticationPassword( (String) json.get( "kerberosAuthenticationPassword" ) ); model.setKerberosImpersonationUsername( (String) json.get( "kerberosImpersonationUsername" ) ); model.setKerberosImpersonationPassword( (String) json.get( "kerberosImpersonationPassword" ) ); model.setGatewayUrl( (String) json.get( "gatewayUrl" ) ); model.setGatewayUsername( (String) json.get( "gatewayUsername" ) ); model.setGatewayPassword( (String) json.get( "gatewayPassword" ) ); model.setKeytabImpFile( (String) json.get( "keytabImpFile" ) ); model.setKeytabAuthFile( (String) json.get( "keytabAuthFile" ) ); model.setSiteFiles( siteFilesSource.keySet().stream() .map( name -> new SimpleImmutableEntry<>( NAME_KEY, name ) ) .collect( Collectors.toList() ) ); } catch ( Exception e ) { log.logError( e.getMessage() ); } return model; } } ================================================ FILE: kettle-plugins/hadoop-cluster/ui/src/main/java/org/pentaho/big/data/kettle/plugins/hadoopcluster/ui/tree/HadoopClusterPopupMenuExtension.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. 
* * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.hadoopcluster.ui.tree; import com.google.common.collect.ImmutableMap; import org.eclipse.jface.dialogs.MessageDialog; import org.eclipse.swt.SWT; import org.eclipse.swt.events.SelectionAdapter; import org.eclipse.swt.events.SelectionEvent; import org.eclipse.swt.widgets.Menu; import org.eclipse.swt.widgets.MenuItem; import org.eclipse.swt.widgets.Tree; import org.pentaho.big.data.api.services.BigDataServicesHelper; import org.pentaho.big.data.kettle.plugins.hadoopcluster.ui.dialog.HadoopClusterDelegate; import org.pentaho.big.data.kettle.plugins.hadoopcluster.ui.endpoints.HadoopClusterManager; import org.pentaho.di.core.extension.ExtensionPoint; import org.pentaho.di.core.extension.ExtensionPointInterface; import org.pentaho.di.core.logging.LogChannelInterface; import org.pentaho.di.i18n.BaseMessages; import org.pentaho.di.repository.Repository; import org.pentaho.di.ui.core.ConstUI; import org.pentaho.di.ui.spoon.Spoon; import org.pentaho.di.ui.spoon.TreeSelection; import org.pentaho.hadoop.shim.api.cluster.NamedCluster; import org.pentaho.hadoop.shim.api.cluster.NamedClusterService; import org.pentaho.runtime.test.RuntimeTester; import org.pentaho.runtime.test.impl.RuntimeTesterImpl; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import java.io.UnsupportedEncodingException; import java.net.URLDecoder; import java.net.URLEncoder; import java.util.Collections; import java.util.Map; import java.util.function.Supplier; import static org.pentaho.di.i18n.BaseMessages.getString; @ExtensionPoint( id = "HadoopClusterPopupMenuExtension", description = "Creates popup menus for Hadoop clusters", extensionPointId = "SpoonPopupMenuExtension" ) public class HadoopClusterPopupMenuExtension implements ExtensionPointInterface { private static final Class PKG = HadoopClusterPopupMenuExtension.class; public static final String IMPORT_STATE = "import"; public static final String NEW_EDIT_STATE = "new-edit"; public static final String TESTING_STATE = "testing"; public static final String DELETE_STATE = "delete"; private static final int RESULT_YES = 0; private Supplier<Spoon> spoonSupplier = Spoon::getInstance; private Menu rootMenu; private Menu itemMenu; private HadoopClusterDelegate hadoopClusterDelegate; private NamedClusterService namedClusterService; private String internalShim; private static final Logger logChannel = LogManager.getLogger( HadoopClusterPopupMenuExtension.class ); private NamedCluster lastNamedCluster; private RuntimeTester runtimeTester = RuntimeTesterImpl.getInstance(); private HadoopClusterManager hadoopClusterManager; public HadoopClusterPopupMenuExtension() { this.namedClusterService = BigDataServicesHelper.getNamedClusterService(); this.hadoopClusterDelegate = new HadoopClusterDelegate( this.namedClusterService, runtimeTester ); this.internalShim = ""; this.hadoopClusterManager = new HadoopClusterManager( spoonSupplier.get(), namedClusterService, spoonSupplier.get().getMetaStore(), internalShim ); } public HadoopClusterPopupMenuExtension( HadoopClusterDelegate hadoopClusterDelegate, NamedClusterService namedClusterService, String internalShim ) { this.hadoopClusterDelegate = hadoopClusterDelegate; this.namedClusterService = namedClusterService; this.internalShim = internalShim; } public void callExtensionPoint( LogChannelInterface log, Object extension ) { final Tree selectionTree = (Tree) extension; 
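// The extension point payload is Spoon's selection tree; from the current selection we either build the root-level menu (New/Import) or the per-cluster maintenance menu.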
createNewPopupMenu( selectionTree ); } private void createNewPopupMenu( final Tree selectionTree ) { Menu popupMenu = null; TreeSelection[] objects = spoonSupplier.get().getTreeObjects( selectionTree ); if ( objects.length != 1 ) { return; } TreeSelection object = objects[ 0 ]; Object selection = object.getSelection(); if ( selection instanceof Class && selection.equals( NamedCluster.class ) ) { popupMenu = createRootPopupMenu( selectionTree ); } else if ( selection instanceof NamedCluster ) { popupMenu = createMaintPopupMenu( selectionTree, (NamedCluster) selection ); } if ( popupMenu != null ) { ConstUI.displayMenu( popupMenu, selectionTree ); } else { selectionTree.setMenu( null ); } } private Menu createRootPopupMenu( final Tree tree ) { if ( !showAdminFunctions() ) { return null; } if ( rootMenu == null ) { rootMenu = new Menu( tree ); createPopupMenuItem( rootMenu, getString( PKG, "HadoopClusterPopupMenuExtension.MenuItem.New" ), NEW_EDIT_STATE ); createPopupMenuItem( rootMenu, getString( PKG, "HadoopClusterPopupMenuExtension.MenuItem.Import" ), IMPORT_STATE ); } return rootMenu; } public Menu createMaintPopupMenu( final Tree selectionTree, NamedCluster namedCluster ) { // don't create another menu if the current one is for this namedCluster, // otherwise we can see extra pop-up menus. if ( itemMenu == null || !namedCluster.equals( this.lastNamedCluster ) ) { this.lastNamedCluster = namedCluster; itemMenu = new Menu( selectionTree ); try { String name = URLEncoder.encode( namedCluster.getName(), "UTF-8" ); if ( showAdminFunctions() ) { createPopupMenuItem( itemMenu, getString( PKG, "HadoopClusterPopupMenuExtension.MenuItem.Edit" ), NEW_EDIT_STATE, ImmutableMap.of( "name", name ) ); createPopupMenuItem( itemMenu, getString( PKG, "HadoopClusterPopupMenuExtension.MenuItem.Duplicate" ), NEW_EDIT_STATE, ImmutableMap.of( "name", name, "duplicateName", getString( PKG, "HadoopClusterPopupMenuExtension.Duplicate.Prefix" ) + name ) ); } createPopupMenuItem( itemMenu, getString( PKG, "HadoopClusterPopupMenuExtension.MenuItem.Test" ), TESTING_STATE, ImmutableMap.of( "name", name ) ); if ( showAdminFunctions() ) { createDeleteMenuItem( itemMenu, getString( PKG, "HadoopClusterPopupMenuExtension.MenuItem.Delete" ), name ); } } catch ( UnsupportedEncodingException e ) { logChannel.error( e.getMessage() ); } } return itemMenu; } private boolean showAdminFunctions() { Repository repo = spoonSupplier.get().getRepository(); if ( repo != null && repo.getUri().isPresent() ) { return repo.getSecurityProvider().getUserInfo().isAdmin(); } return true; } private void createPopupMenuItem( Menu menu, String menuItemLabel, String state ) { createPopupMenuItem( menu, menuItemLabel, state, Collections.emptyMap() ); } private void createPopupMenuItem( Menu menu, String menuItemLabel, String state, Map urlParams ) { MenuItem menuItem = new MenuItem( menu, SWT.NONE ); menuItem.setText( menuItemLabel ); menuItem.addSelectionListener( new SelectionAdapter() { @Override public void widgetSelected( SelectionEvent selectionEvent ) { hadoopClusterDelegate.openDialog( state, urlParams ); } } ); } private void createDeleteMenuItem( Menu menu, String menuItemLabel, String namedCluster ) { MenuItem menuItem = new MenuItem( menu, SWT.NONE ); menuItem.setText( menuItemLabel ); menuItem.addSelectionListener( new SelectionAdapter() { @Override public void widgetSelected( SelectionEvent selectionEvent ) { try { String nCluster = URLDecoder.decode( namedCluster, "UTF-8" ); String title = BaseMessages.getString( PKG, 
"PopupMenuFactory.NAMEDCLUSTERS.DeleteNamedClusterAsk.Title" ); String message = BaseMessages.getString( PKG, "PopupMenuFactory.NAMEDCLUSTERS.DeleteNamedClusterAsk.Message", nCluster ); String deleteButton = BaseMessages.getString( PKG, "PopupMenuFactory.NAMEDCLUSTERS.DeleteNamedClusterAsk.Delete" ); String doNotDeleteButton = BaseMessages.getString( PKG, "PopupMenuFactory.NAMEDCLUSTERS.DeleteNamedClusterAsk.DoNotDelete" ); MessageDialog dialog = new MessageDialog( spoonSupplier.get().getShell(), title, null, message, MessageDialog.WARNING, new String[] { deleteButton, doNotDeleteButton }, 0 ); int response = dialog.open(); if ( response != RESULT_YES ) { return; } hadoopClusterManager.deleteNamedCluster( spoonSupplier.get().getMetaStore(), nCluster, true ); } catch ( UnsupportedEncodingException e ) { logChannel.error( e.getMessage() ); } } } ); } } ================================================ FILE: kettle-plugins/hadoop-cluster/ui/src/main/java/org/pentaho/big/data/kettle/plugins/hadoopcluster/ui/tree/ThinHadoopClusterEditExtension.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.hadoopcluster.ui.tree; import com.google.common.collect.ImmutableMap; import org.pentaho.big.data.api.services.BigDataServicesHelper; import org.pentaho.big.data.kettle.plugins.hadoopcluster.ui.dialog.HadoopClusterDelegate; import org.pentaho.di.core.exception.KettleException; import org.pentaho.di.core.extension.ExtensionPoint; import org.pentaho.di.core.extension.ExtensionPointInterface; import org.pentaho.di.core.logging.LogChannelInterface; import org.pentaho.di.ui.spoon.SelectionTreeExtension; import org.pentaho.di.ui.spoon.Spoon; import org.pentaho.runtime.test.impl.RuntimeTesterImpl; import org.pentaho.hadoop.shim.api.cluster.NamedCluster; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import java.io.UnsupportedEncodingException; import java.net.URLEncoder; import java.util.Collections; @ExtensionPoint( id = "ThinHadoopClusterEditExtension", description = "Edits named cluster", extensionPointId = "SpoonViewTreeExtension" ) public class ThinHadoopClusterEditExtension implements ExtensionPointInterface { HadoopClusterDelegate hadoopClusterDelegate; private static final Logger logChannel = LogManager.getLogger( ThinHadoopClusterEditExtension.class ); public ThinHadoopClusterEditExtension() { this.hadoopClusterDelegate = new HadoopClusterDelegate( BigDataServicesHelper.getNamedClusterService(), RuntimeTesterImpl.getInstance() ); } public ThinHadoopClusterEditExtension( HadoopClusterDelegate hadoopClusterDelegate ) { this.hadoopClusterDelegate = hadoopClusterDelegate; } public void callExtensionPoint( LogChannelInterface log, Object extension ) throws KettleException { try { SelectionTreeExtension selectionTreeExtension = (SelectionTreeExtension) extension; Object selection = selectionTreeExtension.getSelection(); if ( selectionTreeExtension.getAction().equals( Spoon.EDIT_SELECTION_EXTENSION ) ) { if ( selection instanceof NamedCluster ) { NamedCluster namedCluster = (NamedCluster) selection; String name = URLEncoder.encode( namedCluster.getName(), 
"UTF-8" ); hadoopClusterDelegate.openDialog( "new-edit", ImmutableMap.of( "name", name ) ); } } else if ( selectionTreeExtension.getAction().equals( Spoon.CREATE_NEW_SELECTION_EXTENSION ) ) { if ( selection.equals( NamedCluster.class ) ) { hadoopClusterDelegate.openDialog("new-edit", Collections.emptyMap()); } } } catch ( UnsupportedEncodingException e ) { logChannel.error( e.getMessage() ); } } } ================================================ FILE: kettle-plugins/hadoop-cluster/ui/src/main/java/org/pentaho/big/data/kettle/plugins/hadoopcluster/ui/tree/ThinHadoopClusterFolderProvider.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.hadoopcluster.ui.tree; import org.eclipse.swt.graphics.Image; import org.pentaho.big.data.api.services.BigDataServicesHelper; import org.pentaho.di.base.AbstractMeta; import org.pentaho.di.i18n.BaseMessages; import org.pentaho.di.ui.core.ConstUI; import org.pentaho.di.ui.core.dialog.ErrorDialog; import org.pentaho.di.ui.core.widget.tree.TreeNode; import org.pentaho.di.ui.spoon.Spoon; import org.pentaho.di.ui.spoon.tree.TreeFolderProvider; import org.pentaho.di.ui.util.SwtSvgImageUtil; import org.pentaho.hadoop.shim.api.cluster.NamedCluster; import org.pentaho.hadoop.shim.api.cluster.NamedClusterService; import org.pentaho.metastore.api.exceptions.MetaStoreException; import java.util.ArrayList; import java.util.List; import java.util.function.Supplier; import java.util.Optional; public class ThinHadoopClusterFolderProvider extends TreeFolderProvider { public static final String STRING_NEW_HADOOP_CLUSTER = BaseMessages.getString( ThinHadoopClusterFolderProvider.class, "HadoopClusterTree.Title" ); private static Class PKG = Spoon.class; private Supplier spoonSupplier = Spoon::getInstance; private NamedClusterService namedClusterService; @Override public void refresh( Optional meta, TreeNode treeNode, String filter ) { if ( getNamedClusterService() != null ) { List namedClusters = null; List exceptionList = new ArrayList<>(); try { namedClusters = getNamedClusterService().list( Spoon.getInstance().getMetaStore(), exceptionList ); for ( MetaStoreException e : exceptionList ) { new ErrorDialog( Spoon.getInstance().getShell(), BaseMessages.getString( PKG, "Spoon.ErrorDialog.Title" ), BaseMessages.getString( PKG, "Spoon.ErrorDialog.ErrorFetchingFromRepo.NamedCluster" ), e ); } } catch ( MetaStoreException e ) { new ErrorDialog( Spoon.getInstance().getShell(), BaseMessages.getString( PKG, "Spoon.ErrorDialog.Title" ), BaseMessages.getString( PKG, "Spoon.ErrorDialog.ErrorFetchingFromRepo.NamedCluster" ), e ); return; } for ( NamedCluster namedCluster : namedClusters ) { if ( !filterMatch( namedCluster.getName(), filter ) ) { continue; } createTreeNode( treeNode, namedCluster.getName(), getHadoopClusterImage() ); } } } @Override public String getTitle() { return STRING_NEW_HADOOP_CLUSTER; } @Override public Class getType() { return NamedCluster.class; } private Image getHadoopClusterImage() { return SwtSvgImageUtil .getImage( spoonSupplier.get().getShell().getDisplay(), getClass().getClassLoader(), "images/hadoop_clusters.svg", ConstUI.ICON_SIZE, 
ConstUI.ICON_SIZE ); } private NamedClusterService getNamedClusterService() { if ( namedClusterService == null ) { namedClusterService = BigDataServicesHelper.getNamedClusterService(); } return namedClusterService; } } ================================================ FILE: kettle-plugins/hadoop-cluster/ui/src/main/java/org/pentaho/big/data/kettle/plugins/hadoopcluster/ui/tree/ThinHadoopClusterTreeDelegateExtension.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.hadoopcluster.ui.tree; import org.pentaho.big.data.api.services.BigDataServicesHelper; import org.pentaho.di.base.AbstractMeta; import org.pentaho.di.core.extension.ExtensionPoint; import org.pentaho.di.core.extension.ExtensionPointInterface; import org.pentaho.di.core.logging.LogChannelInterface; import org.pentaho.di.ui.spoon.Spoon; import org.pentaho.di.ui.spoon.TreeSelection; import org.pentaho.di.ui.spoon.delegates.SpoonTreeDelegateExtension; import org.pentaho.hadoop.shim.api.cluster.NamedCluster; import org.pentaho.hadoop.shim.api.cluster.NamedClusterService; import org.pentaho.metastore.api.exceptions.MetaStoreException; import java.util.List; import java.util.function.Supplier; @ExtensionPoint( id = "ThinHadoopClusterTreeDelegateExtension", description = "", extensionPointId = "SpoonTreeDelegateExtension" ) public class ThinHadoopClusterTreeDelegateExtension implements ExtensionPointInterface { private Supplier<Spoon> spoonSupplier = Spoon::getInstance; private NamedClusterService namedClusterService; public void callExtensionPoint( LogChannelInterface log, Object extension ) { SpoonTreeDelegateExtension treeDelExt = (SpoonTreeDelegateExtension) extension; int caseNumber = treeDelExt.getCaseNumber(); AbstractMeta meta = treeDelExt.getTransMeta(); String[] path = treeDelExt.getPath(); List<TreeSelection> objects = treeDelExt.getObjects(); TreeSelection object = null; switch ( caseNumber ) { case 2: if ( path[ 1 ].equals( ThinHadoopClusterFolderProvider.STRING_NEW_HADOOP_CLUSTER ) ) { object = new TreeSelection( path[ 1 ], NamedCluster.class, meta ); } break; case 3: if ( path[ 1 ].equals( ThinHadoopClusterFolderProvider.STRING_NEW_HADOOP_CLUSTER ) ) { try { String name = path[2]; NamedClusterService ncs = getNamedClusterService(); if ( ncs != null ) { NamedCluster nc = ncs.read( name, spoonSupplier.get().getMetaStore() ); object = new TreeSelection( path[2], nc, meta ); } } catch ( MetaStoreException e ) { // Ignore } } break; } if ( object != null ) { objects.add( object ); } } private NamedClusterService getNamedClusterService() { if ( namedClusterService == null ) { namedClusterService = BigDataServicesHelper.getNamedClusterService(); } return namedClusterService; } } ================================================ FILE: kettle-plugins/hadoop-cluster/ui/src/main/resources/kettle-password-encoder-plugins.xml ================================================ Kettle Password Encoder org.pentaho.support.encryption.KettleTwoWayPasswordEncoder ================================================ FILE: 
kettle-plugins/hadoop-cluster/ui/src/main/resources/org/pentaho/big/data/kettle/plugins/hadoopcluster/ui/dialog/messages/messages.properties ================================================ HadoopClusterTree.Title=Hadoop clusters ================================================ FILE: kettle-plugins/hadoop-cluster/ui/src/main/resources/org/pentaho/big/data/kettle/plugins/hadoopcluster/ui/dialog/messages/messages_en_US.properties ================================================ HadoopClusterTree.Title=Hadoop clusters ================================================ FILE: kettle-plugins/hadoop-cluster/ui/src/main/resources/org/pentaho/big/data/kettle/plugins/hadoopcluster/ui/dialog/wizard/pages/messages/messages.properties ================================================ HadoopClusterTree.Title=Hadoop clusters NamedClusterDialog.newCluster.title=New Cluster NamedClusterDialog.editCluster.title=Edit Cluster NamedClusterDialog.importCluster.title=Import Cluster NamedClusterDialog.newCluster=Hadoop Cluster NamedClusterDialog.clusterName=Cluster Name NamedClusterDialog.hdfs=HDFS NamedClusterDialog.hostname=Hostname NamedClusterDialog.port=Port NamedClusterDialog.username=Username NamedClusterDialog.password=Password NamedClusterDialog.jobTracker=Jobtracker NamedClusterDialog.activeDriver=Current Configured Driver: NamedClusterDialog.originalDriver=Original Configured Driver: NamedClusterDialog.noDriver=No driver configured NamedClusterDialog.mismatchedDriver=WARNING - Mismatched driver may have unpredictable results NamedClusterDialog.siteXmlFiles=Site XML files NamedClusterDialog.browseButton=Browse to add file(s) NamedClusterDialog.zooKeeper=ZooKeeper NamedClusterDialog.oozie=Oozie NamedClusterDialog.kafka=Kafka NamedClusterDialog.bootstrapServers=Boostrap servers NamedClusterDialog.file=File NamedClusterDialog.security=What is your security type? NamedClusterDialog.none=None NamedClusterDialog.kerberos=Kerberos NamedClusterDialog.knox=Knox NamedClusterDialog.securityMethod=Security method NamedClusterDialog.authenticationUsername=Authentication username NamedClusterDialog.impersonationUsername=Impersonation username NamedClusterDialog.authenticationKeytab=Authentication Keytab NamedClusterDialog.impersonationKeytab=Impersonation Keytab NamedClusterDialog.gatewayURL=Gateway URL NamedClusterDialog.gatewayUsername=Gateway Username NamedClusterDialog.gatewayPassword=Gateway Password NamedClusterDialog.browse=Browse NamedClusterDialog.remove=Remove NamedClusterDialog.removeSiteFile=Removes selected files NamedClusterDialog.siteFileAlert=Do you want to remove the selected site files? NamedClusterDialog.noFileSelected=No file selected NamedClusterDialog.question=What would you like to do? NamedClusterDialog.viewTestResults=View test results NamedClusterDialog.editCluster=Edit this cluster NamedClusterDialog.createNewCluster=Create a new cluster NamedClusterDialog.importNewCluster=Import a new Cluster NamedClusterDialog.clusterNameExists=A hadoop cluster with the provided name already exists. Please provide a different name. NamedClusterDialog.clusterOverwriteTitle=Overwrite Cluster Configuration? NamedClusterDialog.clusterOverwrite=A hadoop cluster with the name ''{0}'' already exists.\n\nDo you want to overwrite the existing configuration? NamedClusterDialog.testResults=Test results NamedClusterDialog.fail=We couldn't connect. NamedClusterDialog.fail.description=Your Hadoop cluster has been created, and it is working. However, we were unable to connect to some services. 
Please check your Hadoop cluster configuration files(s), and view the test results for further details. NamedClusterDialog.pass=Congratulations! NamedClusterDialog.description.pass=Your Hadoop cluster has been created, and all services appear to be up and running. NamedClusterDialog.import.fail=Import Failed. NamedClusterDialog.import.fail.description=Your Hadoop cluster was not created. Please check your Hadoop cluster configuration file(s). NamedClusterDialog.test.fail=Fail NamedClusterDialog.test.warning=Warning NamedClusterDialog.test.pass=Pass NamedClusterDialog.test.importFailed=ImportFailed NamedClusterDialog.test.hadoopFileSystemConnection=Hadoop File System Connection NamedClusterDialog.clear=Clear NamedClusterDialog.repositoryNotification=This cluster will be stored on the repository, changes may impact other users NamedClusterDialog.help=https://docs.pentaho.com/pdia/11.0-data-integration/extracting-data-into-pdi/connecting-to-a-hadoop-cluster-with-the-pdi-client-article ================================================ FILE: kettle-plugins/hadoop-cluster/ui/src/main/resources/org/pentaho/big/data/kettle/plugins/hadoopcluster/ui/dialog/wizard/pages/messages/messages_en_US.properties ================================================ HadoopClusterTree.Title=Hadoop clusters NamedClusterDialog.newCluster.title=New Cluster NamedClusterDialog.editCluster.title=Edit Cluster NamedClusterDialog.importCluster.title=Import Cluster NamedClusterDialog.newCluster=Hadoop Cluster NamedClusterDialog.clusterName=Cluster Name NamedClusterDialog.hdfs=HDFS NamedClusterDialog.hostname=Hostname NamedClusterDialog.port=Port NamedClusterDialog.username=Username NamedClusterDialog.password=Password NamedClusterDialog.jobTracker=Jobtracker NamedClusterDialog.activeDriver=Current Configured Driver: NamedClusterDialog.originalDriver=Original Configured Driver: NamedClusterDialog.noDriver=No driver configured NamedClusterDialog.mismatchedDriver=WARNING - Mismatched driver may have unpredictable results NamedClusterDialog.siteXmlFiles=Site XML files NamedClusterDialog.browseButton=Browse to add file(s) NamedClusterDialog.zooKeeper=ZooKeeper NamedClusterDialog.oozie=Oozie NamedClusterDialog.kafka=Kafka NamedClusterDialog.bootstrapServers=Boostrap servers NamedClusterDialog.file=File NamedClusterDialog.security=What is your security type? NamedClusterDialog.none=None NamedClusterDialog.kerberos=Kerberos NamedClusterDialog.knox=Knox NamedClusterDialog.securityMethod=Security method NamedClusterDialog.authenticationUsername=Authentication username NamedClusterDialog.impersonationUsername=Impersonation username NamedClusterDialog.authenticationKeytab=Authentication Keytab NamedClusterDialog.impersonationKeytab=Impersonation Keytab NamedClusterDialog.gatewayURL=Gateway URL NamedClusterDialog.gatewayUsername=Gateway Username NamedClusterDialog.gatewayPassword=Gateway Password NamedClusterDialog.browse=Browse NamedClusterDialog.remove=Remove NamedClusterDialog.removeSiteFile=Removes selected files NamedClusterDialog.siteFileAlert=Do you want to remove the selected site files? NamedClusterDialog.noFileSelected=No file selected NamedClusterDialog.question=What would you like to do? NamedClusterDialog.viewTestResults=View test results NamedClusterDialog.editCluster=Edit this cluster NamedClusterDialog.createNewCluster=Create a new cluster NamedClusterDialog.importNewCluster=Import a new Cluster NamedClusterDialog.clusterNameExists=A hadoop cluster with the provided name already exists. Please provide a different name. 
NamedClusterDialog.clusterOverwriteTitle=Overwrite Cluster Configuration? NamedClusterDialog.clusterOverwrite=A hadoop cluster with the name ''{0}'' already exists.\n\nDo you want to overwrite the existing configuration? NamedClusterDialog.testResults=Test results NamedClusterDialog.fail=We couldn't connect. NamedClusterDialog.fail.description=Your Hadoop cluster has been created, and it is working. However, we were unable to connect to some services. Please check your Hadoop cluster configuration files(s), and view the test results for further details. NamedClusterDialog.pass=Congratulations! NamedClusterDialog.description.pass=Your Hadoop cluster has been created, and all services appear to be up and running. NamedClusterDialog.import.fail=Import Failed. NamedClusterDialog.import.fail.description=Your Hadoop cluster was not created. Please check your Hadoop cluster configuration file(s). NamedClusterDialog.test.fail=Fail NamedClusterDialog.test.warning=Warning NamedClusterDialog.test.pass=Pass NamedClusterDialog.test.importFailed=ImportFailed NamedClusterDialog.test.hadoopFileSystemConnection=Hadoop File System Connection NamedClusterDialog.clear=Clear NamedClusterDialog.repositoryNotification=This cluster will be stored on the repository, changes may impact other users NamedClusterDialog.help=https://docs.pentaho.com/pdia/11.0-data-integration/extracting-data-into-pdi/connecting-to-a-hadoop-cluster-with-the-pdi-client-article ================================================ FILE: kettle-plugins/hadoop-cluster/ui/src/main/resources/org/pentaho/big/data/kettle/plugins/hadoopcluster/ui/messages/messages_en_US.properties ================================================ HadoopClusterTree.Title=Hadoop clusters HadoopClusterPopupMenuExtension.MenuItem.New=New cluster HadoopClusterPopupMenuExtension.MenuItem.Import=Import cluster HadoopClusterPopupMenuExtension.MenuItem.Edit=Edit cluster HadoopClusterPopupMenuExtension.MenuItem.Delete=Delete cluster PopupMenuFactory.NAMEDCLUSTERS.DeleteNamedClusterAsk.Message=Are you sure you want to delete your Hadoop Cluster {0}? This cannot be undone! PopupMenuFactory.NAMEDCLUSTERS.DeleteNamedClusterAsk.Title=Delete Hadoop Cluster PopupMenuFactory.NAMEDCLUSTERS.DeleteNamedClusterAsk.Delete=Yes, Delete PopupMenuFactory.NAMEDCLUSTERS.DeleteNamedClusterAsk.DoNotDelete=No ================================================ FILE: kettle-plugins/hadoop-cluster/ui/src/main/resources/org/pentaho/big/data/kettle/plugins/hadoopcluster/ui/tree/messages/messages_en_US.properties ================================================ HadoopClusterTree.Title=Hadoop clusters HadoopClusterPopupMenuExtension.MenuItem.New=New cluster HadoopClusterPopupMenuExtension.MenuItem.Import=Import cluster HadoopClusterPopupMenuExtension.MenuItem.Edit=Edit cluster HadoopClusterPopupMenuExtension.MenuItem.Delete=Delete cluster HadoopClusterPopupMenuExtension.MenuItem.Duplicate=Duplicate cluster HadoopClusterPopupMenuExtension.MenuItem.Test=Test cluster HadoopClusterPopupMenuExtension.Duplicate.Prefix=(copy of)\ PopupMenuFactory.NAMEDCLUSTERS.DeleteNamedClusterAsk.Message=Are you sure you want to delete your Hadoop Cluster {0}? This cannot be undone! 
PopupMenuFactory.NAMEDCLUSTERS.DeleteNamedClusterAsk.Title=Delete Hadoop Cluster PopupMenuFactory.NAMEDCLUSTERS.DeleteNamedClusterAsk.Delete=Yes, Delete PopupMenuFactory.NAMEDCLUSTERS.DeleteNamedClusterAsk.DoNotDelete=No ================================================ FILE: kettle-plugins/hadoop-cluster/ui/src/test/java/org/pentaho/big/data/kettle/plugins/hadoopcluster/ui/endpoints/HadoopClusterManagerTest.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.hadoopcluster.ui.endpoints; import com.google.common.collect.ImmutableList; import org.apache.commons.configuration.ConfigurationException; import org.apache.commons.configuration.PropertiesConfiguration; import org.apache.commons.fileupload2.core.FileItemInput; import org.apache.commons.io.FileUtils; import org.json.simple.JSONObject; import org.junit.After; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import org.mockito.ArgumentCaptor; import org.mockito.Captor; import org.mockito.Mock; import org.mockito.junit.MockitoJUnitRunner; import org.pentaho.big.data.kettle.plugins.hadoopcluster.ui.model.ThinNameClusterModel; import org.pentaho.di.core.encryption.Encr; import org.pentaho.di.core.encryption.TwoWayPasswordEncoderPluginType; import org.pentaho.di.core.logging.KettleLogStore; import org.pentaho.di.core.logging.LogChannelInterface; import org.pentaho.di.core.logging.LogChannelInterfaceFactory; import org.pentaho.di.core.osgi.api.NamedClusterSiteFile; import org.pentaho.di.core.osgi.impl.NamedClusterSiteFileImpl; import org.pentaho.di.core.plugins.PluginRegistry; import org.pentaho.di.core.util.StringUtil; import org.pentaho.di.ui.spoon.Spoon; import org.pentaho.hadoop.shim.api.cluster.NamedCluster; import org.pentaho.hadoop.shim.api.cluster.NamedClusterService; import org.pentaho.hadoop.shim.api.core.ShimIdentifierInterface; import org.pentaho.metastore.stores.delegate.DelegatingMetaStore; import org.pentaho.runtime.test.RuntimeTestStatus; import java.io.ByteArrayInputStream; import java.io.File; import java.io.FileInputStream; import java.io.IOException; import java.util.AbstractMap.SimpleImmutableEntry; import java.util.ArrayList; import java.util.Arrays; import java.util.HashMap; import java.util.List; import java.util.Map; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNotEquals; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.eq; import static org.mockito.Mockito.lenient; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.never; import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; import static org.pentaho.big.data.kettle.plugins.hadoopcluster.ui.endpoints.HadoopClusterManager.PLACEHOLDER_VALUE; import static org.pentaho.big.data.kettle.plugins.hadoopcluster.ui.model.ThinNameClusterModel.NAME_KEY; @RunWith( MockitoJUnitRunner.class ) 
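// Unit tests for HadoopClusterManager: importing site files, creating and editing named clusters, Kerberos/Knox security handling, driver installation, and running cluster tests.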
public class HadoopClusterManagerTest { private static final String CORE_SITE = "core-site.xml"; private static final String HIVE_SITE = "hive-site.xml"; private static final String OOZIE_SITE = "oozie-site.xml"; private static final String YARN_SITE = "yarn-site.xml"; public static final String MAPR_SHIM_VENDOR = "Map-R"; public static final String MAPRFS_SCHEME = "maprfs"; @Mock private Spoon spoon; @Mock private LogChannelInterfaceFactory logChannelFactory; @Mock private LogChannelInterface logChannel; @Mock private NamedClusterService namedClusterService; @Mock private DelegatingMetaStore metaStore; @Mock private NamedCluster namedCluster; @Mock private NamedCluster knoxNamedCluster; @Mock( lenient = true ) private ShimIdentifierInterface cdhShim; @Mock( lenient = true ) private ShimIdentifierInterface internalShim; @Mock( lenient = true ) private ShimIdentifierInterface maprShim; @Captor ArgumentCaptor siteFileCaptor; private String ncTestName = "ncTest"; private String knoxNC = "knoxNC"; private HadoopClusterManager hadoopClusterManager; @Before public void setup() throws Exception { KettleLogStore.setLogChannelInterfaceFactory( logChannelFactory ); when( logChannelFactory.create( any() ) ).thenReturn( logChannel ); PluginRegistry.addPluginType( TwoWayPasswordEncoderPluginType.getInstance() ); PluginRegistry.init( false ); Encr.init( "Kettle" ); if ( getShimTestDir().exists() ) { FileUtils.deleteDirectory( getShimTestDir() ); } when( cdhShim.getId() ).thenReturn( "cdh514" ); when( cdhShim.getVendor() ).thenReturn( "Cloudera" ); when( internalShim.getId() ).thenReturn( "apache" ); when( internalShim.getVendor() ).thenReturn( "Apache" ); when( maprShim.getId() ).thenReturn( "mapr" ); when( maprShim.getVendor() ).thenReturn( MAPR_SHIM_VENDOR ); when( namedClusterService.getClusterTemplate() ).thenReturn( namedCluster ); when( namedCluster.getName() ).thenReturn( ncTestName ); when( namedClusterService.getNamedClusterByName( ncTestName, metaStore ) ).thenReturn( namedCluster ); when( namedCluster.getShimIdentifier() ).thenReturn( "cdh514" ); when( namedClusterService.getNamedClusterByName( knoxNC, metaStore ) ).thenReturn( knoxNamedCluster ); when( knoxNamedCluster.isUseGateway() ).thenReturn( true ); when( knoxNamedCluster.getGatewayPassword() ).thenReturn( "password" ); when( knoxNamedCluster.getGatewayUrl() ).thenReturn( "http://localhost:8008" ); when( knoxNamedCluster.getGatewayUsername() ).thenReturn( "username" ); hadoopClusterManager = new HadoopClusterManager( spoon, namedClusterService, metaStore, "apache" ); when( namedClusterService.list( metaStore ) ).thenReturn( ImmutableList.of( namedCluster ) ); } @Test public void testSecuredImportNamedCluster() throws Exception { ThinNameClusterModel model = new ThinNameClusterModel(); model.setName( ncTestName ); Map cachedFileItemStreamMap = getFiles( "src/test/resources/secured" ); File keytabFileDirectory = new File( "src/test/resources/keytab" ); Map keytabFileItems = getFiles( keytabFileDirectory.getPath(), "keytabAuthFile" ); cachedFileItemStreamMap.putAll( keytabFileItems ); JSONObject result = hadoopClusterManager.importNamedCluster( model, cachedFileItemStreamMap ); assertEquals( ncTestName, result.get( "namedCluster" ) ); verify( namedCluster, times(3) ).addSiteFile( siteFileCaptor.capture() ); assertSiteFields( cachedFileItemStreamMap, siteFileCaptor.getAllValues(), new String[] { "keytabAuthFile" } ); assertTrue( new File( getShimTestDir(), "test.keytab" ).exists() ); } @Test public void testUnsecuredImportNamedCluster() { 
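// Import from unsecured site files: three site files are expected to be registered on the named cluster, and oozie-default.xml is expected to be skipped.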
ThinNameClusterModel model = new ThinNameClusterModel(); model.setName( ncTestName ); Map cachedFileItemStreamMap = getFiles( "src/test/resources/unsecured" ); JSONObject result = hadoopClusterManager.importNamedCluster( model, cachedFileItemStreamMap ); assertEquals( ncTestName, result.get( "namedCluster" ) ); verify( namedCluster, times(3) ).addSiteFile( siteFileCaptor.capture() ); assertSiteFields( cachedFileItemStreamMap, siteFileCaptor.getAllValues(), new String[] { "oozie-default.xml"} ); } private void assertSiteFields( Map streamMap, List siteFiles, String[] missingFiles ) { assertEquals( siteFiles.size(), streamMap.size() - missingFiles.length ); for ( int i = 0; i < siteFiles.size(); i++ ) { NamedClusterSiteFile namedClusterSiteFile = siteFiles.get( i ); for ( String missingFile : missingFiles ) { assertNotEquals( namedClusterSiteFile.getSiteFileName(), missingFile ); } assert ( streamMap.containsKey( namedClusterSiteFile.getSiteFileName() ) ); CachedFileItemStream cachedFileItemStream = streamMap.get( namedClusterSiteFile.getSiteFileName() ); assertEquals( cachedFileItemStream.getLastModified(), namedClusterSiteFile.getSourceFileModificationTime() ); } } @Test public void testMissingInfoImportNamedCluster() { ThinNameClusterModel model = new ThinNameClusterModel(); model.setName( ncTestName ); Map cachedFileItemStreamMap = getFiles( "src/test/resources/missing-info" ); JSONObject result = hadoopClusterManager.importNamedCluster( model, cachedFileItemStreamMap ); assertEquals( ncTestName, result.get( "namedCluster" ) ); verify( namedCluster, times(3) ).addSiteFile( siteFileCaptor.capture() ); assertSiteFields( cachedFileItemStreamMap, siteFileCaptor.getAllValues(), new String[] { "oozie-default.xml"} ); ThinNameClusterModel thinNameClusterModel = hadoopClusterManager.getNamedCluster( ncTestName ); assertTrue( StringUtil.isEmpty( thinNameClusterModel.getHdfsHost() ) ); assertTrue( StringUtil.isEmpty( thinNameClusterModel.getHdfsPort() ) ); assertTrue( StringUtil.isEmpty( thinNameClusterModel.getJobTrackerPort() ) ); } @Test public void testSiteXMLParsingImportNamedCluster() { ThinNameClusterModel model = new ThinNameClusterModel(); model.setName( ncTestName ); JSONObject result = hadoopClusterManager.importNamedCluster( model, getFiles( "src/test/resources/unsecured" ) ); assertEquals( ncTestName, result.get( "namedCluster" ) ); verify( namedCluster ).setJobTrackerHost( "svqxbdcn6cdh514un3.pentahoqa.com" ); verify( namedCluster ).setJobTrackerPort( "8032" ); verify( namedCluster ).setZooKeeperHost( "svqxbdcn6cdh514un1.pentahoqa.com,svqxbdcn6cdh514un5.pentahoqa.com," + "svqxbdcn6cdh514un4.pentahoqa.com,svqxbdcn6cdh514un2.pentahoqa.com,svqxbdcn6cdh514un3.pentahoqa.com" ); } @Test public void testSiteXMLParsingImportDataprocNamedCluster() { ThinNameClusterModel model = new ThinNameClusterModel(); model.setName( ncTestName ); JSONObject result = hadoopClusterManager.importNamedCluster( model, getFiles( "src/test/resources/dataproc" ) ); assertEquals( ncTestName, result.get( "namedCluster" ) ); verify( namedCluster ).setJobTrackerHost( "cluster-ec0a-m-0" ); verify( namedCluster ).setJobTrackerPort( "" ); verify( namedCluster ).setZooKeeperHost( "cluster-ec0a-m-0,cluster-ec0a-m-1,cluster-ec0a-m-2" ); } @Test public void testCreateNamedCluster() { ThinNameClusterModel model = new ThinNameClusterModel(); model.setName( ncTestName ); JSONObject result = hadoopClusterManager.createNamedCluster( model, getFiles( "/" ) ); assertEquals( ncTestName, result.get( "namedCluster" ) ); verify( 
namedCluster, never() ).setStorageScheme( any( String.class ) ); } @Test public void testOverwriteNamedClusterCaseInsensitive() { ThinNameClusterModel model = new ThinNameClusterModel(); model.setName( "NCTESTName" ); hadoopClusterManager.createNamedCluster( model, getFiles( "/" ) ); model = new ThinNameClusterModel(); model.setName( ncTestName ); JSONObject result = hadoopClusterManager.createNamedCluster( model, getFiles( "/" ) ); assertEquals( ncTestName, result.get( "namedCluster" ) ); String shimTestDir = System.getProperty( "user.home" ) + File.separator + ".pentaho" + File.separator + "metastore" + File.separator + "pentaho" + File.separator + "NamedCluster" + File.separator + "Configs" + File.separator + ncTestName; assertTrue( new File( shimTestDir ).exists() ); } @Test public void testEditNamedCluster() throws Exception { ThinNameClusterModel model = new ThinNameClusterModel(); model.setKerberosSubType( "" ); model.setName( ncTestName ); model.setOldName( ncTestName ); JSONObject result = hadoopClusterManager.editNamedCluster( model, true, getFiles( "/" ) ); verify( namedClusterService ).update( any(), any() ); assertEquals( ncTestName, result.get( "namedCluster" ) ); //Not really testing anything here since result is mocked } @Test public void testEditNamedClusterNameChange() throws Exception{ String newNcName = "newNcName"; ThinNameClusterModel model = new ThinNameClusterModel(); model.setKerberosSubType( "" ); model.setName( newNcName ); model.setOldName( ncTestName ); JSONObject result = hadoopClusterManager.editNamedCluster( model, true, getFiles( "/" ) ); verify( namedCluster ).setName( newNcName ); verify( namedClusterService ).create( any(), any() ); } @Test public void testEditRemoveSiteFileNotInModel() { //Create thin model with reference to only 1 site file (mimics 3 being removed in the UI) ThinNameClusterModel model = new ThinNameClusterModel(); model.setKerberosSubType( "" ); model.setName( ncTestName ); model.setOldName( ncTestName ); List> siteFiles = new ArrayList<>(); SimpleImmutableEntry onlySiteFile = new SimpleImmutableEntry<>( NAME_KEY, CORE_SITE ); siteFiles.add( onlySiteFile ); model.setSiteFiles( siteFiles ); //Initialize named cluster mock with 4 site files List namedClusterSiteFiles = new ArrayList<>(); NamedClusterSiteFileImpl sf1 = new NamedClusterSiteFileImpl(); sf1.setSiteFileName( CORE_SITE ); namedClusterSiteFiles.add( sf1 ); NamedClusterSiteFileImpl sf2 = new NamedClusterSiteFileImpl(); sf2.setSiteFileName( HIVE_SITE ); namedClusterSiteFiles.add( sf2 ); NamedClusterSiteFileImpl sf3 = new NamedClusterSiteFileImpl(); sf3.setSiteFileName( OOZIE_SITE ); namedClusterSiteFiles.add( sf3 ); NamedClusterSiteFileImpl sf4 = new NamedClusterSiteFileImpl(); sf4.setSiteFileName( YARN_SITE ); namedClusterSiteFiles.add( sf4 ); //Implement getSiteFiles() for the namedCluster mock when( namedCluster.getSiteFiles() ).thenReturn( namedClusterSiteFiles ); //Get the files, but remove all but 1 to match the thin model final Map filesFromThinClient = getFiles( "src/test/resources/unsecured" ); filesFromThinClient.remove( HIVE_SITE ); filesFromThinClient.remove( OOZIE_SITE ); filesFromThinClient.remove( YARN_SITE ); //Call the edit method JSONObject result = hadoopClusterManager.editNamedCluster( model, true, filesFromThinClient ); //Capture setSiteFiles() for the namedCluster mock ArgumentCaptor> siteFileCaptor = ArgumentCaptor.forClass( (Class) List.class ); verify( namedCluster ).setSiteFiles( siteFileCaptor.capture() ); //Assert that setSiteFiles() siteFileCaptor 
argument was set to have only 1 site file, matching the model assertEquals( 1, siteFileCaptor.getValue().size() ); //Assert that the edit method ran without error and returned the name of the cluster assertEquals( ncTestName, result.get( "namedCluster" ) ); } @Test public void testKeepFilesNotChangedInThinClient() throws IOException { //Create thin model ThinNameClusterModel model = new ThinNameClusterModel(); model.setKerberosSubType( "" ); model.setName( ncTestName ); model.setOldName( ncTestName ); //Thin model references 4 site files List> siteFiles = new ArrayList<>(); SimpleImmutableEntry thinSiteFile1 = new SimpleImmutableEntry<>( "name", CORE_SITE ); siteFiles.add( thinSiteFile1 ); SimpleImmutableEntry thinSiteFile2 = new SimpleImmutableEntry<>( "name", HIVE_SITE ); siteFiles.add( thinSiteFile2 ); SimpleImmutableEntry thinSiteFile3 = new SimpleImmutableEntry<>( "name", OOZIE_SITE ); siteFiles.add( thinSiteFile3 ); SimpleImmutableEntry thinSiteFile4 = new SimpleImmutableEntry<>( "name", YARN_SITE ); siteFiles.add( thinSiteFile4 ); model.setSiteFiles( siteFiles ); //Initialize named cluster mock with 4 site files List namedClusterSiteFiles = new ArrayList<>(); NamedClusterSiteFileImpl sf1 = new NamedClusterSiteFileImpl(); sf1.setSiteFileName( CORE_SITE ); String originalContent = "originalContent"; sf1.setSiteFileContents( originalContent ); namedClusterSiteFiles.add( sf1 ); NamedClusterSiteFileImpl sf2 = new NamedClusterSiteFileImpl(); sf2.setSiteFileName( HIVE_SITE ); sf2.setSiteFileContents( originalContent ); namedClusterSiteFiles.add( sf2 ); NamedClusterSiteFileImpl sf3 = new NamedClusterSiteFileImpl(); sf3.setSiteFileName( OOZIE_SITE ); String originalOozieFileContent = "orig ofc"; sf3.setSiteFileContents( originalOozieFileContent ); namedClusterSiteFiles.add( sf3 ); NamedClusterSiteFileImpl sf4 = new NamedClusterSiteFileImpl(); sf4.setSiteFileName( YARN_SITE ); String originalYarnFileContent = "orig yfc"; sf4.setSiteFileContents( originalYarnFileContent ); namedClusterSiteFiles.add( sf4 ); //Implement getSiteFiles() for the namedCluster mock when( namedCluster.getSiteFiles() ).thenReturn( namedClusterSiteFiles ); //Modify the content of the core and hive site files but leave the other two the same by using the placeholder value //The placeholder value represents a file in the thin client that was not changed. 
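// Simulated upload from the thin client: fresh bytes for core-site and hive-site, PLACEHOLDER_VALUE bytes for oozie-site and yarn-site so their stored contents must be kept.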
final Map filesFromThinClient = new HashMap<>(); String modifiedCoreFileContent = "new cfc"; CachedFileItemStream modifiedCoreSiteFile = new CachedFileItemStream( new ByteArrayInputStream( modifiedCoreFileContent.getBytes() ), CORE_SITE, CORE_SITE ); filesFromThinClient.put( CORE_SITE, modifiedCoreSiteFile ); String modifiedHiveFileContent = "new hfc"; CachedFileItemStream modifiedHiveSiteFile = new CachedFileItemStream( new ByteArrayInputStream( modifiedHiveFileContent.getBytes() ), HIVE_SITE, HIVE_SITE ); filesFromThinClient.put( HIVE_SITE, modifiedHiveSiteFile ); CachedFileItemStream unmodifiedOozieSiteFile = new CachedFileItemStream( new ByteArrayInputStream( PLACEHOLDER_VALUE.getBytes() ), OOZIE_SITE, OOZIE_SITE ); filesFromThinClient.put( OOZIE_SITE, unmodifiedOozieSiteFile ); CachedFileItemStream unmodifiedYarnSiteFile = new CachedFileItemStream( new ByteArrayInputStream( PLACEHOLDER_VALUE.getBytes() ), YARN_SITE, YARN_SITE ); filesFromThinClient.put( YARN_SITE, unmodifiedYarnSiteFile ); //Call the edit method JSONObject result = hadoopClusterManager.editNamedCluster( model, true, filesFromThinClient ); //Assert that the edit method ran without error and returned the name of the cluster assertEquals( ncTestName, result.get( "namedCluster" ) ); //Assert that Core and hive site files were modified, but that oozie and yarn maintained original content for ( NamedClusterSiteFile ncsf : namedClusterSiteFiles ) { if ( ncsf.getSiteFileName().equals( CORE_SITE ) ) { assertEquals( modifiedCoreFileContent, ncsf.getSiteFileContents() ); } else if ( ncsf.getSiteFileName().equals( HIVE_SITE ) ) { assertEquals( modifiedHiveFileContent, ncsf.getSiteFileContents() ); } else if ( ncsf.getSiteFileName().equals( OOZIE_SITE ) ) { assertEquals( originalOozieFileContent, ncsf.getSiteFileContents() ); } else if ( ncsf.getSiteFileName().equals( YARN_SITE ) ) { assertEquals( originalYarnFileContent, ncsf.getSiteFileContents() ); } else { fail(); } } } @Test public void testFailNamedCluster() { ThinNameClusterModel model = new ThinNameClusterModel(); model.setName( ncTestName ); JSONObject result = hadoopClusterManager.importNamedCluster( model, getFiles( "src/test/resources/bad" ) ); assertEquals( "", result.get( "namedCluster" ) ); } @Test public void testInstallDriver() throws IOException { System.getProperties() .setProperty( "SHIM_DRIVER_DEPLOYMENT_LOCATION", "src/test/resources/driver-destination" ); File driverFile = new File( "src/test/resources/driver-source/driver.kar" ); FileItemInput fileItemStream = mock( FileItemInput.class ); when( fileItemStream.getFieldName() ).thenReturn( driverFile.getName() ); when( fileItemStream.getInputStream() ).thenReturn( new FileInputStream( driverFile ) ); JSONObject response = hadoopClusterManager.installDriver( fileItemStream ); boolean isSuccess = (boolean) response.get( "installed" ); if ( isSuccess ) { File driver = new File( "src/test/resources/driver-destination/driver.kar" ); assertTrue( driver.exists() ); } } @Test public void testRunTests() { RuntimeTestStatus runtimeTestStatus = mock( RuntimeTestStatus.class ); when( namedClusterService.getNamedClusterByName( ncTestName, this.metaStore ) ).thenReturn( namedCluster ); when( runtimeTestStatus.isDone() ).thenReturn( true ); hadoopClusterManager.onProgress( runtimeTestStatus ); Object[] categories = (Object[]) hadoopClusterManager.runTests( null, ncTestName ); for ( Object category : categories ) { TestCategory testCategory = (TestCategory) category; String categoryName = testCategory.getCategoryName(); 
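// Every reported category must be one of the known service categories, and with no reachable cluster each individual test is expected to report a warning status.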
boolean isCategoryNameValid = false; if ( categoryName.equals( "Hadoop file system" ) || categoryName.equals( "Oozie host connection" ) || categoryName .equals( "Kafka connection" ) || categoryName.equals( "Zookeeper connection" ) || categoryName .equals( "Job tracker / resource manager" ) ) { isCategoryNameValid = true; } assertTrue( isCategoryNameValid ); assertFalse( testCategory.isCategoryActive() ); List tests = testCategory.getTests(); for ( org.pentaho.big.data.kettle.plugins.hadoopcluster.ui.endpoints.Test test : tests ) { assertEquals( "Warning", test.getTestStatus() ); } } } @Test public void testNamedClusterKnoxSecurity() { ThinNameClusterModel model = new ThinNameClusterModel(); model.setName( knoxNC ); model.setSecurityType( "Knox" ); model.setGatewayUsername( "username" ); model.setGatewayUrl( "http://localhost:8008" ); model.setGatewayPassword( "password" ); hadoopClusterManager.createNamedCluster( model, getFiles( "/" ) ); NamedCluster namedCluster = namedClusterService.getNamedClusterByName( knoxNC, metaStore ); assertEquals( true, namedCluster.isUseGateway() ); assertEquals( "password", namedCluster.getGatewayPassword() ); assertEquals( "http://localhost:8008", namedCluster.getGatewayUrl() ); assertEquals( "username", namedCluster.getGatewayUsername() ); } @Test public void testNamedClusterKerberosPasswordSecurity() throws ConfigurationException { ThinNameClusterModel model = new ThinNameClusterModel(); model.setName( ncTestName ); model.setSecurityType( "Kerberos" ); model.setKerberosSubType( "Password" ); model.setKerberosAuthenticationUsername( "username" ); model.setKerberosAuthenticationPassword( "password" ); model.setKerberosImpersonationUsername( "impersonationusername" ); model.setKerberosImpersonationPassword( "impersonationpassword" ); hadoopClusterManager.createNamedCluster( model, getFiles( "/" ) ); String configFile = System.getProperty( "user.home" ) + File.separator + ".pentaho" + File.separator + "metastore" + File.separator + "pentaho" + File.separator + "NamedCluster" + File.separator + "Configs" + File.separator + "ncTest" + File.separator + "config.properties"; PropertiesConfiguration config = new PropertiesConfiguration( new File( configFile ) ); assertEquals( "username", config.getProperty( "pentaho.authentication.default.kerberos.principal" ) ); assertEquals( Encr.encryptPasswordIfNotUsingVariables( "password" ), config.getProperty( "pentaho.authentication.default.kerberos.password" ) ); assertEquals( "impersonationusername", config.getProperty( "pentaho.authentication.default.mapping.server.credentials.kerberos.principal" ) ); assertEquals( Encr.encryptPasswordIfNotUsingVariables( "impersonationpassword" ), config.getProperty( "pentaho.authentication.default.mapping.server.credentials.kerberos.password" ) ); assertEquals( "simple", config.getProperty( "pentaho.authentication.default.mapping.impersonation.type" ) ); ThinNameClusterModel retrievingModel = hadoopClusterManager.getNamedCluster( ncTestName ); assertEquals( "Kerberos", retrievingModel.getSecurityType() ); assertEquals( "Password", retrievingModel.getKerberosSubType() ); assertEquals( "username", retrievingModel.getKerberosAuthenticationUsername() ); assertEquals( "Encrypted 2be98afc86aa7f2e4bb18bd63c99dbdde", retrievingModel.getKerberosAuthenticationPassword() ); assertEquals( "impersonationusername", retrievingModel.getKerberosImpersonationUsername() ); assertEquals( "Encrypted 696d706570cdf7c1a91ece9d8abb18bd63c99dbdde", retrievingModel.getKerberosImpersonationPassword() ); } @Test 
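// Keytab-based Kerberos: the authentication keytab is copied into the cluster's Configs directory and referenced from config.properties, while the impersonation keytab entry stays empty.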
  public void testNamedClusterKerberosKeytabSecurity() throws ConfigurationException {
    ThinNameClusterModel model = new ThinNameClusterModel();
    model.setName( ncTestName );
    model.setSecurityType( "Kerberos" );
    model.setKerberosSubType( "Keytab" );
    File keytabFileDirectory = new File( "src/test/resources/keytab" );
    Map keytabFileItems = getFiles( keytabFileDirectory.getPath(), "keytabAuthFile" );
    hadoopClusterManager.createNamedCluster( model, keytabFileItems, "src/test/resources/keytab/test.keytab", "" );
    String configFile = System.getProperty( "user.home" ) + File.separator + ".pentaho" + File.separator
      + "metastore" + File.separator + "pentaho" + File.separator + "NamedCluster" + File.separator + "Configs"
      + File.separator + "ncTest" + File.separator + "config.properties";
    PropertiesConfiguration config = new PropertiesConfiguration( new File( configFile ) );
    assertEquals( System.getProperty( "user.home" ) + File.separator + ".pentaho" + File.separator + "metastore"
        + File.separator + "pentaho" + File.separator + "NamedCluster" + File.separator + "Configs" + File.separator
        + "ncTest" + File.separator + "test.keytab",
      config.getProperty( "pentaho.authentication.default.kerberos.keytabLocation" ) );
    assertEquals( "",
      config.getProperty( "pentaho.authentication.default.mapping.server.credentials.kerberos.keytabLocation" ) );
    assertEquals( "simple", config.getProperty( "pentaho.authentication.default.mapping.impersonation.type" ) );
    ThinNameClusterModel retrievingModel = hadoopClusterManager.getNamedCluster( ncTestName );
    assertEquals( "Kerberos", retrievingModel.getSecurityType() );
    assertEquals( "Keytab", retrievingModel.getKerberosSubType() );
  }

  @Test
  public void testGetNamedCluster() throws ConfigurationException {
    ThinNameClusterModel model = new ThinNameClusterModel();
    model.setName( ncTestName );
    JSONObject result = hadoopClusterManager.createNamedCluster( model, getFiles( "/" ) );
    assertEquals( ncTestName, result.get( "namedCluster" ) );
    ThinNameClusterModel nc = hadoopClusterManager.getNamedCluster( "NCTEST" );
    assertEquals( "ncTest", nc.getName() );
  }

  @Test
  public void testValidSiteFile() {
    assertFalse( hadoopClusterManager.isValidConfigurationFile( "file" ) );
    assertTrue( hadoopClusterManager.isValidConfigurationFile( CORE_SITE ) );
    assertTrue( hadoopClusterManager.isValidConfigurationFile( "config.properties" ) );
  }

  @Test
  public void allowsNullSpoon() {
    hadoopClusterManager = new HadoopClusterManager( null, namedClusterService, metaStore, "apache" );
    hadoopClusterManager.refreshTree();
    assertTrue( hadoopClusterManager.getNamedClusterConfigsRootDir().endsWith( "Configs" ) );
  }

  @Test
  public void testResetSecurity() throws ConfigurationException {
    ThinNameClusterModel model = new ThinNameClusterModel();
    model.setName( ncTestName );
    model.setSecurityType( "None" );
    model.setKerberosSubType( "Password" );
    model.setKerberosAuthenticationUsername( "username" );
    model.setKerberosAuthenticationPassword( "password" );
    model.setKerberosImpersonationUsername( "impersonationusername" );
    model.setKerberosImpersonationPassword( "impersonationpassword" );
    hadoopClusterManager.createNamedCluster( model, getFiles( "/" ) );
    String configFile = System.getProperty( "user.home" ) + File.separator + ".pentaho" + File.separator
      + "metastore" + File.separator + "pentaho" + File.separator + "NamedCluster" + File.separator + "Configs"
      + File.separator + "ncTest" + File.separator + "config.properties";
    PropertiesConfiguration config = new PropertiesConfiguration( new File( configFile ) );
    assertEquals( "",
      config.getProperty( "pentaho.authentication.default.kerberos.principal" ) );
    assertEquals( "", config.getProperty( "pentaho.authentication.default.kerberos.password" ) );
    assertEquals( "",
      config.getProperty( "pentaho.authentication.default.mapping.server.credentials.kerberos.principal" ) );
    assertEquals( "",
      config.getProperty( "pentaho.authentication.default.mapping.server.credentials.kerberos.password" ) );
    assertEquals( "disabled", config.getProperty( "pentaho.authentication.default.mapping.impersonation.type" ) );
    assertEquals( "", config.getProperty( "pentaho.authentication.default.kerberos.keytabLocation" ) );
    assertEquals( "",
      config.getProperty( "pentaho.authentication.default.mapping.server.credentials.kerberos.keytabLocation" ) );
  }

  @Test
  public void testNamedClusterKerberosPasswordSecurityWithBlankPassword() throws ConfigurationException {
    ThinNameClusterModel model = new ThinNameClusterModel();
    model.setName( ncTestName );
    model.setSecurityType( "Kerberos" );
    model.setKerberosSubType( "Password" );
    model.setKerberosAuthenticationUsername( "username" );
    model.setKerberosAuthenticationPassword( "password" );
    model.setKerberosImpersonationUsername( "" );
    model.setKerberosImpersonationPassword( "" );
    hadoopClusterManager.createNamedCluster( model, getFiles( "/" ) );
    String configFile = System.getProperty( "user.home" ) + File.separator + ".pentaho" + File.separator
      + "metastore" + File.separator + "pentaho" + File.separator + "NamedCluster" + File.separator + "Configs"
      + File.separator + "ncTest" + File.separator + "config.properties";
    PropertiesConfiguration config = new PropertiesConfiguration( new File( configFile ) );
    assertEquals( "username", config.getProperty( "pentaho.authentication.default.kerberos.principal" ) );
    assertEquals( Encr.encryptPasswordIfNotUsingVariables( "password" ),
      config.getProperty( "pentaho.authentication.default.kerberos.password" ) );
    assertEquals( "",
      config.getProperty( "pentaho.authentication.default.mapping.server.credentials.kerberos.principal" ) );
    assertEquals( "",
      config.getProperty( "pentaho.authentication.default.mapping.server.credentials.kerberos.password" ) );
    assertEquals( "simple", config.getProperty( "pentaho.authentication.default.mapping.impersonation.type" ) );
    ThinNameClusterModel retrievingModel = hadoopClusterManager.getNamedCluster( ncTestName );
    assertEquals( "Kerberos", retrievingModel.getSecurityType() );
    assertEquals( "Password", retrievingModel.getKerberosSubType() );
    assertEquals( "username", retrievingModel.getKerberosAuthenticationUsername() );
    assertEquals( "Encrypted 2be98afc86aa7f2e4bb18bd63c99dbdde",
      retrievingModel.getKerberosAuthenticationPassword() );
    assertEquals( "", retrievingModel.getKerberosImpersonationUsername() );
    assertEquals( "", retrievingModel.getKerberosImpersonationPassword() );
  }

  @After
  public void tearDown() throws IOException {
    FileUtils.deleteDirectory( getShimTestDir() );
    FileUtils.deleteDirectory( new File( "src/test/resources/driver-destination" ) );
    FileUtils
      .deleteDirectory( new File( hadoopClusterManager.getNamedClusterConfigsRootDir() + File.separator + knoxNC ) );
  }

  private File getShimTestDir() {
    String shimTestDir = System.getProperty( "user.home" ) + File.separator + ".pentaho" + File.separator
      + "metastore" + File.separator + "pentaho" + File.separator + "NamedCluster" + File.separator + "Configs"
      + File.separator + ncTestName;
    return new File( shimTestDir );
  }

  private Map getFiles( String filesLocation ) {
    return getFiles( filesLocation, null );
  }

  private Map getFiles( String
    filesLocation, String customFieldName ) {
    Map fileItemStreamByName = new HashMap<>();
    try {
      File siteFilesDirectory = new File( filesLocation );
      File[] siteFiles = siteFilesDirectory.listFiles();
      for ( File siteFile : siteFiles ) {
        String fieldName = customFieldName == null ? siteFile.getName() : customFieldName;
        CachedFileItemStream cachedFileItemStream =
          new CachedFileItemStream( new FileInputStream( siteFile ), siteFile.getName(), fieldName );
        cachedFileItemStream.setLastModified( siteFile.lastModified() );
        fileItemStreamByName.put( fieldName, cachedFileItemStream );
      }
    } catch ( IOException e ) {
      fileItemStreamByName = new HashMap<>();
    }
    return fileItemStreamByName;
  }
}

================================================
FILE: kettle-plugins/hadoop-cluster/ui/src/test/resources/bad/core-site.xml
================================================
fs.trash.interval 1 io.compression.codecs org.apache.hadoop.io.compress.DefaultCodec,org.apache.hadoop.io.compress.GzipCodec,org.apache.hadoop.io.compress.BZip2Codec,org.apache.hadoop.io.compress.DeflateCodec,org.apache.hadoop.io.compress.SnappyCodec,org.apache.hadoop.io.compress.Lz4Codec hadoop.security.authentication simple hadoop.security.authorization false hadoop.rpc.protection authentication hadoop.security.auth_to_local DEFAULT hadoop.proxyuser.oozie.hosts * hadoop.proxyuser.oozie.groups * hadoop.proxyuser.mapred.hosts * hadoop.proxyuser.mapred.groups * hadoop.proxyuser.flume.hosts * hadoop.proxyuser.flume.groups * hadoop.proxyuser.HTTP.hosts * hadoop.proxyuser.HTTP.groups * hadoop.proxyuser.hive.hosts * hadoop.proxyuser.hive.groups * hadoop.proxyuser.hue.hosts * hadoop.proxyuser.hue.groups * hadoop.proxyuser.httpfs.hosts * hadoop.proxyuser.httpfs.groups * hadoop.proxyuser.hdfs.groups * hadoop.proxyuser.hdfs.hosts * hadoop.proxyuser.yarn.hosts * hadoop.proxyuser.yarn.groups * hadoop.security.group.mapping org.apache.hadoop.security.ShellBasedUnixGroupsMapping hadoop.security.instrumentation.requires.admin false net.topology.script.file.name /etc/hadoop/conf.cloudera.yarn/topology.py io.file.buffer.size 65536 hadoop.ssl.enabled false hadoop.ssl.require.client.cert false true hadoop.ssl.keystores.factory.class org.apache.hadoop.security.ssl.FileBasedKeyStoresFactory true hadoop.ssl.server.conf ssl-server.xml true hadoop.ssl.client.conf ssl-client.xml true
================================================
FILE: kettle-plugins/hadoop-cluster/ui/src/test/resources/dataproc/core-site.xml
================================================
fs.default.name hdfs://cluster-ec0a The old FileSystem used by FsShell. hadoop.proxyuser.hive.groups * hadoop.security.token.service.use_ip false Controls whether tokens always use IP addresses. DNS changes will not be detected if this option is enabled. Existing client connections that break will always reconnect to the IP of the original host. New clients will connect to the host's new IP but fail to locate a token. Disabling this option will allow existing and new clients to detect an IP change and continue to locate the new host's token. hadoop.tmp.dir /hadoop/tmp A base for other temporary directories. ha.zookeeper.parent-znode /hadoop/hadoop-ha The ZooKeeper znode under which the ZK failover controller stores its information. Note that the nameservice ID is automatically appended to this znode, so it is not normally necessary to configure this, even in a federated environment.
hadoop.proxyuser.hive.hosts * ha.zookeeper.quorum cluster-ec0a-m-0:2181,cluster-ec0a-m-1:2181,cluster-ec0a-m-2:2181 fs.defaultFS hdfs://cluster-ec0a The name of the default file system. A URI whose scheme and authority determine the FileSystem implementation. The uri's scheme determines the config property (fs.SCHEME.impl) naming the FileSystem implementation class. The uri's authority is used to determine the host, port, etc. for a filesystem. hadoop.zk.address cluster-ec0a-m-0:2181,cluster-ec0a-m-1:2181,cluster-ec0a-m-2:2181 hadoop.http.filter.initializers org.apache.hadoop.security.HttpCrossOriginFilterInitializer,org.apache.hadoop.http.lib.StaticUserWebFilter fs.gs.working.dir / The directory relative gs: uris resolve in inside of the default bucket. fs.gs.system.bucket dataproc-staging-us-central1-888792280192-7eit8tmn GCS bucket to use as a default bucket if fs.default.name is not a gs: uri. fs.gs.metadata.cache.directory /hadoop_gcs_connector_metadata_cache Only used if fs.gs.metadata.cache.type is FILESYSTEM_BACKED, specifies the local path to use as the base path for storing mirrored GCS metadata. Must be an absolute path, must be a directory, and must be fully readable/writable/executable by any user running processes which use the GCS connector. fs.gs.impl com.google.cloud.hadoop.fs.gcs.GoogleHadoopFileSystem The FileSystem for gs: (GCS) uris. fs.gs.project.id hitachi3-281807 Google Cloud Project ID with access to configured GCS buckets. fs.gs.metadata.cache.enable false false Dataproc Cluster Properties fs.gs.implicit.dir.infer.enable true If set, we create and return in-memory directory objects on the fly when no backing object exists, but we know there are files with the same prefix. fs.gs.application.name.suffix -dataproc Appended to the user-agent header for API requests to GCS to help identify the traffic as coming from Dataproc. fs.AbstractFileSystem.gs.impl com.google.cloud.hadoop.fs.gcs.GoogleHadoopFS The AbstractFileSystem for gs: (GCS) uris. fs.gs.metadata.cache.type FILESYSTEM_BACKED Specifies which implementation of DirectoryListCache to use for supplementing GCS API &amp;amp;amp;quot;list&amp;amp;amp;quot; requests. Supported implementations: IN_MEMORY: Enforces immediate consistency within same Java process. FILESYSTEM_BACKED: Enforces consistency across all cooperating processes pointed at the same local mirror directory, which may be an NFS directory for massively-distributed coordination. fs.gs.block.size 134217728 false Dataproc Cluster Properties hadoop.ssl.enabled.protocols TLSv1.1,TLSv1.2,TLSv1.3 false Dataproc Cluster Properties hadoop.proxyuser.oozie.hosts * hadoop.proxyuser.oozie.groups * ================================================ FILE: kettle-plugins/hadoop-cluster/ui/src/test/resources/dataproc/hdfs-site.xml ================================================ dfs.namenode.file.close.num-committed-allowed 1 dfs.namenode.shared.edits.dir qjournal://cluster-ec0a-m-0:8485;cluster-ec0a-m-1:8485;cluster-ec0a-m-2:8485/cluster-ec0a A directory on shared storage between the multiple namenodes in an HA cluster. This directory will be written by the active and read by the standby in order to keep the namespaces synchronized. This directory does not need to be listed in dfs.namenode.edits.dir above. It should be left empty in a non-HA cluster. dfs.namenode.name.dir file:///hadoop/dfs/name Determines where on the local filesystem the DFS namenode should store the name table(fsimage). 
If this is a comma-delimited list of directories then the name table is replicated in all of thedirectories, for redundancy. dfs.permissions.enabled false If &amp;quot;true&amp;quot;, enable permission checking in HDFS. If &amp;quot;false&amp;quot;, permission checking is turned off, but all other behavior is unchanged. Switching from one parameter value to the other does not change the mode, owner or group of files or directories. dfs.client.read.shortcircuit true dfs.ha.automatic-failover.enabled true Whether automatic failover is enabled. See the HDFS High Availability documentation for details on automatic HA configuration. dfs.journalnode.edits.dir /var/tmp dfs.replication 2 Default block replication. The actual number of replications can be specified when the file is created. The default is used if replication is not specified in create time. dfs.namenode.checkpoint.dir file:///hadoop/dfs/namesecondary Determines where on the local filesystem the DFS secondary namenode should store the temporary images to merge. If this is a comma-delimited list of directories then the image is replicated in all of the directories for redundancy. dfs.nameservices cluster-ec0a false Dataproc Cluster Properties dfs.datanode.data.dir /hadoop/dfs/data Determines where on the local filesystem an DFS datanode should store its blocks. If this is a comma-delimited list of directories, then data will be stored in all named directories, typically on different devices.Directories that do not exist are ignored. dfs.namenode.rpc-address.cluster-ec0a.nn1 cluster-ec0a-m-1:8020 false Dataproc Cluster Properties dfs.namenode.rpc-address.cluster-ec0a.nn0 cluster-ec0a-m-0:8020 false Dataproc Cluster Properties dfs.client.failover.proxy.provider.cluster-ec0a org.apache.hadoop.hdfs.server.namenode.ha.ConfiguredFailoverProxyProvider dfs.permissions.supergroup hadoop The name of the group of super-users. dfs.hosts /etc/hadoop/conf/nodes_include dfs.ha.fencing.methods shell(/bin/true) dfs.namenode.datanode.registration.retry-hostname-dns-lookup true If true, then the namenode will retry reverse dns lookup for hostname of the datanode. This helps in environments where DNS lookup can be flaky. dfs.ha.namenodes.cluster-ec0a nn0,nn1 false Dataproc Cluster Properties dfs.namenode.http-address.cluster-ec0a.nn1 cluster-ec0a-m-1:9870 false Dataproc Cluster Properties dfs.namenode.http-address.cluster-ec0a.nn0 cluster-ec0a-m-0:9870 false Dataproc Cluster Properties dfs.domain.socket.path /var/lib/hadoop-hdfs/dn_socket dfs.hosts.exclude /etc/hadoop/conf/nodes_exclude dfs.datanode.data.dir.perm 700 Permissions for the directories on on the local filesystem where the DFS data node store its blocks. The permissions can either be octal or symbolic. 
dfs.namenode.servicerpc-address.cluster-ec0a.nn1 cluster-ec0a-m-1:8051 false Dataproc Cluster Properties dfs.namenode.servicerpc-address.cluster-ec0a.nn0 cluster-ec0a-m-0:8051 false Dataproc Cluster Properties dfs.namenode.https-address.cluster-ec0a.nn1 cluster-ec0a-m-1:9871 false Dataproc Cluster Properties dfs.namenode.https-address.cluster-ec0a.nn0 cluster-ec0a-m-0:9871 false Dataproc Cluster Properties dfs.namenode.https-address 0.0.0.0:9871 false Dataproc Cluster Properties dfs.namenode.service.handler.count 10 false Dataproc Cluster Properties dfs.namenode.handler.count 20 false Dataproc Cluster Properties dfs.datanode.address 0.0.0.0:9866 false Dataproc Cluster Properties dfs.namenode.http-address 0.0.0.0:9870 false Dataproc Cluster Properties dfs.datanode.https.address 0.0.0.0:9865 false Dataproc Cluster Properties dfs.namenode.secondary.http-address 0.0.0.0:9868 false Dataproc Cluster Properties dfs.namenode.secondary.https-address 0.0.0.0:9869 false Dataproc Cluster Properties dfs.datanode.http.address 0.0.0.0:9864 false Dataproc Cluster Properties dfs.datanode.ipc.address 0.0.0.0:9867 false Dataproc Cluster Properties dfs.namenode.lifeline.rpc-address.cluster-ec0a.nn0 cluster-ec0a-m-0:8050 false Dataproc Cluster Properties dfs.namenode.lifeline.rpc-address.cluster-ec0a.nn1 cluster-ec0a-m-1:8050 false Dataproc Cluster Properties ================================================ FILE: kettle-plugins/hadoop-cluster/ui/src/test/resources/dataproc/hive-site.xml ================================================ javax.jdo.option.ConnectionURL jdbc:mysql://cluster-ec0a-m-0/metastore the URL of the MySQL database javax.jdo.option.ConnectionDriverName com.mysql.jdbc.Driver javax.jdo.option.ConnectionUserName hive datanucleus.fixedDatastore true javax.jdo.option.ConnectionPassword hive-password datanucleus.autoStartMechanism SchemaTable hive.metastore.connect.retries 60 datanucleus.autoCreateSchema false hive.localize.resource.num.wait.attempts 25 hive.execution.engine tez hive.metastore.uris thrift://cluster-ec0a-m-0:9083,thrift://cluster-ec0a-m-1:9083,thrift://cluster-ec0a-m-2:9083 hive.zookeeper.quorum cluster-ec0a-m-0:2181,cluster-ec0a-m-1:2181,cluster-ec0a-m-2:2181 hive.zookeeper.client.port 2181 hive.server2.support.dynamic.service.discovery true hive.server2.zookeeper.namespace hiveserver2 hive.support.concurrency true hive.zookeeper.session.timeout 1200000 hive.user.install.directory gs://dataproc-staging-us-central1-888792280192-7eit8tmn/google-cloud-dataproc-metainfo/e553cf63-468f-4b23-a59f-1025ad8e335d/hive/user-install-dir ================================================ FILE: kettle-plugins/hadoop-cluster/ui/src/test/resources/dataproc/mapred-site.xml ================================================ mapreduce.job.reduces 2 false Dataproc Cluster Properties mapreduce.jobhistory.address cluster-ec0a-m-0:10020 MapReduce JobHistory Server IPC host:port mapreduce.framework.name yarn mapreduce.fileoutputcommitter.task.cleanup.enabled true Whether tasks should delete their task temporary directories. This is purely an optimization for filesystems without O(1) recursive delete, as commitJob will recursively delete the entire job temporary directory. HDFS has O(1) recursive delete, so this parameter is left false by default. Users of object stores, for example, may want to set this to true. Note: this is only used if mapreduce.fileoutputcommitter.algorithm.version=2 See https://issues.apache.org/jira/browse/MAPREDUCE-7029 for details. 
yarn.app.mapreduce.am.resource.mb 1024 false Dataproc Cluster Properties mapreduce.map.java.opts -Xmx819m false Dataproc Cluster Properties mapreduce.jobhistory.recovery.store.fs.uri ${hadoop.tmp.dir}/mapred/history/recoverystore URI where history server state is stored. mapreduce.job.working.dir /user/${user.name} The FileSystem working directory to use for relative paths. mapred.local.dir /hadoop/mapred/local Directories on the local machine in which to store mapreduce temp files. mapreduce.fileoutputcommitter.failures.attempts 4 Number of attempts when failure happens in commit job. mapreduce.reduce.java.opts -Xmx1638m false Dataproc Cluster Properties mapreduce.map.memory.mb 1024 false Dataproc Cluster Properties mapreduce.reduce.memory.mb 2048 false Dataproc Cluster Properties mapreduce.jobhistory.recovery.enable true Enable history server to recover server state on startup. mapreduce.tasktracker.map.tasks.maximum 1 Property from MapReduce version 1 still used for TeraGen sharding. mapreduce.input.fileinputformat.list-status.num-threads 20 The number of threads to use to list and fetch block locations for the specified input paths. Note: multiple threads should not be used if a custom non thread-safe path filter is used. Setting a larger value than the default of 1 can significantly improve job startup overhead, especially if using GCS as input with multi-level directories, such as in partitioned Hive tables. mapreduce.reduce.cpu.vcores 1 false Dataproc Cluster Properties mapreduce.map.cpu.vcores 1 false Dataproc Cluster Properties mapreduce.jobhistory.recovery.store.class org.apache.hadoop.mapreduce.v2.hs.HistoryServerFileSystemStateStoreService Class used to store history server state for recovery. mapreduce.job.maps 15 false Dataproc Cluster Properties mapreduce.jobhistory.webapp.address cluster-ec0a-m-0:19888 MapReduce JobHistory Server Web UI host:port yarn.app.mapreduce.am.command-opts -Xmx819m false Dataproc Cluster Properties yarn.app.mapreduce.am.resource.cpu-vcores 1 false Dataproc Cluster Properties mapreduce.jobhistory.always-scan-user-dir true Enable history server to always scan user dir. mapreduce.fileoutputcommitter.algorithm.version 2 Updated file output committer algorithm in Hadoop 2.7+. Significantly improves commitJob times when using the Google Cloud Storage connector. See https://issues.apache.org/jira/browse/MAPEDUCE-4815 for more details. mapreduce.application.classpath $HADOOP_MAPRED_HOME/share/hadoop/mapreduce/*, $HADOOP_MAPRED_HOME/share/hadoop/mapreduce/lib/*, /usr/local/share/google/dataproc/lib/* mapred.bq.project.id hitachi3-281807 Google Cloud Project ID to use for BigQuery operations. mapred.bq.output.buffer.size 67108864 The size in bytes of the output buffer to use when writing to BigQuery. mapred.bq.gcs.bucket dataproc-staging-us-central1-888792280192-7eit8tmn The GCS bucket holding temporary BigQuery data for the input connector. mapreduce.job.reduce.slowstart.completedmaps 0.95 false Dataproc Cluster Properties mapreduce.task.io.sort.mb 256 false Dataproc Cluster Properties ================================================ FILE: kettle-plugins/hadoop-cluster/ui/src/test/resources/dataproc/yarn-site.xml ================================================ yarn.resourcemanager.webapp.methods-allowed GET,HEAD The HTTP methods allowed by the YARN Resource Manager web UI and REST API. Name of the cluster. 
In a HA setting, this is used to ensure the RM participates in leader election for this cluster and ensures it does not affect other clusters yarn.resourcemanager.cluster-id cluster-ec0a yarn.resourcemanager.store.class org.apache.hadoop.yarn.server.resourcemanager.recovery.ZKRMStateStore yarn.nodemanager.local-dirs /hadoop/yarn/nm-local-dir Directories on the local machine in which to application temp files. yarn.resourcemanager.hostname cluster-ec0a-m-0 yarn.nodemanager.resource.memory-mb 3072 false Dataproc Cluster Properties yarn.scheduler.minimum-allocation-mb 256 false Dataproc Cluster Properties yarn.resourcemanager.nodes.include-path /etc/hadoop/conf/nodes_include yarn.nodemanager.container-executor.os.sched.priority.adjustment 1 yarn.nodemanager.resource.cpu-vcores 1 Number of vcores that can be allocated for containers. This is used by the RM scheduler when allocating resources for containers. This is not used to limit the number of physical cores used by YARN containers. yarn.resourcemanager.ha.automatic-failover.zk-base-path /hadoop/yarn-leader-election The base znode path to use for storing leader information, when using ZooKeeper based leader election. yarn.scheduler.maximum-allocation-mb 3072 false Dataproc Cluster Properties yarn.resourcemanager.ha.enabled true Enable RM high-availability. When enabled, (1) The RM starts in the Standby mode by default, and transitions to the Active mode when prompted to. (2) The nodes in the RM ensemble are listed in yarn.resourcemanager.ha.rm-ids (3) The id of each RM either comes from yarn.resourcemanager.ha.id if yarn.resourcemanager.ha.id is explicitly specified or can be figured out by matching yarn.resourcemanager.address.{id} with local address (4) The actual physical addresses come from the configs of the pattern - {rpc- config}.{id} yarn.client.failover-sleep-max-ms 15000 When HA is enabled, the maximum sleep time (in milliseconds) between failovers. When set, this overrides the yarn.resourcemanager.connect.* settings. When not set, yarn.resourcemanager.connect.retry-interval.ms is used instead. yarn.resourcemanager.zk-state-store.parent-path /hadoop/rmstore Full path of the ZooKeeper znode where RM state will be stored. This must be supplied when using org.apache.hadoop.yarn.server.resourcemanager.recovery.ZKRMStateStore as the value for yarn.resourcemanager.store.class yarn.log-aggregation-enable false Enable remote logs aggregation to the default FS. yarn.resourcemanager.hostname.rm0 cluster-ec0a-m-0 yarn.resourcemanager.hostname.rm1 cluster-ec0a-m-1 yarn.resourcemanager.hostname.rm2 cluster-ec0a-m-2 yarn.client.failover-max-attempts 15 When HA is enabled, the max number of times FailoverProxyProvider should attempt failover. When set, this overrides the yarn.resourcemanager.connect.max-wait.ms. When not set, this is inferred from yarn.resourcemanager.connect.max-wait.ms. yarn.nodemanager.aux-services mapreduce_shuffle,spark_shuffle yarn.nodemanager.vmem-check-enabled false The maximum allocation for every container request at the RM, in terms of virtual CPU cores. Requests higher than this won't take effect, and will get capped to this value. yarn.scheduler.maximum-allocation-vcores 32000 yarn.resourcemanager.webapp.address.rm0 cluster-ec0a-m-0:8088 yarn.resourcemanager.webapp.address.rm1 cluster-ec0a-m-1:8088 yarn.resourcemanager.webapp.address.rm2 cluster-ec0a-m-2:8088 yarn.resourcemanager.nodes.exclude-path /etc/hadoop/conf/nodes_exclude yarn.resourcemanager.zk-timeout-ms 60000 ZooKeeper session timeout in milliseconds. 
Session expiration is managed by the ZooKeeper cluster itself, not by the client. This value is used by the cluster to determine when the client's session expires. Expirations happens when the cluster does not hear from the client within the specified session timeout period (i.e. no heartbeat). yarn.client.failover-sleep-base-ms 500 When HA is enabled, the sleep base (in milliseconds) to be used for calculating the exponential delay between failovers. When set, this overrides the yarn.resourcemanager.connect.* settings. When not set, yarn.resourcemanager.connect.retry-interval.ms is used instead. yarn.nodemanager.remote-app-log-dir /yarn-logs/ The remote path, on the default FS, to store logs. yarn.resourcemanager.recovery.enabled true Enable RM to recover state after starting. yarn.resourcemanager.ha.rm-ids rm0,rm1,rm2 The list of RM nodes in the cluster when HA is enabled. See description of yarn.resourcemanager.ha .enabled for full details on how this is used. yarn.resourcemanager.bind-host 0.0.0.0 yarn.application.classpath $HADOOP_CONF_DIR,$HADOOP_COMMON_HOME/*,$HADOOP_COMMON_HOME/lib/*, $HADOOP_HDFS_HOME/*,$HADOOP_HDFS_HOME/lib/*,$HADOOP_MAPRED_HOME/*, $HADOOP_MAPRED_HOME/lib/*,$HADOOP_YARN_HOME/*,$HADOOP_YARN_HOME/lib/*, /usr/local/share/google/dataproc/lib/* yarn.nodemanager.aux-services.spark_shuffle.class org.apache.spark.network.yarn.YarnShuffleService yarn.resourcemanager.webapp.cross-origin.enabled true yarn.timeline-service.http-cross-origin.enabled true yarn.timeline-service.enabled true yarn.timeline-service.hostname cluster-ec0a-m-0 yarn.timeline-service.bind-host 0.0.0.0 yarn.resourcemanager.system-metrics-publisher.enabled true yarn.timeline-service.generic-application-history.enabled true yarn.timeline-service.ui-names tez yarn.timeline-service.ui-on-disk-path.tez /usr/lib/tez/tez-ui-0.9.2.war yarn.timeline-service.ui-web-path.tez /tez-ui yarn.resourcemanager.nodemanager-graceful-decommission-timeout-secs 86400 false Dataproc Cluster Properties ================================================ FILE: kettle-plugins/hadoop-cluster/ui/src/test/resources/driver-source/driver.kar ================================================ ================================================ FILE: kettle-plugins/hadoop-cluster/ui/src/test/resources/keytab/test.keytab ================================================ ================================================ FILE: kettle-plugins/hadoop-cluster/ui/src/test/resources/missing-info/core-site.xml ================================================ fs.trash.interval 1 io.compression.codecs org.apache.hadoop.io.compress.DefaultCodec,org.apache.hadoop.io.compress.GzipCodec,org.apache.hadoop.io.compress.BZip2Codec,org.apache.hadoop.io.compress.DeflateCodec,org.apache.hadoop.io.compress.SnappyCodec,org.apache.hadoop.io.compress.Lz4Codec hadoop.security.authentication simple hadoop.security.authorization false hadoop.rpc.protection authentication hadoop.security.auth_to_local DEFAULT hadoop.proxyuser.oozie.hosts * hadoop.proxyuser.oozie.groups * hadoop.proxyuser.flume.hosts * hadoop.proxyuser.flume.groups * hadoop.proxyuser.HTTP.hosts * hadoop.proxyuser.HTTP.groups * hadoop.proxyuser.hive.hosts * hadoop.proxyuser.hive.groups * hadoop.proxyuser.hue.hosts * hadoop.proxyuser.hue.groups * hadoop.proxyuser.httpfs.hosts * hadoop.proxyuser.httpfs.groups * hadoop.proxyuser.hdfs.groups * hadoop.proxyuser.hdfs.hosts * hadoop.proxyuser.yarn.hosts * hadoop.proxyuser.yarn.groups * hadoop.security.group.mapping 
org.apache.hadoop.security.ShellBasedUnixGroupsMapping hadoop.security.instrumentation.requires.admin false net.topology.script.file.name /etc/hadoop/conf.cloudera.yarn/topology.py io.file.buffer.size 65536 hadoop.ssl.enabled false hadoop.ssl.require.client.cert false true hadoop.ssl.keystores.factory.class org.apache.hadoop.security.ssl.FileBasedKeyStoresFactory true hadoop.ssl.server.conf ssl-server.xml true hadoop.ssl.client.conf ssl-client.xml true ================================================ FILE: kettle-plugins/hadoop-cluster/ui/src/test/resources/missing-info/hive-site.xml ================================================ hive.metastore.uris thrift://svqxbdcn6cdh514un4.pentahoqa.com:9083 hive.metastore.client.socket.timeout 300 hive.metastore.warehouse.dir /user/hive/warehouse hive.warehouse.subdir.inherit.perms true hive.auto.convert.join true hive.auto.convert.join.noconditionaltask.size 20971520 hive.optimize.bucketmapjoin.sortedmerge false hive.smbjoin.cache.rows 10000 hive.server2.logging.operation.enabled true hive.server2.logging.operation.log.location /var/log/hive/operation_logs mapred.reduce.tasks -1 hive.exec.reducers.bytes.per.reducer 67108864 hive.exec.copyfile.maxsize 104857600 hive.exec.reducers.max 1099 hive.vectorized.groupby.checkinterval 4096 hive.vectorized.groupby.flush.percent 0.1 hive.compute.query.using.stats false hive.vectorized.execution.enabled true hive.vectorized.execution.reduce.enabled false hive.merge.mapfiles true hive.merge.mapredfiles false hive.cbo.enable false hive.fetch.task.conversion minimal hive.fetch.task.conversion.threshold 268435456 hive.limit.pushdown.memory.usage 0.1 hive.merge.sparkfiles true hive.merge.smallfiles.avgsize 16777216 hive.merge.size.per.task 268435456 hive.optimize.reducededuplication true hive.optimize.reducededuplication.min.reducer 4 hive.map.aggr true hive.map.aggr.hash.percentmemory 0.5 hive.optimize.sort.dynamic.partition false hive.execution.engine mr spark.executor.memory 1828926259 spark.driver.memory 966367641 spark.executor.cores 4 spark.yarn.driver.memoryOverhead 102 spark.yarn.executor.memoryOverhead 307 spark.dynamicAllocation.enabled true spark.dynamicAllocation.initialExecutors 1 spark.dynamicAllocation.minExecutors 1 spark.dynamicAllocation.maxExecutors 2147483647 hive.metastore.execute.setugi true hive.support.concurrency true hive.zookeeper.quorum svqxbdcn6cdh514un1.pentahoqa.com,svqxbdcn6cdh514un5.pentahoqa.com,svqxbdcn6cdh514un4.pentahoqa.com,svqxbdcn6cdh514un2.pentahoqa.com,svqxbdcn6cdh514un3.pentahoqa.com hive.zookeeper.client.port 2181 hive.zookeeper.namespace hive_zookeeper_namespace_hive hbase.zookeeper.quorum svqxbdcn6cdh514un1.pentahoqa.com,svqxbdcn6cdh514un5.pentahoqa.com,svqxbdcn6cdh514un4.pentahoqa.com,svqxbdcn6cdh514un2.pentahoqa.com,svqxbdcn6cdh514un3.pentahoqa.com hbase.zookeeper.property.clientPort 2181 hive.cluster.delegation.token.store.class org.apache.hadoop.hive.thrift.MemoryTokenStore hive.server2.enable.doAs true hive.server2.use.SSL false spark.shuffle.service.enabled true ================================================ FILE: kettle-plugins/hadoop-cluster/ui/src/test/resources/missing-info/oozie-default.xml ================================================ oozie.output.compression.codec gz The name of the compression codec to use. The implementation class for the codec needs to be specified through another property oozie.compression.codecs. 
You can specify a comma separated list of 'Codec_name'='Codec_class' for oozie.compression.codecs where codec class implements the interface org.apache.oozie.compression.CompressionCodec. If oozie.compression.codecs is not specified, gz codec implementation is used by default. oozie.external_monitoring.enable false If the oozie functional metrics needs to be exposed to the metrics-server backend, set it to true If set to true, the following properties has to be specified : oozie.metrics.server.name, oozie.metrics.host, oozie.metrics.prefix, oozie.metrics.report.interval.sec, oozie.metrics.port oozie.external_monitoring.type graphite The name of the server to which we want to send the metrics, would be graphite or ganglia. oozie.external_monitoring.address http://localhost:2020 oozie.external_monitoring.metricPrefix oozie oozie.external_monitoring.reporterIntervalSecs 60 oozie.jmx_monitoring.enable false If the oozie functional metrics needs to be exposed via JMX interface, set it to true. oozie.action.mapreduce.uber.jar.enable false If true, enables the oozie.mapreduce.uber.jar mapreduce workflow configuration property, which is used to specify an uber jar in HDFS. Submitting a workflow with an uber jar requires at least Hadoop 2.2.0 or 1.2.0. If false, workflows which specify the oozie.mapreduce.uber.jar configuration property will fail. oozie.action.dependency.deduplicate false If true, then Oozie will remove all the duplicates from the list of dependencies when they are passed to the jobtracker. Higher priority dependencies will remain as the following: Original list: "/a/a.jar#a.jar,/a/b.jar#b.jar,/b/a.jar,/b/b.jar,/c/d.jar" Deduplicated list: "/a/a.jar#a.jar,/a/b.jar#b.jar,/c/d.jar" With other words, priority order is: action jar > user workflow libs > action libs > system lib, where dependency with greater prio is used. oozie.processing.timezone UTC Oozie server timezone. Valid values are UTC and GMT(+/-)####, for example 'GMT+0530' would be India timezone. All dates parsed and genered dates by Oozie Coordinator/Bundle will be done in the specified timezone. The default value of 'UTC' should not be changed under normal circumtances. If for any reason is changed, note that GMT(+/-)#### timezones do not observe DST changes. oozie.base.url http://localhost:8080/oozie Base Oozie URL. oozie.system.id oozie-${user.name} The Oozie system ID. oozie.systemmode NORMAL System mode for Oozie at startup. oozie.delete.runtime.dir.on.shutdown true If the runtime directory should be kept after Oozie shutdowns down. 
oozie.services org.apache.oozie.service.SchedulerService, org.apache.oozie.service.MetricsInstrumentationService, org.apache.oozie.service.MemoryLocksService, org.apache.oozie.service.UUIDService, org.apache.oozie.service.ELService, org.apache.oozie.service.AuthorizationService, org.apache.oozie.service.UserGroupInformationService, org.apache.oozie.service.HadoopAccessorService, org.apache.oozie.service.JobsConcurrencyService, org.apache.oozie.service.URIHandlerService, org.apache.oozie.service.DagXLogInfoService, org.apache.oozie.service.SchemaService, org.apache.oozie.service.LiteWorkflowAppService, org.apache.oozie.service.JPAService, org.apache.oozie.service.StoreService, org.apache.oozie.service.DBLiteWorkflowStoreService, org.apache.oozie.service.CallbackService, org.apache.oozie.service.ActionService, org.apache.oozie.service.ShareLibService, org.apache.oozie.service.CallableQueueService, org.apache.oozie.service.ActionCheckerService, org.apache.oozie.service.RecoveryService, org.apache.oozie.service.PurgeService, org.apache.oozie.service.CoordinatorEngineService, org.apache.oozie.service.BundleEngineService, org.apache.oozie.service.DagEngineService, org.apache.oozie.service.CoordMaterializeTriggerService, org.apache.oozie.service.StatusTransitService, org.apache.oozie.service.PauseTransitService, org.apache.oozie.service.GroupsService, org.apache.oozie.service.ProxyUserService, org.apache.oozie.service.XLogStreamingService, org.apache.oozie.service.JvmPauseMonitorService, org.apache.oozie.service.SparkConfigurationService, org.apache.oozie.service.SchemaCheckerService All services to be created and managed by Oozie Services singleton. Class names must be separated by commas. oozie.services.ext To add/replace services defined in 'oozie.services' with custom implementations. Class names must be separated by commas. oozie.service.XLogStreamingService.buffer.len 4096 4K buffer for streaming the logs progressively oozie.service.XLogStreamingService.error.buffer.len 2048 2K buffer for streaming the error logs progressively oozie.service.XLogStreamingService.audit.buffer.len 3 Number of lines for streaming the audit logs progressively oozie.service.HCatAccessorService.jmsconnections default=java.naming.factory.initial#org.apache.activemq.jndi.ActiveMQInitialContextFactory;java.naming.provider.url#tcp://localhost:61616;connectionFactoryNames#ConnectionFactory Specify the map of endpoints to JMS configuration properties. In general, endpoint identifies the HCatalog server URL. "default" is used if no endpoint is mentioned in the query. If some JMS property is not defined, the system will use the property defined jndi.properties. jndi.properties files is retrieved from the application classpath. Mapping rules can also be provided for mapping Hcatalog servers to corresponding JMS providers. hcat://${1}.${2}.server.com:8020=java.naming.factory.initial#Dummy.Factory;java.naming.provider.url#tcp://broker.${2}:61616 oozie.service.HCatAccessorService.jms.use.canonical.hostname false The JMS messages published from a HCat server usually contains the canonical hostname of the HCat server in standalone mode or the canonical name of the VIP in a case of multiple nodes in a HA setup. This setting is used to translate the HCat server hostname or its aliases specified by the user in the HCat URIs of the coordinator dependencies to its canonical name so that they can be exactly matched with the JMS dependency availability notifications. 
oozie.service.JMSTopicService.topic.name default=${username} Topic options are ${username} or ${jobId} or a fixed string which can be specified as default or for a particular job type. For e.g To have a fixed string topic for workflows, coordinators and bundles, specify in the following comma-separated format: {jobtype1}={some_string1}, {jobtype2}={some_string2} where job type can be WORKFLOW, COORDINATOR or BUNDLE. e.g. Following defines topic for workflow job, workflow action, coordinator job, coordinator action, bundle job and bundle action WORKFLOW=workflow, COORDINATOR=coordinator, BUNDLE=bundle For jobs with no defined topic, default topic will be ${username} oozie.jms.producer.connection.properties java.naming.factory.initial#org.apache.activemq.jndi.ActiveMQInitialContextFactory;java.naming.provider.url#tcp://localhost:61616;connectionFactoryNames#ConnectionFactory oozie.service.JMSAccessorService.connectioncontext.impl org.apache.oozie.jms.DefaultConnectionContext Specifies the Connection Context implementation oozie.service.ConfigurationService.ignore.system.properties oozie.service.AuthorizationService.security.enabled Specifies "oozie.*" properties to cannot be overriden via Java system properties. Property names must be separted by commas. oozie.service.ConfigurationService.verify.available.properties true Specifies whether the available configurations check is enabled or not. oozie.service.SchedulerService.threads 10 The number of threads to be used by the SchedulerService to run deamon tasks. If maxed out, scheduled daemon tasks will be queued up and delayed until threads become available. oozie.service.AuthorizationService.authorization.enabled false Specifies whether security (user name/admin role) is enabled or not. If disabled any user can manage Oozie system and manage any job. oozie.service.AuthorizationService.default.group.as.acl false Enables old behavior where the User's default group is the job's ACL. oozie.serviceAuthorizationService.admin.users Comma separated list of users with admin access for the Authorization service. oozie.service.AuthorizationService.system.info.authorized.users Comma separated list of users authorized for web service calls to get system configuration. oozie.service.InstrumentationService.logging.interval 60 Interval, in seconds, at which instrumentation should be logged by the InstrumentationService. If set to 0 it will not log instrumentation data. oozie.service.PurgeService.older.than 30 Completed workflow jobs older than this value, in days, will be purged by the PurgeService. oozie.service.PurgeService.coord.older.than 7 Completed coordinator jobs older than this value, in days, will be purged by the PurgeService. oozie.service.PurgeService.bundle.older.than 7 Completed bundle jobs older than this value, in days, will be purged by the PurgeService. oozie.service.PurgeService.purge.old.coord.action false Whether to purge completed workflows and their corresponding coordinator actions of long running coordinator jobs if the completed workflow jobs are older than the value specified in oozie.service.PurgeService.older.than. oozie.service.PurgeService.purge.limit 100 Batch size of individual DB operations used for building the list of items to be purged and performing actual purge. oozie.service.PurgeService.purge.interval 3600 Interval at which the purge service will run, in seconds. oozie.service.PurgeService.enable.command.line true Enable/Disable oozie admin purge command. By default, it is enabled. 
oozie.service.RecoveryService.wf.actions.older.than 120 Age of the actions which are eligible to be queued for recovery, in seconds. oozie.service.RecoveryService.wf.actions.created.time.interval 7 Created time period of the actions which are eligible to be queued for recovery in days. oozie.service.RecoveryService.callable.batch.size 10 This value determines the number of callable which will be batched together to be executed by a single thread. oozie.service.RecoveryService.push.dependency.interval 200 This value determines the delay for push missing dependency command queueing in Recovery Service oozie.service.RecoveryService.interval 60 Interval at which the RecoverService will run, in seconds. oozie.service.RecoveryService.coord.older.than 600 Age of the Coordinator jobs or actions which are eligible to be queued for recovery, in seconds. oozie.service.RecoveryService.bundle.older.than 600 Age of the Bundle jobs which are eligible to be queued for recovery, in seconds. oozie.service.CallableQueueService.queue.size 10000 Max callable queue size oozie.service.CallableQueueService.threads 10 Number of threads used for executing callables oozie.service.CallableQueueService.delayedcallable.threads 1 The number of threads where delayed tasks are executed. Upon expiration, the tasks are immediately inserted into the main queue to properly handle priorities. This means that no actual business logic is executed in this thread pool, so under normal circumstances, this value can be set to a low number. Note that this property is completely unrelated to oozie.service.SchedulerService.threads which tells how many scheduled background tasks can run in parallel at the same time (like PurgeService, StatusTransitService, etc). oozie.service.CallableQueueService.queue.newImpl true If set to true, then CallableQueueService will use a faster, less CPU-intensive queuing mechanism to execute asynchronous tasks internally. The old implementation generates noticeable CPU load even if Oozie is completely idle, especially when oozie.service.CallableQueueService.threads is set to a large number. The previous queuing mechanism is kept as a fallback option. This is an experimental feature in Oozie 5.1.0 that needs to be re-evaluated upon an upcoming minor release, meaning the old implementation and this feature flag will also be removed. oozie.service.CallableQueueService.queue.awaitTermination.timeout.seconds 30 Number of seconds while awaiting termination of ThreadPoolExecutor instances when CallableQueueService#destroy() is called, in seconds. The more elements you tend to have in your callable queue, the more you want CallableQueueService to wait before shutting down its thread pools. oozie.service.CallableQueueService.callable.concurrency 3 Maximum concurrency for a given callable type. Each command is a callable type (submit, start, run, signal, job, jobs, suspend,resume, etc). Each action type is a callable type (Map-Reduce, Pig, SSH, FS, sub-workflow, etc). All commands that use action executors (action-start, action-end, action-kill and action-check) use the action type as the callable type. oozie.service.CallableQueueService.callable.next.eligible true If true, when a callable in the queue has already reached max concurrency, Oozie continuously find next one which has not yet reach max concurrency. oozie.service.CallableQueueService.InterruptMapMaxSize 500 Maximum Size of the Interrupt Map, the interrupt element will not be inserted in the map if exceeded the size. 
oozie.service.CallableQueueService.InterruptTypes kill,resume,suspend,bundle_kill,bundle_resume,bundle_suspend,coord_kill,coord_change,coord_resume,coord_suspend Getting the types of XCommands that are considered to be of Interrupt type oozie.service.CoordMaterializeTriggerService.lookup.interval 300 Coordinator Job Lookup interval.(in seconds). oozie.service.CoordMaterializeTriggerService.materialization.window 3600 Coordinator Job Lookup command materialized each job for this next "window" duration oozie.service.CoordMaterializeTriggerService.callable.batch.size 10 This value determines the number of callable which will be batched together to be executed by a single thread. oozie.service.CoordMaterializeTriggerService.materialization.system.limit 50 This value determines the number of coordinator jobs to be materialized at a given time. oozie.service.coord.normal.default.timeout 120 Default timeout for a coordinator action input check (in minutes) for normal job. -1 means infinite timeout oozie.service.coord.default.max.timeout 86400 Default maximum timeout for a coordinator action input check (in minutes). 86400= 60days oozie.service.coord.input.check.requeue.interval 60000 Command re-queue interval for coordinator data input check (in millisecond). oozie.service.coord.input.check.requeue.interval.additional.delay 0 This value (in seconds) will be added into oozie.service.coord.input.check.requeue.interval and resulting value will be the requeue interval for the actions which are waiting for a long time without any input. oozie.service.coord.push.check.requeue.interval 600000 Command re-queue interval for push dependencies (in millisecond). oozie.service.coord.default.concurrency 1 Default concurrency for a coordinator job to determine how many maximum action should be executed at the same time. -1 means infinite concurrency. oozie.service.coord.default.throttle 12 Default throttle for a coordinator job to determine how many maximum action should be in WAITING state at the same time. oozie.service.coord.materialization.throttling.factor 0.05 Determine how many maximum actions should be in WAITING state for a single job at any time. The value is calculated by this factor X the total queue size. oozie.service.coord.check.maximum.frequency true When true, Oozie will reject any coordinators with a frequency faster than 5 minutes. It is not recommended to disable this check or submit coordinators with frequencies faster than 5 minutes: doing so can cause unintended behavior and additional system stress. oozie.service.ELService.groups job-submit,workflow,wf-sla-submit,coord-job-submit-freq,coord-job-submit-nofuncs,coord-job-submit-data,coord-job-submit-instances,coord-sla-submit,coord-action-create,coord-action-create-inst,coord-sla-create,coord-action-start,coord-job-wait-timeout,bundle-submit,coord-job-submit-initial-instance List of groups for different ELServices oozie.service.ELService.constants.job-submit EL constant declarations, separated by commas, format is [PREFIX:]NAME=CLASS#CONSTANT. oozie.service.ELService.functions.job-submit EL functions declarations, separated by commas, format is [PREFIX:]NAME=CLASS#METHOD. oozie.service.ELService.ext.constants.job-submit EL constant declarations, separated by commas, format is [PREFIX:]NAME=CLASS#CONSTANT. This property is a convenience property to add extensions without having to include all the built in ones. 
oozie.service.ELService.ext.functions.job-submit EL functions declarations, separated by commas, format is [PREFIX:]NAME=CLASS#METHOD. This property is a convenience property to add extensions without having to include all the built in ones. oozie.service.ELService.constants.workflow KB=org.apache.oozie.util.ELConstantsFunctions#KB, MB=org.apache.oozie.util.ELConstantsFunctions#MB, GB=org.apache.oozie.util.ELConstantsFunctions#GB, TB=org.apache.oozie.util.ELConstantsFunctions#TB, PB=org.apache.oozie.util.ELConstantsFunctions#PB, RECORDS=org.apache.oozie.action.hadoop.HadoopELFunctions#RECORDS, MAP_IN=org.apache.oozie.action.hadoop.HadoopELFunctions#MAP_IN, MAP_OUT=org.apache.oozie.action.hadoop.HadoopELFunctions#MAP_OUT, REDUCE_IN=org.apache.oozie.action.hadoop.HadoopELFunctions#REDUCE_IN, REDUCE_OUT=org.apache.oozie.action.hadoop.HadoopELFunctions#REDUCE_OUT, GROUPS=org.apache.oozie.action.hadoop.HadoopELFunctions#GROUPS EL constant declarations, separated by commas, format is [PREFIX:]NAME=CLASS#CONSTANT. oozie.service.ELService.ext.constants.workflow EL constant declarations, separated by commas, format is [PREFIX:]NAME=CLASS#CONSTANT. This property is a convenience property to add extensions to the built in executors without having to include all the built in ones. oozie.service.ELService.functions.workflow firstNotNull=org.apache.oozie.util.ELConstantsFunctions#firstNotNull, concat=org.apache.oozie.util.ELConstantsFunctions#concat, replaceAll=org.apache.oozie.util.ELConstantsFunctions#replaceAll, appendAll=org.apache.oozie.util.ELConstantsFunctions#appendAll, trim=org.apache.oozie.util.ELConstantsFunctions#trim, timestamp=org.apache.oozie.util.ELConstantsFunctions#timestamp, urlEncode=org.apache.oozie.util.ELConstantsFunctions#urlEncode, toJsonStr=org.apache.oozie.util.ELConstantsFunctions#toJsonStr, toPropertiesStr=org.apache.oozie.util.ELConstantsFunctions#toPropertiesStr, toConfigurationStr=org.apache.oozie.util.ELConstantsFunctions#toConfigurationStr, wf:id=org.apache.oozie.DagELFunctions#wf_id, wf:name=org.apache.oozie.DagELFunctions#wf_name, wf:appPath=org.apache.oozie.DagELFunctions#wf_appPath, wf:conf=org.apache.oozie.DagELFunctions#wf_conf, wf:user=org.apache.oozie.DagELFunctions#wf_user, wf:group=org.apache.oozie.DagELFunctions#wf_group, wf:callback=org.apache.oozie.DagELFunctions#wf_callback, wf:transition=org.apache.oozie.DagELFunctions#wf_transition, wf:lastErrorNode=org.apache.oozie.DagELFunctions#wf_lastErrorNode, wf:errorCode=org.apache.oozie.DagELFunctions#wf_errorCode, wf:errorMessage=org.apache.oozie.DagELFunctions#wf_errorMessage, wf:run=org.apache.oozie.DagELFunctions#wf_run, wf:actionData=org.apache.oozie.DagELFunctions#wf_actionData, wf:actionExternalId=org.apache.oozie.DagELFunctions#wf_actionExternalId, wf:actionTrackerUri=org.apache.oozie.DagELFunctions#wf_actionTrackerUri, wf:actionExternalStatus=org.apache.oozie.DagELFunctions#wf_actionExternalStatus, hadoop:counters=org.apache.oozie.action.hadoop.HadoopELFunctions#hadoop_counters, hadoop:conf=org.apache.oozie.action.hadoop.HadoopELFunctions#hadoop_conf, fs:exists=org.apache.oozie.action.hadoop.FsELFunctions#fs_exists, fs:isDir=org.apache.oozie.action.hadoop.FsELFunctions#fs_isDir, fs:dirSize=org.apache.oozie.action.hadoop.FsELFunctions#fs_dirSize, fs:fileSize=org.apache.oozie.action.hadoop.FsELFunctions#fs_fileSize, fs:blockSize=org.apache.oozie.action.hadoop.FsELFunctions#fs_blockSize, hcat:exists=org.apache.oozie.coord.HCatELFunctions#hcat_exists EL functions declarations, separated by commas, format is 
[PREFIX:]NAME=CLASS#METHOD. oozie.service.WorkflowAppService.WorkflowDefinitionMaxLength 100000 The maximum length of the workflow definition in bytes An error will be reported if the length exceeds the given maximum oozie.service.ELService.ext.functions.workflow EL functions declarations, separated by commas, format is [PREFIX:]NAME=CLASS#METHOD. This property is a convenience property to add extensions to the built in executors without having to include all the built in ones. oozie.service.ELService.constants.wf-sla-submit MINUTES=org.apache.oozie.util.ELConstantsFunctions#SUBMIT_MINUTES, HOURS=org.apache.oozie.util.ELConstantsFunctions#SUBMIT_HOURS, DAYS=org.apache.oozie.util.ELConstantsFunctions#SUBMIT_DAYS EL constant declarations, separated by commas, format is [PREFIX:]NAME=CLASS#CONSTANT. oozie.service.ELService.ext.constants.wf-sla-submit EL constant declarations, separated by commas, format is [PREFIX:]NAME=CLASS#CONSTANT. This property is a convenience property to add extensions to the built in executors without having to include all the built in ones. oozie.service.ELService.functions.wf-sla-submit EL functions declarations, separated by commas, format is [PREFIX:]NAME=CLASS#METHOD. oozie.service.ELService.ext.functions.wf-sla-submit EL functions declarations, separated by commas, format is [PREFIX:]NAME=CLASS#METHOD. This property is a convenience property to add extensions to the built in executors without having to include all the built in ones. l oozie.service.ELService.constants.coord-job-submit-freq EL constant declarations, separated by commas, format is [PREFIX:]NAME=CLASS#CONSTANT. oozie.service.ELService.ext.constants.coord-job-submit-freq EL constant declarations, separated by commas, format is [PREFIX:]NAME=CLASS#CONSTANT. This property is a convenience property to add extensions to the built in executors without having to include all the built in ones. oozie.service.ELService.functions.coord-job-submit-freq coord:days=org.apache.oozie.coord.CoordELFunctions#ph1_coord_days, coord:months=org.apache.oozie.coord.CoordELFunctions#ph1_coord_months, coord:hours=org.apache.oozie.coord.CoordELFunctions#ph1_coord_hours, coord:minutes=org.apache.oozie.coord.CoordELFunctions#ph1_coord_minutes, coord:endOfDays=org.apache.oozie.coord.CoordELFunctions#ph1_coord_endOfDays, coord:endOfMonths=org.apache.oozie.coord.CoordELFunctions#ph1_coord_endOfMonths, coord:endOfWeeks=org.apache.oozie.coord.CoordELFunctions#ph1_coord_endOfWeeks, coord:conf=org.apache.oozie.coord.CoordELFunctions#coord_conf, coord:user=org.apache.oozie.coord.CoordELFunctions#coord_user, hadoop:conf=org.apache.oozie.action.hadoop.HadoopELFunctions#hadoop_conf EL functions declarations, separated by commas, format is [PREFIX:]NAME=CLASS#METHOD. oozie.service.ELService.functions.coord-job-submit-initial-instance ${oozie.service.ELService.functions.coord-job-submit-nofuncs}, coord:dateOffset=org.apache.oozie.coord.CoordELFunctions#ph2_coord_dateOffset, coord:dateTzOffset=org.apache.oozie.coord.CoordELFunctions#ph2_coord_dateTzOffset EL functions for coord job submit initial instance, separated by commas, format is [PREFIX:]NAME=CLASS#METHOD. oozie.service.ELService.ext.functions.coord-job-submit-freq EL functions declarations, separated by commas, format is [PREFIX:]NAME=CLASS#METHOD. This property is a convenience property to add extensions to the built in executors without having to include all the built in ones. 
oozie.service.ELService.constants.coord-job-wait-timeout EL functions declarations, separated by commas, format is [PREFIX:]NAME=CLASS#METHOD. oozie.service.ELService.ext.constants.coord-job-wait-timeout EL functions declarations, separated by commas, format is [PREFIX:]NAME=CLASS#METHOD. This property is a convenience property to add extensions without having to include all the built in ones. oozie.service.ELService.functions.coord-job-wait-timeout coord:days=org.apache.oozie.coord.CoordELFunctions#ph1_coord_days, coord:months=org.apache.oozie.coord.CoordELFunctions#ph1_coord_months, coord:hours=org.apache.oozie.coord.CoordELFunctions#ph1_coord_hours, coord:minutes=org.apache.oozie.coord.CoordELFunctions#ph1_coord_minutes, hadoop:conf=org.apache.oozie.action.hadoop.HadoopELFunctions#hadoop_conf EL functions declarations, separated by commas, format is [PREFIX:]NAME=CLASS#METHOD. oozie.service.ELService.ext.functions.coord-job-wait-timeout EL functions declarations, separated by commas, format is [PREFIX:]NAME=CLASS#METHOD. This property is a convenience property to add extensions without having to include all the built in ones. oozie.service.ELService.constants.coord-job-submit-nofuncs MINUTE=org.apache.oozie.coord.CoordELConstants#SUBMIT_MINUTE, HOUR=org.apache.oozie.coord.CoordELConstants#SUBMIT_HOUR, DAY=org.apache.oozie.coord.CoordELConstants#SUBMIT_DAY, MONTH=org.apache.oozie.coord.CoordELConstants#SUBMIT_MONTH, YEAR=org.apache.oozie.coord.CoordELConstants#SUBMIT_YEAR EL constant declarations, separated by commas, format is [PREFIX:]NAME=CLASS#CONSTANT. oozie.service.ELService.ext.constants.coord-job-submit-nofuncs EL constant declarations, separated by commas, format is [PREFIX:]NAME=CLASS#CONSTANT. This property is a convenience property to add extensions to the built in executors without having to include all the built in ones. oozie.service.ELService.functions.coord-job-submit-nofuncs coord:conf=org.apache.oozie.coord.CoordELFunctions#coord_conf, coord:user=org.apache.oozie.coord.CoordELFunctions#coord_user, hadoop:conf=org.apache.oozie.action.hadoop.HadoopELFunctions#hadoop_conf EL functions declarations, separated by commas, format is [PREFIX:]NAME=CLASS#METHOD. oozie.service.ELService.ext.functions.coord-job-submit-nofuncs EL functions declarations, separated by commas, format is [PREFIX:]NAME=CLASS#METHOD. This property is a convenience property to add extensions to the built in executors without having to include all the built in ones. oozie.service.ELService.constants.coord-job-submit-instances EL constant declarations, separated by commas, format is [PREFIX:]NAME=CLASS#CONSTANT. oozie.service.ELService.ext.constants.coord-job-submit-instances EL constant declarations, separated by commas, format is [PREFIX:]NAME=CLASS#CONSTANT. This property is a convenience property to add extensions to the built in executors without having to include all the built in ones. 
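For orientation, each flattened entry above corresponds to a <property> block in oozie-site.xml. The sketch below shows what the [PREFIX:]NAME=CLASS#CONSTANT and [PREFIX:]NAME=CLASS#METHOD formats look like when registering extensions through the ext.* convenience properties; the com.example.MyELFunctions class and its members are hypothetical, not part of this repository or of Oozie.

<configuration>
  <!-- Hypothetical EL constant extension: [PREFIX:]NAME=CLASS#CONSTANT -->
  <property>
    <name>oozie.service.ELService.ext.constants.workflow</name>
    <value>my:MAX_RETRIES=com.example.MyELFunctions#MAX_RETRIES</value>
  </property>
  <!-- Hypothetical EL function extension: [PREFIX:]NAME=CLASS#METHOD -->
  <property>
    <name>oozie.service.ELService.ext.functions.workflow</name>
    <value>my:shortId=com.example.MyELFunctions#shortId</value>
  </property>
</configuration>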
oozie.service.ELService.functions.coord-job-submit-instances coord:hoursInDay=org.apache.oozie.coord.CoordELFunctions#ph1_coord_hoursInDay_echo, coord:daysInMonth=org.apache.oozie.coord.CoordELFunctions#ph1_coord_daysInMonth_echo, coord:tzOffset=org.apache.oozie.coord.CoordELFunctions#ph1_coord_tzOffset_echo, coord:current=org.apache.oozie.coord.CoordELFunctions#ph1_coord_current_echo, coord:currentRange=org.apache.oozie.coord.CoordELFunctions#ph1_coord_currentRange_echo, coord:offset=org.apache.oozie.coord.CoordELFunctions#ph1_coord_offset_echo, coord:latest=org.apache.oozie.coord.CoordELFunctions#ph1_coord_latest_echo, coord:latestRange=org.apache.oozie.coord.CoordELFunctions#ph1_coord_latestRange_echo, coord:future=org.apache.oozie.coord.CoordELFunctions#ph1_coord_future_echo, coord:futureRange=org.apache.oozie.coord.CoordELFunctions#ph1_coord_futureRange_echo, coord:formatTime=org.apache.oozie.coord.CoordELFunctions#ph1_coord_formatTime_echo, coord:epochTime=org.apache.oozie.coord.CoordELFunctions#ph1_coord_epochTime_echo, coord:conf=org.apache.oozie.coord.CoordELFunctions#coord_conf, coord:user=org.apache.oozie.coord.CoordELFunctions#coord_user, coord:absolute=org.apache.oozie.coord.CoordELFunctions#ph1_coord_absolute_echo, coord:endOfMonths=org.apache.oozie.coord.CoordELFunctions#ph1_coord_endOfMonths_echo, coord:endOfWeeks=org.apache.oozie.coord.CoordELFunctions#ph1_coord_endOfWeeks_echo, coord:endOfDays=org.apache.oozie.coord.CoordELFunctions#ph1_coord_endOfDays_echo, hadoop:conf=org.apache.oozie.action.hadoop.HadoopELFunctions#hadoop_conf, coord:dateOffset=org.apache.oozie.coord.CoordELFunctions#ph1_coord_dateOffset_echo, coord:dateTzOffset=org.apache.oozie.coord.CoordELFunctions#ph1_coord_dateTzOffset_echo EL functions declarations, separated by commas, format is [PREFIX:]NAME=CLASS#METHOD. oozie.service.ELService.ext.functions.coord-job-submit-instances EL functions declarations, separated by commas, format is [PREFIX:]NAME=CLASS#METHOD. This property is a convenience property to add extensions to the built in executors without having to include all the built in ones. oozie.service.ELService.constants.coord-job-submit-data EL constant declarations, separated by commas, format is [PREFIX:]NAME=CLASS#CONSTANT. oozie.service.ELService.ext.constants.coord-job-submit-data EL constant declarations, separated by commas, format is [PREFIX:]NAME=CLASS#CONSTANT. This property is a convenience property to add extensions to the built in executors without having to include all the built in ones. 
oozie.service.ELService.functions.coord-job-submit-data coord:dataIn=org.apache.oozie.coord.CoordELFunctions#ph1_coord_dataIn_echo, coord:dataOut=org.apache.oozie.coord.CoordELFunctions#ph1_coord_dataOut_echo, coord:nominalTime=org.apache.oozie.coord.CoordELFunctions#ph1_coord_nominalTime_echo_wrap, coord:actualTime=org.apache.oozie.coord.CoordELFunctions#ph1_coord_actualTime_echo_wrap, coord:dateOffset=org.apache.oozie.coord.CoordELFunctions#ph1_coord_dateOffset_echo, coord:dateTzOffset=org.apache.oozie.coord.CoordELFunctions#ph1_coord_dateTzOffset_echo, coord:formatTime=org.apache.oozie.coord.CoordELFunctions#ph1_coord_formatTime_echo, coord:epochTime=org.apache.oozie.coord.CoordELFunctions#ph1_coord_epochTime_echo, coord:actionId=org.apache.oozie.coord.CoordELFunctions#ph1_coord_actionId_echo, coord:name=org.apache.oozie.coord.CoordELFunctions#ph1_coord_name_echo, coord:conf=org.apache.oozie.coord.CoordELFunctions#coord_conf, coord:user=org.apache.oozie.coord.CoordELFunctions#coord_user, coord:databaseIn=org.apache.oozie.coord.HCatELFunctions#ph1_coord_databaseIn_echo, coord:databaseOut=org.apache.oozie.coord.HCatELFunctions#ph1_coord_databaseOut_echo, coord:tableIn=org.apache.oozie.coord.HCatELFunctions#ph1_coord_tableIn_echo, coord:tableOut=org.apache.oozie.coord.HCatELFunctions#ph1_coord_tableOut_echo, coord:dataInPartitionFilter=org.apache.oozie.coord.HCatELFunctions#ph1_coord_dataInPartitionFilter_echo, coord:dataInPartitionMin=org.apache.oozie.coord.HCatELFunctions#ph1_coord_dataInPartitionMin_echo, coord:dataInPartitionMax=org.apache.oozie.coord.HCatELFunctions#ph1_coord_dataInPartitionMax_echo, coord:dataInPartitions=org.apache.oozie.coord.HCatELFunctions#ph1_coord_dataInPartitions_echo, coord:dataOutPartitions=org.apache.oozie.coord.HCatELFunctions#ph1_coord_dataOutPartitions_echo, coord:dataOutPartitionValue=org.apache.oozie.coord.HCatELFunctions#ph1_coord_dataOutPartitionValue_echo, hadoop:conf=org.apache.oozie.action.hadoop.HadoopELFunctions#hadoop_conf EL functions declarations, separated by commas, format is [PREFIX:]NAME=CLASS#METHOD. oozie.service.ELService.ext.functions.coord-job-submit-data EL functions declarations, separated by commas, format is [PREFIX:]NAME=CLASS#METHOD. This property is a convenience property to add extensions to the built in executors without having to include all the built in ones. oozie.service.ELService.constants.coord-sla-submit MINUTES=org.apache.oozie.coord.CoordELConstants#SUBMIT_MINUTES, HOURS=org.apache.oozie.coord.CoordELConstants#SUBMIT_HOURS, DAYS=org.apache.oozie.coord.CoordELConstants#SUBMIT_DAYS EL constant declarations, separated by commas, format is [PREFIX:]NAME=CLASS#CONSTANT. oozie.service.ELService.ext.constants.coord-sla-submit EL constant declarations, separated by commas, format is [PREFIX:]NAME=CLASS#CONSTANT. This property is a convenience property to add extensions to the built in executors without having to include all the built in ones. 
oozie.service.ELService.functions.bundle-submit bundle:conf=org.apache.oozie.bundle.BundleELFunctions#bundle_conf oozie.service.ELService.functions.coord-sla-submit coord:dataOut=org.apache.oozie.coord.CoordELFunctions#ph1_coord_dataOut_echo, coord:nominalTime=org.apache.oozie.coord.CoordELFunctions#ph1_coord_nominalTime_echo_fixed, coord:actualTime=org.apache.oozie.coord.CoordELFunctions#ph1_coord_actualTime_echo_wrap, coord:dateOffset=org.apache.oozie.coord.CoordELFunctions#ph1_coord_dateOffset_echo, coord:dateTzOffset=org.apache.oozie.coord.CoordELFunctions#ph1_coord_dateTzOffset_echo, coord:formatTime=org.apache.oozie.coord.CoordELFunctions#ph1_coord_formatTime_echo, coord:epochTime=org.apache.oozie.coord.CoordELFunctions#ph1_coord_epochTime_echo, coord:actionId=org.apache.oozie.coord.CoordELFunctions#ph1_coord_actionId_echo, coord:name=org.apache.oozie.coord.CoordELFunctions#ph1_coord_name_echo, coord:conf=org.apache.oozie.coord.CoordELFunctions#coord_conf, coord:user=org.apache.oozie.coord.CoordELFunctions#coord_user, coord:databaseOut=org.apache.oozie.coord.HCatELFunctions#ph1_coord_databaseOut_echo, coord:tableOut=org.apache.oozie.coord.HCatELFunctions#ph1_coord_tableOut_echo, coord:dataOutPartitions=org.apache.oozie.coord.HCatELFunctions#ph1_coord_dataOutPartitions_echo, coord:dataOutPartitionValue=org.apache.oozie.coord.HCatELFunctions#ph1_coord_dataOutPartitionValue_echo, hadoop:conf=org.apache.oozie.action.hadoop.HadoopELFunctions#hadoop_conf EL functions declarations, separated by commas, format is [PREFIX:]NAME=CLASS#METHOD. oozie.service.ELService.ext.functions.coord-sla-submit EL functions declarations, separated by commas, format is [PREFIX:]NAME=CLASS#METHOD. This property is a convenience property to add extensions to the built in executors without having to include all the built in ones. oozie.service.ELService.constants.coord-action-create EL constant declarations, separated by commas, format is [PREFIX:]NAME=CLASS#CONSTANT. oozie.service.ELService.ext.constants.coord-action-create EL constant declarations, separated by commas, format is [PREFIX:]NAME=CLASS#CONSTANT. This property is a convenience property to add extensions to the built in executors without having to include all the built in ones. 
oozie.service.ELService.functions.coord-action-create coord:hoursInDay=org.apache.oozie.coord.CoordELFunctions#ph2_coord_hoursInDay, coord:daysInMonth=org.apache.oozie.coord.CoordELFunctions#ph2_coord_daysInMonth, coord:tzOffset=org.apache.oozie.coord.CoordELFunctions#ph2_coord_tzOffset, coord:current=org.apache.oozie.coord.CoordELFunctions#ph2_coord_current, coord:currentRange=org.apache.oozie.coord.CoordELFunctions#ph2_coord_currentRange, coord:offset=org.apache.oozie.coord.CoordELFunctions#ph2_coord_offset, coord:latest=org.apache.oozie.coord.CoordELFunctions#ph2_coord_latest_echo, coord:latestRange=org.apache.oozie.coord.CoordELFunctions#ph2_coord_latestRange_echo, coord:future=org.apache.oozie.coord.CoordELFunctions#ph2_coord_future_echo, coord:futureRange=org.apache.oozie.coord.CoordELFunctions#ph2_coord_futureRange_echo, coord:actionId=org.apache.oozie.coord.CoordELFunctions#ph2_coord_actionId, coord:name=org.apache.oozie.coord.CoordELFunctions#ph2_coord_name, coord:formatTime=org.apache.oozie.coord.CoordELFunctions#ph2_coord_formatTime, coord:epochTime=org.apache.oozie.coord.CoordELFunctions#ph2_coord_epochTime, coord:conf=org.apache.oozie.coord.CoordELFunctions#coord_conf, coord:user=org.apache.oozie.coord.CoordELFunctions#coord_user, coord:absolute=org.apache.oozie.coord.CoordELFunctions#ph2_coord_absolute_echo, coord:endOfMonths=org.apache.oozie.coord.CoordELFunctions#ph2_coord_endOfMonths_echo, coord:endOfWeeks=org.apache.oozie.coord.CoordELFunctions#ph2_coord_endOfWeeks_echo, coord:endOfDays=org.apache.oozie.coord.CoordELFunctions#ph2_coord_endOfDays_echo, coord:absoluteRange=org.apache.oozie.coord.CoordELFunctions#ph2_coord_absolute_range, hadoop:conf=org.apache.oozie.action.hadoop.HadoopELFunctions#hadoop_conf EL functions declarations, separated by commas, format is [PREFIX:]NAME=CLASS#METHOD. oozie.service.ELService.ext.functions.coord-action-create EL functions declarations, separated by commas, format is [PREFIX:]NAME=CLASS#METHOD. This property is a convenience property to add extensions to the built in executors without having to include all the built in ones. oozie.service.ELService.constants.coord-action-create-inst EL constant declarations, separated by commas, format is [PREFIX:]NAME=CLASS#CONSTANT. oozie.service.ELService.ext.constants.coord-action-create-inst EL constant declarations, separated by commas, format is [PREFIX:]NAME=CLASS#CONSTANT. This property is a convenience property to add extensions to the built in executors without having to include all the built in ones. 
oozie.service.ELService.functions.coord-action-create-inst coord:hoursInDay=org.apache.oozie.coord.CoordELFunctions#ph2_coord_hoursInDay, coord:daysInMonth=org.apache.oozie.coord.CoordELFunctions#ph2_coord_daysInMonth, coord:tzOffset=org.apache.oozie.coord.CoordELFunctions#ph2_coord_tzOffset, coord:current=org.apache.oozie.coord.CoordELFunctions#ph2_coord_current_echo, coord:currentRange=org.apache.oozie.coord.CoordELFunctions#ph2_coord_currentRange_echo, coord:offset=org.apache.oozie.coord.CoordELFunctions#ph2_coord_offset_echo, coord:latest=org.apache.oozie.coord.CoordELFunctions#ph2_coord_latest_echo, coord:latestRange=org.apache.oozie.coord.CoordELFunctions#ph2_coord_latestRange_echo, coord:future=org.apache.oozie.coord.CoordELFunctions#ph2_coord_future_echo, coord:futureRange=org.apache.oozie.coord.CoordELFunctions#ph2_coord_futureRange_echo, coord:formatTime=org.apache.oozie.coord.CoordELFunctions#ph2_coord_formatTime, coord:epochTime=org.apache.oozie.coord.CoordELFunctions#ph2_coord_epochTime, coord:conf=org.apache.oozie.coord.CoordELFunctions#coord_conf, coord:user=org.apache.oozie.coord.CoordELFunctions#coord_user, coord:absolute=org.apache.oozie.coord.CoordELFunctions#ph2_coord_absolute_echo, coord:absoluteRange=org.apache.oozie.coord.CoordELFunctions#ph2_coord_absolute_range, coord:endOfMonths=org.apache.oozie.coord.CoordELFunctions#ph2_coord_endOfMonths_echo, coord:endOfWeeks=org.apache.oozie.coord.CoordELFunctions#ph2_coord_endOfWeeks_echo, coord:endOfDays=org.apache.oozie.coord.CoordELFunctions#ph2_coord_endOfDays_echo, hadoop:conf=org.apache.oozie.action.hadoop.HadoopELFunctions#hadoop_conf, coord:dateOffset=org.apache.oozie.coord.CoordELFunctions#ph2_coord_dateOffset, coord:dateTzOffset=org.apache.oozie.coord.CoordELFunctions#ph2_coord_dateTzOffset EL functions declarations, separated by commas, format is [PREFIX:]NAME=CLASS#METHOD. oozie.service.ELService.ext.functions.coord-action-create-inst EL functions declarations, separated by commas, format is [PREFIX:]NAME=CLASS#METHOD. This property is a convenience property to add extensions to the built in executors without having to include all the built in ones. oozie.service.ELService.constants.coord-sla-create EL constant declarations, separated by commas, format is [PREFIX:]NAME=CLASS#CONSTANT. oozie.service.ELService.ext.constants.coord-sla-create MINUTES=org.apache.oozie.coord.CoordELConstants#SUBMIT_MINUTES, HOURS=org.apache.oozie.coord.CoordELConstants#SUBMIT_HOURS, DAYS=org.apache.oozie.coord.CoordELConstants#SUBMIT_DAYS EL constant declarations, separated by commas, format is [PREFIX:]NAME=CLASS#CONSTANT. This property is a convenience property to add extensions to the built in executors without having to include all the built in ones. 
oozie.service.ELService.functions.coord-sla-create coord:dataOut=org.apache.oozie.coord.CoordELFunctions#ph3_coord_dataOut, coord:nominalTime=org.apache.oozie.coord.CoordELFunctions#ph2_coord_nominalTime, coord:actualTime=org.apache.oozie.coord.CoordELFunctions#ph2_coord_actualTime, coord:dateOffset=org.apache.oozie.coord.CoordELFunctions#ph2_coord_dateOffset, coord:dateTzOffset=org.apache.oozie.coord.CoordELFunctions#ph2_coord_dateTzOffset, coord:formatTime=org.apache.oozie.coord.CoordELFunctions#ph2_coord_formatTime, coord:epochTime=org.apache.oozie.coord.CoordELFunctions#ph2_coord_epochTime, coord:actionId=org.apache.oozie.coord.CoordELFunctions#ph2_coord_actionId, coord:name=org.apache.oozie.coord.CoordELFunctions#ph2_coord_name, coord:conf=org.apache.oozie.coord.CoordELFunctions#coord_conf, coord:user=org.apache.oozie.coord.CoordELFunctions#coord_user, coord:databaseOut=org.apache.oozie.coord.HCatELFunctions#ph3_coord_databaseOut, coord:tableOut=org.apache.oozie.coord.HCatELFunctions#ph3_coord_tableOut, coord:dataOutPartitions=org.apache.oozie.coord.HCatELFunctions#ph3_coord_dataOutPartitions, coord:dataOutPartitionValue=org.apache.oozie.coord.HCatELFunctions#ph3_coord_dataOutPartitionValue, hadoop:conf=org.apache.oozie.action.hadoop.HadoopELFunctions#hadoop_conf EL functions declarations, separated by commas, format is [PREFIX:]NAME=CLASS#METHOD. oozie.service.ELService.ext.functions.coord-sla-create EL functions declarations, separated by commas, format is [PREFIX:]NAME=CLASS#METHOD. This property is a convenience property to add extensions to the built in executors without having to include all the built in ones. oozie.service.ELService.constants.coord-action-start EL constant declarations, separated by commas, format is [PREFIX:]NAME=CLASS#CONSTANT. oozie.service.ELService.ext.constants.coord-action-start EL constant declarations, separated by commas, format is [PREFIX:]NAME=CLASS#CONSTANT. This property is a convenience property to add extensions to the built in executors without having to include all the built in ones. 
oozie.service.ELService.functions.coord-action-start coord:hoursInDay=org.apache.oozie.coord.CoordELFunctions#ph3_coord_hoursInDay, coord:daysInMonth=org.apache.oozie.coord.CoordELFunctions#ph3_coord_daysInMonth, coord:tzOffset=org.apache.oozie.coord.CoordELFunctions#ph3_coord_tzOffset, coord:latest=org.apache.oozie.coord.CoordELFunctions#ph3_coord_latest, coord:latestRange=org.apache.oozie.coord.CoordELFunctions#ph3_coord_latestRange, coord:future=org.apache.oozie.coord.CoordELFunctions#ph3_coord_future, coord:futureRange=org.apache.oozie.coord.CoordELFunctions#ph3_coord_futureRange, coord:dataIn=org.apache.oozie.coord.CoordELFunctions#ph3_coord_dataIn, coord:dataOut=org.apache.oozie.coord.CoordELFunctions#ph3_coord_dataOut, coord:nominalTime=org.apache.oozie.coord.CoordELFunctions#ph3_coord_nominalTime, coord:actualTime=org.apache.oozie.coord.CoordELFunctions#ph3_coord_actualTime, coord:dateOffset=org.apache.oozie.coord.CoordELFunctions#ph3_coord_dateOffset, coord:dateTzOffset=org.apache.oozie.coord.CoordELFunctions#ph3_coord_dateTzOffset, coord:formatTime=org.apache.oozie.coord.CoordELFunctions#ph3_coord_formatTime, coord:epochTime=org.apache.oozie.coord.CoordELFunctions#ph3_coord_epochTime, coord:actionId=org.apache.oozie.coord.CoordELFunctions#ph3_coord_actionId, coord:name=org.apache.oozie.coord.CoordELFunctions#ph3_coord_name, coord:conf=org.apache.oozie.coord.CoordELFunctions#coord_conf, coord:user=org.apache.oozie.coord.CoordELFunctions#coord_user, coord:databaseIn=org.apache.oozie.coord.HCatELFunctions#ph3_coord_databaseIn, coord:databaseOut=org.apache.oozie.coord.HCatELFunctions#ph3_coord_databaseOut, coord:tableIn=org.apache.oozie.coord.HCatELFunctions#ph3_coord_tableIn, coord:tableOut=org.apache.oozie.coord.HCatELFunctions#ph3_coord_tableOut, coord:dataInPartitionFilter=org.apache.oozie.coord.HCatELFunctions#ph3_coord_dataInPartitionFilter, coord:dataInPartitionMin=org.apache.oozie.coord.HCatELFunctions#ph3_coord_dataInPartitionMin, coord:dataInPartitionMax=org.apache.oozie.coord.HCatELFunctions#ph3_coord_dataInPartitionMax, coord:dataInPartitions=org.apache.oozie.coord.HCatELFunctions#ph3_coord_dataInPartitions, coord:dataOutPartitions=org.apache.oozie.coord.HCatELFunctions#ph3_coord_dataOutPartitions, coord:dataOutPartitionValue=org.apache.oozie.coord.HCatELFunctions#ph3_coord_dataOutPartitionValue, hadoop:conf=org.apache.oozie.action.hadoop.HadoopELFunctions#hadoop_conf EL functions declarations, separated by commas, format is [PREFIX:]NAME=CLASS#METHOD. oozie.service.ELService.ext.functions.coord-action-start EL functions declarations, separated by commas, format is [PREFIX:]NAME=CLASS#METHOD. This property is a convenience property to add extensions to the built in executors without having to include all the built in ones. oozie.service.ELService.latest-el.use-current-time false Determine whether to use the current time to determine the latest dependency or the action creation time. This is for backward compatibility with older oozie behaviour. oozie.service.UUIDService.generator counter random : generated UUIDs will be random strings. counter: generated UUIDs generated will be a counter postfixed with the system startup time. oozie.service.DBLiteWorkflowStoreService.status.metrics.collection.interval 5 Workflow Status metrics collection interval in minutes. oozie.service.DBLiteWorkflowStoreService.status.metrics.window 3600 Workflow Status metrics collection window in seconds. Workflow status will be instrumented for the window. 
oozie.db.schema.name oozie Oozie database name. oozie.db.import.batch.size 1000 How many entities are imported in a single transaction by the Oozie DB import CLI tool to avoid OutOfMemoryErrors. oozie.service.JPAService.create.db.schema false Creates Oozie DB. If set to true, it creates the DB schema if it does not exist. If the DB schema exists, it is a NOP. If set to false, it does not create the DB schema. If the DB schema does not exist, it fails start up. oozie.service.JPAService.validate.db.connection true Validates DB connections from the DB connection pool. If the 'oozie.service.JPAService.create.db.schema' property is set to true, this property is ignored. oozie.service.JPAService.validate.db.connection.eviction.interval 300000 Validates DB connections from the DB connection pool. When validate db connection 'TestWhileIdle' is true, the number of milliseconds to sleep between runs of the idle object evictor thread. oozie.service.JPAService.validate.db.connection.eviction.num 10 Validates DB connections from the DB connection pool. When validate db connection 'TestWhileIdle' is true, the number of objects to examine during each run of the idle object evictor thread. oozie.service.JPAService.connection.data.source org.apache.oozie.util.db.BasicDataSourceWrapper DataSource to be used for connection pooling. If you want the property openJpa.connectionProperties="DriverClassName=..." to have a real effect, set this to org.apache.oozie.util.db.BasicDataSourceWrapper. A DBCP bug (https://issues.apache.org/jira/browse/DBCP-333) otherwise prevents the JDBC driver setting from having a real effect when using a custom class loader. oozie.service.JPAService.connection.properties DataSource connection properties. oozie.service.JPAService.jdbc.driver org.apache.derby.jdbc.EmbeddedDriver JDBC driver class. oozie.service.JPAService.jdbc.url jdbc:derby:${oozie.data.dir}/${oozie.db.schema.name}-db;create=true JDBC URL. oozie.service.JPAService.jdbc.username sa DB user name. oozie.service.JPAService.jdbc.password DB user password. IMPORTANT: if the password is empty, leave a one-space string; the service trims the value, and if it is empty, Configuration assumes it is NULL. IMPORTANT: if the StoreServicePasswordService is active, it will reset this value with the value given in the console. oozie.service.JPAService.pool.max.active.conn 10 Max number of connections. oozie.service.JPAService.openjpa.BrokerImpl non-finalizing The default OpenJPAEntityManager implementation automatically closes itself during instance finalization. This guards against accidental resource leaks that may occur if a developer fails to explicitly close EntityManagers when finished with them, but it also incurs a scalability bottleneck, since the JVM must perform synchronization during instance creation, and since the finalizer thread will have more instances to monitor. To avoid this overhead, set the openjpa.BrokerImpl configuration property to non-finalizing. To use the default implementation, set it to an empty space. oozie.service.JPAService.retry.initial-wait-time.ms 100 Initial wait time in milliseconds between the first failed database operation and the re-attempted operation. The wait time is doubled at each retry. oozie.service.JPAService.retry.maximum-wait-time.ms 30000 Maximum wait time between database retry attempts. oozie.service.JPAService.retry.max-retries 10 Maximum number of retries for a failed database operation.
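As a concrete illustration of the JPAService settings above, an oozie-site.xml override that points Oozie at an external database instead of the embedded Derby instance might look like the following; the PostgreSQL driver class, host, and credentials are illustrative assumptions, not values taken from this repository.

<configuration>
  <!-- Illustrative only: replace embedded Derby with an external PostgreSQL database -->
  <property>
    <name>oozie.service.JPAService.jdbc.driver</name>
    <value>org.postgresql.Driver</value>
  </property>
  <property>
    <name>oozie.service.JPAService.jdbc.url</name>
    <value>jdbc:postgresql://db.example.com:5432/${oozie.db.schema.name}</value>
  </property>
  <property>
    <name>oozie.service.JPAService.jdbc.username</name>
    <value>oozie</value>
  </property>
  <property>
    <name>oozie.service.JPAService.jdbc.password</name>
    <value>oozie-password</value>
  </property>
</configuration>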
oozie.service.SchemaService.wf.schemas oozie-common-1.0.xsd, oozie-workflow-0.1.xsd,oozie-workflow-0.2.xsd,oozie-workflow-0.2.5.xsd,oozie-workflow-0.3.xsd,oozie-workflow-0.4.xsd, oozie-workflow-0.4.5.xsd,oozie-workflow-0.5.xsd,oozie-workflow-1.0.xsd, shell-action-0.1.xsd,shell-action-0.2.xsd,shell-action-0.3.xsd,shell-action-1.0.xsd, email-action-0.1.xsd,email-action-0.2.xsd, hive-action-0.2.xsd,hive-action-0.3.xsd,hive-action-0.4.xsd,hive-action-0.5.xsd,hive-action-0.6.xsd,hive-action-1.0.xsd, sqoop-action-0.2.xsd,sqoop-action-0.3.xsd,sqoop-action-0.4.xsd,sqoop-action-1.0.xsd, ssh-action-0.1.xsd,ssh-action-0.2.xsd, distcp-action-0.1.xsd,distcp-action-0.2.xsd,distcp-action-1.0.xsd, oozie-sla-0.1.xsd,oozie-sla-0.2.xsd, hive2-action-0.1.xsd,hive2-action-0.2.xsd,hive2-action-1.0.xsd, spark-action-0.1.xsd,spark-action-0.2.xsd,spark-action-1.0.xsd, git-action-1.0.xsd List of schemas for workflows (separated by commas). oozie.service.SchemaService.wf.ext.schemas List of additional schemas for workflows (separated by commas). oozie.service.SchemaService.coord.schemas oozie-coordinator-0.1.xsd,oozie-coordinator-0.2.xsd,oozie-coordinator-0.3.xsd,oozie-coordinator-0.4.xsd, oozie-coordinator-0.5.xsd,oozie-sla-0.1.xsd,oozie-sla-0.2.xsd List of schemas for coordinators (separated by commas). oozie.service.SchemaService.coord.ext.schemas List of additional schemas for coordinators (separated by commas). oozie.service.SchemaService.bundle.schemas oozie-bundle-0.1.xsd,oozie-bundle-0.2.xsd List of schemas for bundles (separated by commas). oozie.service.SchemaService.bundle.ext.schemas List of additional schemas for bundles (separated by commas). oozie.service.SchemaService.sla.schemas gms-oozie-sla-0.1.xsd,oozie-sla-0.2.xsd List of schemas for semantic validation for GMS SLA (separated by commas). oozie.service.SchemaService.sla.ext.schemas List of additional schemas for semantic validation for GMS SLA (separated by commas). oozie.service.CallbackService.base.url ${oozie.base.url}/callback Base callback URL used by ActionExecutors. oozie.service.CallbackService.early.requeue.max.retries 5 If Oozie receives a callback too early (while the action is in PREP state), it will requeue the command this many times to give the action time to transition to RUNNING. oozie.servlet.CallbackServlet.max.data.len 2048 Max size in characters for the action completion data output. oozie.external.stats.max.size -1 Max size in bytes for action stats. -1 means infinite value. oozie.JobCommand.job.console.url ${oozie.base.url}?job= Base console URL for a workflow job. oozie.service.ActionService.executor.classes org.apache.oozie.action.decision.DecisionActionExecutor, org.apache.oozie.action.hadoop.JavaActionExecutor, org.apache.oozie.action.hadoop.FsActionExecutor, org.apache.oozie.action.hadoop.MapReduceActionExecutor, org.apache.oozie.action.hadoop.PigActionExecutor, org.apache.oozie.action.hadoop.HiveActionExecutor, org.apache.oozie.action.hadoop.ShellActionExecutor, org.apache.oozie.action.hadoop.SqoopActionExecutor, org.apache.oozie.action.hadoop.DistcpActionExecutor, org.apache.oozie.action.hadoop.Hive2ActionExecutor, org.apache.oozie.action.ssh.SshActionExecutor, org.apache.oozie.action.oozie.SubWorkflowActionExecutor, org.apache.oozie.action.email.EmailActionExecutor, org.apache.oozie.action.hadoop.SparkActionExecutor, org.apache.oozie.action.hadoop.GitActionExecutor List of ActionExecutors classes (separated by commas). Only action types with associated executors can be used in workflows. 
oozie.service.ActionService.executor.ext.classes List of ActionExecutors extension classes (separated by commas). Only action types with associated executors can be used in workflows. This property is a convenience property to add extensions to the built in executors without having to include all the built in ones. oozie.service.ActionCheckerService.action.check.interval 60 The frequency at which the ActionCheckService will run. oozie.service.ActionCheckerService.action.check.delay 600 The time, in seconds, between an ActionCheck for the same action. oozie.service.ActionCheckerService.callable.batch.size 10 This value determines the number of actions which will be batched together to be executed by a single thread. oozie.service.StatusTransitService.statusTransit.interval 60 The frequency in seconds at which the StatusTransitService will run. oozie.service.StatusTransitService.backward.support.for.coord.status false True, if a coordinator job is submitted using 'uri:oozie:coordinator:0.1' and wants to keep the Oozie 2.x status transit. If set to true: 1. the SUCCEEDED state in a coordinator job means materialization is done; 2. there is no DONEWITHERROR state in a coordinator job; 3. there is no PAUSED or PREPPAUSED state in a coordinator job; 4. PREPSUSPENDED becomes SUSPENDED in a coordinator job. oozie.service.StatusTransitService.backward.support.for.states.without.error true True, if you want to keep the Oozie 3.2 status transit. Change it to false for Oozie 4.x releases. If set to true, there are no states like RUNNINGWITHERROR, SUSPENDEDWITHERROR and PAUSEDWITHERROR for coordinators and bundles. oozie.service.PauseTransitService.PauseTransit.interval 60 The frequency in seconds at which the PauseTransitService will run. oozie.action.max.output.data 2048 Max size in characters for output data. oozie.action.fs.glob.max 50000 Maximum number of globbed files. oozie.action.launcher.am.restart.kill.childjobs true Multiple instances of launcher jobs can happen due to RM non-work preserving recovery on RM restart, AM recovery due to crashes or AM network connectivity loss. This could also lead to orphaned child jobs of the old AM attempts leading to conflicting runs. This kills child jobs of previous attempts using YARN application tags. oozie.action.spark.setup.hadoop.conf.dir false Oozie action.xml (oozie.action.conf.xml) contains all the hadoop configuration and user provided configurations. This property will allow users to copy the Oozie action.xml as hadoop *-site configuration files. The advantage is that the user does not need to manage these files in the Spark sharelib. If users want to manage the hadoop configurations themselves, they should disable it. oozie.action.shell.setup.hadoop.conf.dir false The Shell action is commonly used to run programs that rely on HADOOP_CONF_DIR (e.g. hive, beeline, sqoop, etc). With YARN, HADOOP_CONF_DIR is set to the NodeManager's copies of Hadoop's *-site.xml files, which can be problematic because (a) they are meant for the NM, not necessarily clients, and (b) they won't have any of the configs that Oozie, or the user through Oozie, sets. When this property is set to true, the Shell action will prepare the *-site.xml files based on the correct config and set HADOOP_CONF_DIR to point to it. Setting it to false will make Oozie leave HADOOP_CONF_DIR alone. This can also be set at the Action level by putting it in the Shell Action's configuration section, which also has priority. That said, it's recommended to use the appropriate action type when possible.
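Since oozie.action.shell.setup.hadoop.conf.dir can also be enabled per action, a minimal sketch of a shell action that turns it on in its own configuration section is shown below; the schema version, variables, and the run-report.sh script are placeholders, not artifacts from this repository.

<shell xmlns="uri:oozie:shell-action:0.3">
  <job-tracker>${jobTracker}</job-tracker>
  <name-node>${nameNode}</name-node>
  <configuration>
    <!-- Ask Oozie to materialize *-site.xml files and point HADOOP_CONF_DIR at them for this action only -->
    <property>
      <name>oozie.action.shell.setup.hadoop.conf.dir</name>
      <value>true</value>
    </property>
  </configuration>
  <exec>run-report.sh</exec>
  <file>run-report.sh</file>
</shell>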
oozie.action.shell.setup.hadoop.conf.dir.write.log4j.properties true Toggle to control if a log4j.properties file should be written into the configuration directory prepared when oozie.action.shell.setup.hadoop.conf.dir is enabled. This is used to control logging behavior of log4j using commands run within the shell action script, and to ensure logging does not impact output data capture if leaked to stdout. Content of the written file is determined by the value of oozie.action.shell.setup.hadoop.conf.dir.log4j.content. oozie.action.shell.setup.hadoop.conf.dir.log4j.content log4j.rootLogger=INFO,console log4j.appender.console=org.apache.log4j.ConsoleAppender log4j.appender.console.target=System.err log4j.appender.console.layout=org.apache.log4j.PatternLayout log4j.appender.console.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %p %c{2}: %m%n The value to write into a log4j.properties file under the config directory created when oozie.action.shell.setup.hadoop.conf.dir and oozie.action.shell.setup.hadoop.conf.dir.write.log4j.properties properties are both enabled. The values must be properly newline separated and in format expected by Log4J. Trailing and preceding whitespaces will be trimmed when reading this property. This is used to control logging behavior of log4j using commands run within the shell action script. oozie.action.shell.max-print-size-kb 128 When an oozie shell action starts, the shell script will be printed. Scripts larger than the size configured here (in KiB) will not be printed. If this value is less than or equal to zero, the script will not be printed. oozie.action.launcher.yarn.timeline-service.enabled false Enables/disables getting delegation tokens for ATS for the launcher job in YARN/Hadoop 2.6 (no effect in Hadoop 1) for all action types by default if tez-site.xml is present in distributed cache. This can be overridden on a per-action basis by setting oozie.launcher.yarn.timeline-service.enabled in an action's configuration section in a workflow. oozie.action.pig.log.expandedscript true Log the expanded pig script in launcher stdout log oozie.action.rootlogger.log.level INFO Logging level for root logger oozie.action.retries.max 3 The number of retries for executing an action in case of failure oozie.action.retry.interval 10 The interval between retries of an action in case of failure oozie.action.retry.policy periodic Retry policy of an action in case of failure. Possible values are periodic/exponential oozie.action.ssh.delete.remote.tmp.dir true If set to true, it will delete temporary directory at the end of execution of ssh action. oozie.action.ssh.http.command curl Command to use for callback to oozie, normally is 'curl' or 'wget'. The command must available in PATH environment variable of the USER@HOST box shell. oozie.action.ssh.http.command.post.options --data-binary @#stdout --request POST --header "content-type:text/plain" The callback command POST options. Used when the ouptut of the ssh action is captured. oozie.action.ssh.allow.user.at.host true Specifies whether the user specified by the ssh action is allowed or is to be replaced by the Job user oozie.action.ssh.check.retries.max 3 Maximal retry count for ssh action status check oozie.action.ssh.check.initial.retry.wait.time 3000 init wait time that the first retry check needs to wait oozie.action.subworkflow.max.depth 50 The maximum depth for subworkflows. 
For example, if set to 3, then a workflow can start subwf1, which can start subwf2, which can start subwf3; but if subwf3 tries to start subwf4, then the action will fail. This is helpful in preventing errant workflows from starting infintely recursive subworkflows. oozie.service.HadoopAccessorService.kerberos.enabled false Indicates if Oozie is configured to use Kerberos. local.realm LOCALHOST Kerberos Realm used by Oozie and Hadoop. Using 'local.realm' to be aligned with Hadoop configuration oozie.service.HadoopAccessorService.keytab.file ${user.home}/oozie.keytab Location of the Oozie user keytab file. oozie.service.HadoopAccessorService.kerberos.principal ${user.name}/localhost@${local.realm} Kerberos principal for Oozie service. oozie.service.HadoopAccessorService.jobTracker.whitelist Whitelisted job tracker for Oozie service. oozie.service.HadoopAccessorService.nameNode.whitelist Whitelisted job tracker for Oozie service. oozie.service.HadoopAccessorService.hadoop.configurations *=hadoop-conf Comma separated AUTHORITY=HADOOP_CONF_DIR, where AUTHORITY is the HOST:PORT of the Hadoop service (JobTracker, YARN, HDFS). The wildcard '*' configuration is used when there is no exact match for an authority. The HADOOP_CONF_DIR contains the relevant Hadoop *-site.xml files. If the path is relative is looked within the Oozie configuration directory; though the path can be absolute (i.e. to point to Hadoop client conf/ directories in the local filesystem. oozie.service.HadoopAccessorService.action.configurations *=action-conf Comma separated AUTHORITY=ACTION_CONF_DIR, where AUTHORITY is the HOST:PORT of the Hadoop MapReduce service (JobTracker, YARN). The wildcard '*' configuration is used when there is no exact match for an authority. The ACTION_CONF_DIR may contain ACTION.xml files where ACTION is the action type ('java', 'map-reduce', 'pig', 'hive', 'sqoop', etc.). If the ACTION.xml file exists, its properties will be used as defaults properties for the action. If the path is relative is looked within the Oozie configuration directory; though the path can be absolute (i.e. to point to Hadoop client conf/ directories in the local filesystem. oozie.service.HadoopAccessorService.action.configurations.load.default.resources true true means that default and site xml files of hadoop (core-default, core-site, hdfs-default, hdfs-site, mapred-default, mapred-site, yarn-default, yarn-site) are parsed into actionConf on Oozie server. false means that site xml files are not loaded on server, instead loaded on launcher node. This is only done for pig and hive actions which handle loading those files automatically from the classpath on launcher task. It defaults to true. oozie.service.HadoopAccessorService.fs.s3a You can configure custom s3a file system properties globally. Value shall be a comma separated list of key=value pairs. For example: fs.s3a.fast.upload.buffer=bytebuffer,fs.s3a.impl.disable.cache=true Limitation: the custom file system properties cannot contain comma neither in key nor in value. oozie.credentials.credentialclasses A list of credential class mapping for CredentialsProvider oozie.credentials.skip false This determines if Oozie should skip getting credentials from the credential providers. This can be overwritten at a job-level or action-level. 
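To make the AUTHORITY=HADOOP_CONF_DIR mapping described above concrete, a sketch of an oozie-site.xml entry that keeps the wildcard fallback but adds a dedicated configuration directory for one cluster follows; the nn1.example.com authority and the absolute path are assumptions for illustration only.

<configuration>
  <!-- Wildcard fallback plus an explicit mapping for one NameNode authority -->
  <property>
    <name>oozie.service.HadoopAccessorService.hadoop.configurations</name>
    <value>*=hadoop-conf,nn1.example.com:8020=/etc/hadoop/conf-prod</value>
  </property>
</configuration>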
oozie.actions.main.classnames distcp=org.apache.hadoop.tools.DistCp A list of class name mapping for Action classes oozie.service.WorkflowAppService.system.libpath /user/${user.name}/share/lib System library path to use for workflow applications. This path is added to workflow application if their job properties sets the property 'oozie.use.system.libpath' to true. oozie.command.default.lock.timeout 5000 Default timeout (in milliseconds) for commands for acquiring an exclusive lock on an entity. oozie.command.default.requeue.delay 10000 Default time (in milliseconds) for commands that are requeued for delayed execution. oozie.service.LiteWorkflowStoreService.user.retry.max 3 Automatic retry max count for workflow action is 3 in default. oozie.service.LiteWorkflowStoreService.user.retry.inteval 10 Automatic retry interval for workflow action is in minutes and the default value is 10 minutes. oozie.service.LiteWorkflowStoreService.user.retry.policy periodic Automatic retry policy for workflow action. Possible values are periodic or exponential, periodic being the default. oozie.service.LiteWorkflowStoreService.user.retry.error.code JA008,JA009,JA017,JA018,JA019,FS009,FS008,FS014 Automatic retry interval for workflow action is handled for these specified error code: FS009, FS008 is file exists error when using chmod in fs action. FS014 is permission error in fs action JA018 is output directory exists error in workflow map-reduce action. JA019 is error while executing distcp action. JA017 is job not exists error in action executor. JA008 is FileNotFoundException in action executor. JA009 is IOException in action executor. ALL is the any kind of error in action executor. oozie.service.LiteWorkflowStoreService.user.retry.error.code.ext Automatic retry interval for workflow action is handled for these specified extra error code: ALL is the any kind of error in action executor. oozie.service.LiteWorkflowStoreService.node.def.version _oozie_inst_v_2 NodeDef default version, _oozie_inst_v_0, _oozie_inst_v_1 or _oozie_inst_v_2 oozie.authentication.type simple Defines authentication used for Oozie HTTP endpoint. Supported values are: simple | kerberos | #AUTHENTICATION_HANDLER_CLASSNAME# oozie.server.authentication.type ${oozie.authentication.type} Defines authentication used for Oozie server communicating to other Oozie server over HTTP(s). Supported values are: simple | kerberos | #AUTHENTICATOR_CLASSNAME# oozie.server.connection.timeout.seconds 180 Defines connection timeout used for Oozie server communicating to other Oozie server over HTTP(s). Default is 3 min. oozie.authentication.token.validity 36000 Indicates how long (in seconds) an authentication token is valid before it has to be renewed. oozie.authentication.cookie.domain The domain to use for the HTTP cookie that stores the authentication token. In order to authentiation to work correctly across multiple hosts the domain must be correctly set. oozie.authentication.simple.anonymous.allowed true Indicates if anonymous requests are allowed when using 'simple' authentication. oozie.authentication.kerberos.principal HTTP/localhost@${local.realm} Indicates the Kerberos principal to be used for HTTP endpoint. The principal MUST start with 'HTTP/' as per Kerberos HTTP SPNEGO specification. oozie.authentication.kerberos.keytab ${oozie.service.HadoopAccessorService.keytab.file} Location of the keytab file with the credentials for the principal. Referring to the same keytab file Oozie uses for its Kerberos credentials for Hadoop. 
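Pulling the Kerberos-related properties above together, a minimal sketch of a secured setup could look like the following; the realm, host names, keytab path, and principals are illustrative assumptions.

<configuration>
  <!-- Illustrative Kerberos configuration for Oozie -->
  <property>
    <name>oozie.service.HadoopAccessorService.kerberos.enabled</name>
    <value>true</value>
  </property>
  <property>
    <name>local.realm</name>
    <value>EXAMPLE.COM</value>
  </property>
  <property>
    <name>oozie.service.HadoopAccessorService.keytab.file</name>
    <value>/etc/security/keytabs/oozie.keytab</value>
  </property>
  <property>
    <name>oozie.service.HadoopAccessorService.kerberos.principal</name>
    <value>oozie/oozie-server.example.com@EXAMPLE.COM</value>
  </property>
  <property>
    <name>oozie.authentication.type</name>
    <value>kerberos</value>
  </property>
  <property>
    <name>oozie.authentication.kerberos.principal</name>
    <value>HTTP/oozie-server.example.com@EXAMPLE.COM</value>
  </property>
  <property>
    <name>oozie.authentication.kerberos.keytab</name>
    <value>${oozie.service.HadoopAccessorService.keytab.file}</value>
  </property>
</configuration>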
oozie.authentication.kerberos.name.rules DEFAULT The kerberos names rules is to resolve kerberos principal names, refer to Hadoop's KerberosName for more details. oozie.coord.execution.none.tolerance 1 Default time tolerance in minutes after action nominal time for an action to be skipped when execution order is "NONE" oozie.coord.actions.default.length 1000 Default number of coordinator actions to be retrieved by the info command oozie.validate.ForkJoin true If true, fork and join should be validated at wf submission time. oozie.workflow.parallel.fork.action.start true Determines how Oozie processes starting of forked actions. If true, forked actions and their job submissions are done in parallel which is best for performance. If false, they are submitted sequentially. oozie.coord.action.get.all.attributes false Setting to true is not recommended as coord job/action info will bring all columns of the action in memory. Set it true only if backward compatibility for action/job info is required. oozie.service.HadoopAccessorService.supported.filesystems hdfs,hftp,webhdfs Enlist the different filesystems supported for federation. If wildcard "*" is specified, then ALL file schemes will be allowed. oozie.service.URIHandlerService.uri.handlers org.apache.oozie.dependency.FSURIHandler Enlist the different uri handlers supported for data availability checks. oozie.notification.url.connection.timeout 10000 Defines the timeout, in milliseconds, for Oozie HTTP notification callbacks. Oozie does HTTP notifications for workflow jobs which set the 'oozie.wf.action.notification.url', 'oozie.wf.worklfow.notification.url' and/or 'oozie.coord.action.notification.url' properties in their job.properties. Refer to section '5 Oozie Notifications' in the Workflow specification for details. oozie.hadoop-2.0.2-alpha.workaround.for.distributed.cache false Due to a bug in Hadoop 2.0.2-alpha, MAPREDUCE-4820, launcher jobs fail to set the distributed cache for the action job because the local JARs are implicitly included triggering a duplicate check. This flag removes the distributed cache files for the action as they'll be included from the local JARs of the JobClient (MRApps) submitting the action job from the launcher. oozie.service.EventHandlerService.filter.app.types workflow_job, coordinator_action The app-types among workflow/coordinator/bundle job/action for which for which events system is enabled. oozie.service.EventHandlerService.event.queue org.apache.oozie.event.MemoryEventQueue The implementation for EventQueue in use by the EventHandlerService. oozie.service.EventHandlerService.event.listeners org.apache.oozie.jms.JMSJobEventListener oozie.service.EventHandlerService.queue.size 10000 Maximum number of events to be contained in the event queue. oozie.service.EventHandlerService.worker.interval 30 The default interval (seconds) at which the worker threads will be scheduled to run and process events. oozie.service.EventHandlerService.batch.size 10 The batch size for batched draining per thread from the event queue. oozie.service.EventHandlerService.worker.threads 3 Number of worker threads to be scheduled to run and process events. oozie.sla.service.SLAService.capacity 5000 Maximum number of sla records to be contained in the memory structure. oozie.sla.service.SLAService.alert.events END_MISS Default types of SLA events for being alerted of. oozie.sla.service.SLAService.calculator.impl org.apache.oozie.sla.SLACalculatorMemory The implementation for SLACalculator in use by the SLAService. 
oozie.sla.service.SLAService.job.event.latency 90000 Time in milliseconds to account of latency of getting the job status event to compare against and decide sla miss/met oozie.sla.service.SLAService.check.interval 30 Time interval, in seconds, at which SLA Worker will be scheduled to run oozie.sla.disable.alerts.older.than 48 Time threshold, in HOURS, for disabling SLA alerting for jobs whose nominal time is older than this. oozie.sla.service.SLAService.maximum.retry.count 3 Number of times an SLA calculator status will be tried to get updated when any database related error occurs. It's possible that multiple WorkflowJobBean / CoordActionBean instances being inserted won't have SLACalcStatus entries inside SLACalculatorMemory#slaMap by the time written to database, and thus, no SLA will be tracked. In those rare cases, preconfigured maximum retry count can be extended. oozie.zookeeper.connection.string localhost:2181 Comma-separated values of host:port pairs of the ZooKeeper servers. oozie.zookeeper.namespace oozie The namespace to use. All of the Oozie Servers that are planning on talking to each other should have the same namespace. oozie.zookeeper.connection.timeout 180 Default ZK connection timeout (in sec). oozie.zookeeper.session.timeout 300 Default ZK session timeout (in sec). If connection is lost even after retry, then Oozie server will shutdown itself if oozie.zookeeper.server.shutdown.ontimeout is true. oozie.zookeeper.max.retries 10 Maximum number of times to retry. oozie.zookeeper.server.shutdown.ontimeout true If true, Oozie server will shutdown itself on ZK connection timeout. oozie.service.ZKLocksService.lock.release.retry.time.limit.minutes 30 On exception while releasing the lock, Oozie will exponentially retry till specified minutes before giving up. oozie.http.hostname 0.0.0.0 Oozie server host name. The network interface Oozie server binds to as an IP address or a hostname. Most users won't need to change this setting from the default value. oozie.http.port 11000 Oozie server port. oozie.http.request.header.size 65536 Oozie HTTP request header size. oozie.http.response.header.size 65536 Oozie HTTP response header size. oozie.https.port 11443 Oozie ssl server port. oozie.https.enabled false Controls whether SSL encryption is enabled. oozie.https.truststore.file Path to a TrustStore file. oozie.https.keystore.file Path to a KeyStore file. oozie.https.keystore.pass Password to the KeyStore. oozie.https.include.protocols TLSv1.1,TLSv1.2,TLSv1.3 Enabled TLS protocols. oozie.https.exclude.protocols Disabled TLS protocols. oozie.https.include.cipher.suites List of Cipher suites to include. oozie.https.exclude.cipher.suites TLS_ECDHE_RSA_WITH_RC4_128_SHA,SSL_DHE_RSA_EXPORT_WITH_DES40_CBC_SHA,SSL_RSA_WITH_DES_CBC_SHA,SSL_DHE_RSA_WITH_DES_CBC_SHA,SSL_RSA_EXPORT_WITH_RC4_40_MD5,SSL_RSA_EXPORT_WITH_DES40_CBC_SHA,SSL_RSA_WITH_RC4_128_MD5 List of weak Cipher suites to exclude. oozie.hsts.max.age.seconds 31536000 Strict Transport Security max age in seconds if SSL is enabled. Ideally it is set to one year (31536000 sec). oozie.jsp.tmp.dir /tmp Temporary directory for compiling JSP pages. oozie.server.threadpool.max.threads 150 Controls the threadpool size for the Oozie Server (if using embbedded Jetty) oozie.service.ShareLibService.mapping.file Sharelib mapping files contains list of key=value, where key will be the sharelib name for the action and value is a comma separated list of DFS or local filesystem directories or jar files. Example. 
oozie.pig_10=hdfs:///share/lib/pig/pig-0.10.1/lib/ oozie.pig=hdfs:///share/lib/pig/pig-0.11.1/lib/ oozie.distcp=hdfs:///share/lib/hadoop-2.2.0/share/hadoop/tools/lib/hadoop-distcp-2.2.0.jar oozie.hive=file:///usr/local/oozie/share/lib/hive/ oozie.service.ShareLibService.fail.fast.on.startup false Fails server startup if sharelib initialization fails. oozie.service.ShareLibService.purge.interval 1 How often, in days, Oozie should check for old ShareLibs and LauncherLibs to purge from HDFS. oozie.service.ShareLibService.temp.sharelib.retention.days 7 ShareLib retention time in days. oozie.action.ship.launcher.jar false Specifies whether the launcher jar is shipped or not. oozie.action.jobinfo.enable false JobInfo will contain information of bundle, coordinator, workflow and actions. If enabled, the hadoop job will have a property (oozie.job.info) whose value is multiple key/value pairs separated by ",". This information can be used for analytics like how many oozie jobs are submitted for a particular period, what is the total number of failed pig jobs, etc. from mapreduce job history logs and configuration. Users can also add a custom workflow property to jobinfo by adding a property prefixed with "oozie.job.info." Eg. oozie.job.info="bundle.id=,bundle.name=,coord.name=,coord.nominal.time=,coord.name=,wf.id=, wf.name=,action.name=,action.type=,launcher=true" oozie.service.XLogStreamingService.max.log.scan.duration -1 Max log scan duration in hours. If log scan request end_date - start_date > value, then an exception is thrown to reduce the scan duration. -1 indicates no limit. oozie.service.XLogStreamingService.actionlist.max.log.scan.duration -1 Max log scan duration in hours for a coordinator job when a list of actions is specified. If log streaming request end_date - start_date > value, then an exception is thrown to reduce the scan duration. -1 indicates no limit. This setting is separate from max.log.scan.duration as we want to allow higher durations when actions are specified. oozie.service.JvmPauseMonitorService.warn-threshold.ms 10000 The JvmPauseMonitorService runs a thread that repeatedly tries to detect when the JVM pauses, which could indicate that the JVM or host machine is overloaded or has other problems. This thread sleeps for 500ms; if it sleeps for significantly longer, then there is likely a problem. This property specifies the threshold for when Oozie should log a WARN level message; there is also a counter named "jvm.pause.warn-threshold". oozie.service.JvmPauseMonitorService.info-threshold.ms 1000 The JvmPauseMonitorService runs a thread that repeatedly tries to detect when the JVM pauses, which could indicate that the JVM or host machine is overloaded or has other problems. This thread sleeps for 500ms; if it sleeps for significantly longer, then there is likely a problem. This property specifies the threshold for when Oozie should log an INFO level message; there is also a counter named "jvm.pause.info-threshold". oozie.service.ZKLocksService.locks.reaper.threshold 300 The frequency at which the ChildReaper will run. Duration should be in sec. Default is 5 min. oozie.service.ZKLocksService.locks.reaper.threads 2 Number of fixed threads used by ChildReaper to delete empty locks. oozie.service.AbandonedCoordCheckerService.check.interval 1440 Interval, in minutes, at which AbandonedCoordCheckerService should run. oozie.service.AbandonedCoordCheckerService.check.delay 60 Delay, in minutes, at which AbandonedCoordCheckerService should run.
oozie.service.AbandonedCoordCheckerService.failure.limit 25 Failure limit. A job is considered to be abandoned/faulty if total number of actions in failed/timedout/suspended >= "Failure limit" and there are no succeeded action. oozie.service.AbandonedCoordCheckerService.kill.jobs false If true, AbandonedCoordCheckerService will kill abandoned coords. oozie.service.AbandonedCoordCheckerService.job.older.than 2880 In minutes, job will be considered as abandoned/faulty if job is older than this value. oozie.notification.proxy System level proxy setting for job notifications. oozie.wf.rerun.disablechild false By setting this option, workflow rerun will be disabled if parent workflow or coordinator exist and it will only rerun through parent. oozie.use.system.libpath false Default value of oozie.use.system.libpath. If user haven't specified =oozie.use.system.libpath= in the job.properties and this value is true and Oozie will include sharelib jars for workflow. oozie.service.PauseTransitService.callable.batch.size 10 This value determines the number of callable which will be batched together to be executed by a single thread. oozie.configuration.substitute.depth 20 This value determines the depth of substitution in configurations. If set -1, No limitation on substitution. oozie.service.SparkConfigurationService.spark.configurations *=spark-conf Comma separated AUTHORITY=SPARK_CONF_DIR, where AUTHORITY is the HOST:PORT of the ResourceManager of a YARN cluster. The wildcard '*' configuration is used when there is no exact match for an authority. The SPARK_CONF_DIR contains the relevant spark-defaults.conf properties file. If the path is relative is looked within the Oozie configuration directory; though the path can be absolute. This is only used when the Spark master is set to either "yarn-client" or "yarn-cluster". oozie.service.SparkConfigurationService.spark.configurations.blacklist spark.yarn.jar,spark.yarn.jars Comma separated list of properties to ignore from any Spark configurations specified in oozie.service.SparkConfigurationService.spark.configurations property. oozie.service.SparkConfigurationService.spark.configurations.ignore.spark.yarn.jar true Deprecated. Use oozie.service.SparkConfigurationService.spark.configurations.blacklist instead. If true, Oozie will ignore the "spark.yarn.jar" property from any Spark configurations specified in oozie.service.SparkConfigurationService.spark.configurations. If false, Oozie will not ignore it. It is recommended to leave this as true because it can interfere with the jars in the Spark sharelib. oozie.email.attachment.enabled true This value determines whether to support email attachment of a file on HDFS. Set it false if there is any security concern. oozie.email.smtp.host localhost The host where the email action may find the SMTP server. oozie.email.smtp.port 25 The port to connect to for the SMTP server, for email actions. oozie.email.smtp.auth false Boolean property that toggles if authentication is to be done or not when using email actions. oozie.email.smtp.starttls.enable false Boolean property that toggles if use TLS in communication or not. oozie.email.smtp.username If authentication is enabled for email actions, the username to login as (to the SMTP server). oozie.email.smtp.password If authentication is enabled for email actions, the password to login with (to the SMTP server). oozie.email.from.address oozie@localhost The from address to be used for mailing all emails done via the email action. 
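As a worked example of the email action settings above, an SMTP configuration with authentication and STARTTLS enabled might be declared as follows; the host, port, credentials, and from address are illustrative assumptions.

<configuration>
  <!-- Illustrative SMTP settings for the email action -->
  <property>
    <name>oozie.email.smtp.host</name>
    <value>smtp.example.com</value>
  </property>
  <property>
    <name>oozie.email.smtp.port</name>
    <value>587</value>
  </property>
  <property>
    <name>oozie.email.smtp.starttls.enable</name>
    <value>true</value>
  </property>
  <property>
    <name>oozie.email.smtp.auth</name>
    <value>true</value>
  </property>
  <property>
    <name>oozie.email.smtp.username</name>
    <value>oozie-mailer</value>
  </property>
  <property>
    <name>oozie.email.smtp.password</name>
    <value>changeit</value>
  </property>
  <property>
    <name>oozie.email.from.address</name>
    <value>oozie@example.com</value>
  </property>
</configuration>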
oozie.email.smtp.socket.timeout.ms 10000 The timeout to apply over all SMTP server socket operations done during the email action. oozie.actions.default.name-node The default value to use for the <name-node> element in applicable action types. This value will be used when neither the action itself nor the global section specifies a <name-node>. As expected, it should be of the form "hdfs://HOST:PORT". oozie.actions.default.job-tracker The default value to use for the <job-tracker> element in applicable action types. This value will be used when neither the action itself nor the global section specifies a <job-tracker>. As expected, it should be of the form "HOST:PORT". oozie.actions.default.resource-manager The default value to use for the <resource-manager> element in applicable action types. This value will be used when neither the action itself nor the global section specifies a <resource-managerr>. As expected, it should be of the form "HOST:PORT". If both oozie.actions.default.job-tracker and oozie.actions.default.resource-manager are specified, oozie.actions.default.resource-manager takes precedence. oozie.service.SchemaCheckerService.check.interval 168 This is the interval at which Oozie will check the database schema, in hours. A zero or negative value will disable the checker. oozie.service.SchemaCheckerService.ignore.extras false When set to false, the schema checker will consider extra (unused) tables, columns, and indexes to be incorrect. When set to true, these will be ignored. oozie.hcat.uri.regex.pattern ([a-z]+://[\w\.\-]+:\d+[,]*)+/\w+/\w+/?[\w+=;\-]* Regex pattern for HCat URIs. The regex can be modified by users as per requirement for parsing/splitting the HCat URIs. oozie.action.null.args.allowed true When set to true, empty arguments (like <arg></arg>) will be passed as "null" to the main method of a given action. That is, the args[] array will contain "null" elements. When set to false, then "nulls" are removed. oozie.javax.xml.parsers.DocumentBuilderFactory org.apache.xerces.jaxp.DocumentBuilderFactoryImpl Oozie will set the javax.xml.parsers.DocumentBuilderFactory Java System Property to this value. This helps speed up XML handling because the JVM doesn't have to search for the proper class every time. An empty or whitespace value skips setting the System Property. The default implementation that Oozie uses is Xerces. Most users should not have to change this. oozie.graphviz.timeout.seconds 60 The default number of seconds Graphviz graph generation will timeout. oozie.launcher.default.vcores 1 The default number of vcores that are allocated for the Launcher AMs oozie.launcher.default.memory.mb 2048 The default amount of memory in MBs that is allocated for the Launcher AMs oozie.launcher.default.priority 0 The default YARN priority of the Launcher AM oozie.launcher.default.queue default The default YARN queue where the Launcher AM is placed oozie.launcher.default.max.attempts 2 The default YARN maximal attempt count of the Launcher AM oozie.launcher.override true Whether oozie.launcher.override.* and oozie.launcher.prepend.* parameters have to be considered when submitting a YARN LauncherAM. That is, existing MapReduce v1, MapReduce v2, or YARN parameters used in the action configuration should be populated to the Application Master launcher configuration, or not. Generally, first <launcher/> tag specific user settings, then YARN configuration settings, then MapReduce v2, and at last, MapReduce v1 properties are copied to launcher configuration. 
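The launcher defaults described above are server-wide settings; a sketch of adjusting them in oozie-site.xml is shown below, with the sizes and queue name chosen purely for illustration.

<configuration>
  <!-- Illustrative Launcher AM defaults; values are assumptions, not recommendations -->
  <property>
    <name>oozie.launcher.default.memory.mb</name>
    <value>1024</value>
  </property>
  <property>
    <name>oozie.launcher.default.vcores</name>
    <value>1</value>
  </property>
  <property>
    <name>oozie.launcher.default.queue</name>
    <value>launchers</value>
  </property>
  <property>
    <name>oozie.launcher.default.max.attempts</name>
    <value>2</value>
  </property>
</configuration>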
oozie.launcher.override.max.attempts mapreduce.map.maxattempts,mapred.map.max.attempts A comma separated list of MapReduce v1 and MapReduce v2 properties to override the max attempts of the MapReduce Application Master. The first one that is found will be used. oozie.launcher.override.memory.mb yarn.app.mapreduce.am.resource.mb,mapreduce.map.memory.mb,mapred.job.map.memory.mb A comma separated list of MapReduce v1, MapReduce v2, and YARN properties to override the memory amount in MB of the MapReduce Application Master. The first one that is found will be used. oozie.launcher.override.vcores yarn.app.mapreduce.am.resource.cpu-vcores,mapreduce.map.cpu.vcores A comma separated list of MapReduce v1, MapReduce v2, and YARN properties to override the CPU vcore count of the MapReduce Application Master. The first one that is found will be used. oozie.launcher.override.log.level mapreduce.map.log.level,mapred.map.child.log.level A comma separated list of MapReduce v1, MapReduce v2, and YARN properties to override the logging level of the MapReduce Application Master. The first one that is found will be used. oozie.launcher.override.javaopts yarn.app.mapreduce.am.command-opts,mapreduce.map.java.opts,mapred.child.java.opts A comma separated list of MapReduce v1, MapReduce v2, and YARN properties to override MapReduce Application Master JVM options. The first one that is found will be used. oozie.launcher.prepend.javaopts yarn.app.mapreduce.am.admin-command-opts A comma separated list of YARN properties to prepend to MapReduce Application Master JVM options. The first one that is found will be prepended to the list of JVM options. oozie.launcher.override.env yarn.app.mapreduce.am.env,mapreduce.map.env,mapred.child.env A comma separated list of MapReduce v1, MapReduce v2, and YARN properties to override MapReduce Application Master environment variable settings. The first one that is found will be used. oozie.launcher.prepend.env yarn.app.mapreduce.am.admin.user.env A comma separated list of YARN properties to prepend to MapReduce Application Master environment settings. The first one that is found will be prepended to the list of environment settings. oozie.launcher.override.priority mapreduce.job.priority,mapred.job.priority A comma separated list of MapReduce v1 and MapReduce v2 to override MapReduce Application Master job priority. The first one that is found will be used. oozie.launcher.override.queue mapreduce.job.queuename,mapred.job.queue.name A comma separated list of MapReduce v1 and MapReduce v2 properties to override MapReduce Application Master job queue name. The first one that is found will be used. oozie.launcher.override.view.acl mapreduce.job.acl-view-job A comma separated list of MapReduce v1 and MapReduce v2 properties to override MapReduce View ACL settings. The first one that is found will be used. oozie.launcher.override.modify.acl mapreduce.job.acl-modify-job A comma separated list of MapReduce v1 and MapReduce v2 properties to override MapReduce Modify ACL settings. The first one that is found will be used. oozie.action.mapreduce.needed.for.distcp true Whether to add MapReduce jars to the DistCp action's classpath's by default. oozie.action.mapreduce.needed.for.hive true Whether to add MapReduce jars to the Hive action's classpath's by default. oozie.action.mapreduce.needed.for.hive2 true Whether to add MapReduce jars to the Hive2 action's classpath's by default. 
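As a hedged example of how the override lists above are applied (the memory value is arbitrary): with oozie.launcher.override left at true, a YARN property such as yarn.app.mapreduce.am.resource.mb that is already present in an action's configuration is the first match in oozie.launcher.override.memory.mb, so it would be copied into the Launcher AM configuration instead of the oozie.launcher.default.memory.mb default:

  <configuration>
    <!-- set in the action configuration; the first match in
         oozie.launcher.override.memory.mb wins and sizes the Launcher AM -->
    <property>
      <name>yarn.app.mapreduce.am.resource.mb</name>
      <value>4096</value>
    </property>
  </configuration>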
oozie.action.mapreduce.needed.for.java true Whether to add MapReduce jars to the Java action's classpath's by default. oozie.action.mapreduce.needed.for.map-reduce true Whether to add MapReduce jars to the Map-Reduce action's classpath's by default. oozie.action.mapreduce.needed.for.pig true Whether to add MapReduce jars to the Pig action's classpath's by default. oozie.action.mapreduce.needed.for.sqoop true Whether to add MapReduce jars to the Sqoop action's classpath's by default. oozie.action.sqoop.shellsplitter false Whether to use shell splitter instead of the space-based tokenizer during sqoop command splitting. oozie.fluent-job-api.generated.path /user/${user.name}/oozie-fluent-job-api-generated HDFS path to store workflow / coordinator / bundle definitions generated by fluent-job-api artifact. The XML files are first generated out of the fluent-job-api JARs submitted by the user at command line, then stored under this HDFS folder structure for later retrieval / resubmit / check. Note that the submitting user needs r/w permissions under this HDFS folder. Note further that this folder structure, when does not exist, will be created. ================================================ FILE: kettle-plugins/hadoop-cluster/ui/src/test/resources/missing-info/yarn-site.xml ================================================ yarn.acl.enable true yarn.admin.acl * yarn.resourcemanager.address cdh62n2.pentaho yarn.resourcemanager.admin.address cdh62n2.pentaho.net:8033 yarn.resourcemanager.scheduler.address cdh62n2.pentaho.net:8030 yarn.resourcemanager.resource-tracker.address cdh62n2.pentaho.net:8031 yarn.resourcemanager.webapp.address cdh62n2.pentaho.net:8088 yarn.resourcemanager.webapp.https.address cdh62n2.pentaho.net:8090 yarn.resourcemanager.client.thread-count 50 yarn.resourcemanager.scheduler.client.thread-count 50 yarn.resourcemanager.admin.client.thread-count 1 yarn.scheduler.minimum-allocation-mb 1024 yarn.scheduler.increment-allocation-mb 512 yarn.scheduler.maximum-allocation-mb 2273 yarn.scheduler.minimum-allocation-vcores 1 yarn.scheduler.increment-allocation-vcores 1 yarn.scheduler.maximum-allocation-vcores 8 yarn.resourcemanager.amliveliness-monitor.interval-ms 1000 yarn.am.liveness-monitor.expiry-interval-ms 600000 yarn.resourcemanager.am.max-attempts 2 yarn.resourcemanager.container.liveness-monitor.interval-ms 600000 yarn.resourcemanager.nm.liveness-monitor.interval-ms 1000 yarn.nm.liveness-monitor.expiry-interval-ms 600000 yarn.resourcemanager.resource-tracker.client.thread-count 50 yarn.application.classpath $HADOOP_CLIENT_CONF_DIR,$HADOOP_COMMON_HOME/*,$HADOOP_COMMON_HOME/lib/*,$HADOOP_HDFS_HOME/*,$HADOOP_HDFS_HOME/lib/*,$HADOOP_YARN_HOME/*,$HADOOP_YARN_HOME/lib/* yarn.resourcemanager.scheduler.class org.apache.hadoop.yarn.server.resourcemanager.scheduler.fair.FairScheduler yarn.scheduler.capacity.resource-calculator org.apache.hadoop.yarn.util.resource.DefaultResourceCalculator yarn.resourcemanager.max-completed-applications 10000 yarn.nodemanager.remote-app-log-dir /tmp/logs yarn.nodemanager.remote-app-log-dir-suffix logs ================================================ FILE: kettle-plugins/hadoop-cluster/ui/src/test/resources/secured/core-site.xml ================================================ fs.defaultFS hdfs://CDH62Secure fs.trash.interval 1 io.compression.codecs 
org.apache.hadoop.io.compress.DefaultCodec,org.apache.hadoop.io.compress.GzipCodec,org.apache.hadoop.io.compress.BZip2Codec,org.apache.hadoop.io.compress.DeflateCodec,org.apache.hadoop.io.compress.SnappyCodec,org.apache.hadoop.io.compress.Lz4Codec hadoop.security.authentication kerberos hadoop.security.authorization true hadoop.rpc.protection privacy hadoop.security.key.provider.path kms://https@cdh62secn1.pentaho.net:16000/kms hadoop.security.auth_to_local DEFAULT hadoop.proxyuser.oozie.hosts * hadoop.proxyuser.oozie.groups * hadoop.proxyuser.flume.hosts * hadoop.proxyuser.flume.groups * hadoop.proxyuser.HTTP.hosts * hadoop.proxyuser.HTTP.groups * hadoop.proxyuser.hive.hosts * hadoop.proxyuser.hive.groups * hadoop.proxyuser.hue.hosts * hadoop.proxyuser.hue.groups * hadoop.proxyuser.httpfs.hosts * hadoop.proxyuser.httpfs.groups * hadoop.proxyuser.hdfs.groups * hadoop.proxyuser.hdfs.hosts * hadoop.proxyuser.yarn.hosts * hadoop.proxyuser.yarn.groups * hadoop.security.group.mapping org.apache.hadoop.security.ShellBasedUnixGroupsMapping hadoop.security.instrumentation.requires.admin false net.topology.script.file.name /etc/hadoop/conf.cloudera.yarn/topology.py io.file.buffer.size 65536 hadoop.ssl.enabled true hadoop.ssl.require.client.cert false true hadoop.ssl.keystores.factory.class org.apache.hadoop.security.ssl.FileBasedKeyStoresFactory true hadoop.ssl.server.conf ssl-server.xml true hadoop.ssl.client.conf ssl-client.xml true ================================================ FILE: kettle-plugins/hadoop-cluster/ui/src/test/resources/secured/hive-site.xml ================================================ hive.metastore.uris thrift://cdh62secn3.pentaho.net:9083 hive.metastore.client.socket.timeout 300 hive.metastore.warehouse.dir /user/hive/warehouse hive.warehouse.subdir.inherit.perms true hive.auto.convert.join true hive.auto.convert.join.noconditionaltask.size 20971520 hive.optimize.bucketmapjoin.sortedmerge false hive.smbjoin.cache.rows 10000 hive.server2.logging.operation.enabled true hive.server2.logging.operation.log.location /var/log/hive/operation_logs mapred.reduce.tasks -1 hive.exec.reducers.bytes.per.reducer 67108864 hive.exec.copyfile.maxsize 104857600 hive.exec.reducers.max 1099 hive.vectorized.groupby.checkinterval 4096 hive.vectorized.groupby.flush.percent 0.1 hive.compute.query.using.stats false hive.vectorized.execution.enabled true hive.vectorized.execution.reduce.enabled true hive.vectorized.use.vectorized.input.format true hive.vectorized.use.checked.expressions true hive.vectorized.use.vector.serde.deserialize false hive.vectorized.adaptor.usage.mode chosen hive.vectorized.input.format.excludes org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat hive.merge.mapfiles true hive.merge.mapredfiles false hive.cbo.enable false hive.fetch.task.conversion minimal hive.fetch.task.conversion.threshold 268435456 hive.limit.pushdown.memory.usage 0.1 hive.merge.sparkfiles true hive.merge.smallfiles.avgsize 16777216 hive.merge.size.per.task 268435456 hive.optimize.reducededuplication true hive.optimize.reducededuplication.min.reducer 4 hive.map.aggr true hive.map.aggr.hash.percentmemory 0.5 hive.optimize.sort.dynamic.partition false hive.execution.engine mr spark.executor.memory 743781171b spark.driver.memory 966367641b spark.executor.cores 4 spark.yarn.driver.memoryOverhead 102m spark.yarn.executor.memoryOverhead 125m spark.dynamicAllocation.enabled true spark.dynamicAllocation.initialExecutors 1 spark.dynamicAllocation.minExecutors 1 spark.dynamicAllocation.maxExecutors 
2147483647 hive.metastore.execute.setugi true hive.support.concurrency true hive.zookeeper.quorum cdh62secn2.pentaho.net,cdh62secn1.pentaho.net,cdh62secn3.pentaho.net hive.zookeeper.client.port 2181 hive.zookeeper.namespace hive_zookeeper_namespace_hive hive.cluster.delegation.token.store.class org.apache.hadoop.hive.thrift.MemoryTokenStore hive.server2.enable.doAs false hive.metastore.sasl.enabled true hive.server2.authentication kerberos hive.metastore.kerberos.principal hive/_HOST@PENTAHO.NET hive.server2.authentication.kerberos.principal hive/_HOST@PENTAHO.NET hive.server2.use.SSL true spark.shuffle.service.enabled true hive.strict.checks.orderby.no.limit false hive.strict.checks.no.partition.filter false hive.strict.checks.type.safety true hive.strict.checks.cartesian.product false hive.strict.checks.bucketing true ================================================ FILE: kettle-plugins/hadoop-cluster/ui/src/test/resources/secured/yarn-site.xml ================================================ yarn.acl.enable true yarn.admin.acl * yarn.resourcemanager.ha.enabled true yarn.resourcemanager.ha.automatic-failover.enabled true yarn.resourcemanager.ha.automatic-failover.embedded true yarn.resourcemanager.recovery.enabled true yarn.resourcemanager.zk-address cdh62secn2.pentaho.net:2181,cdh62secn1.pentaho.net:2181,cdh62secn3.pentaho.net:2181 yarn.resourcemanager.store.class org.apache.hadoop.yarn.server.resourcemanager.recovery.ZKRMStateStore yarn.client.failover-sleep-base-ms 100 yarn.client.failover-sleep-max-ms 2000 yarn.resourcemanager.cluster-id yarnRM yarn.resourcemanager.address.rm218 cdh62secn1.pentaho.net:8032 yarn.resourcemanager.scheduler.address.rm218 cdh62secn1.pentaho.net:8030 yarn.resourcemanager.resource-tracker.address.rm218 cdh62secn1.pentaho.net:8031 yarn.resourcemanager.admin.address.rm218 cdh62secn1.pentaho.net:8033 yarn.resourcemanager.webapp.address.rm218 cdh62secn1.pentaho.net:8088 yarn.resourcemanager.webapp.https.address.rm218 cdh62secn1.pentaho.net:8090 yarn.resourcemanager.address.rm104 cdh62secn2.pentaho.net:8032 yarn.resourcemanager.scheduler.address.rm104 cdh62secn2.pentaho.net:8030 yarn.resourcemanager.resource-tracker.address.rm104 cdh62secn2.pentaho.net:8031 yarn.resourcemanager.admin.address.rm104 cdh62secn2.pentaho.net:8033 yarn.resourcemanager.webapp.address.rm104 cdh62secn2.pentaho.net:8088 yarn.resourcemanager.webapp.https.address.rm104 cdh62secn2.pentaho.net:8090 yarn.resourcemanager.ha.rm-ids rm218,rm104 yarn.resourcemanager.client.thread-count 50 yarn.resourcemanager.scheduler.client.thread-count 50 yarn.resourcemanager.admin.client.thread-count 1 yarn.scheduler.minimum-allocation-mb 1024 yarn.scheduler.increment-allocation-mb 512 yarn.scheduler.maximum-allocation-mb 16384 yarn.scheduler.minimum-allocation-vcores 1 yarn.scheduler.increment-allocation-vcores 1 yarn.scheduler.maximum-allocation-vcores 8 yarn.resourcemanager.amliveliness-monitor.interval-ms 1000 yarn.am.liveness-monitor.expiry-interval-ms 600000 yarn.resourcemanager.am.max-attempts 2 yarn.resourcemanager.container.liveness-monitor.interval-ms 600000 yarn.resourcemanager.nm.liveness-monitor.interval-ms 1000 yarn.nm.liveness-monitor.expiry-interval-ms 600000 yarn.resourcemanager.resource-tracker.client.thread-count 50 yarn.application.classpath $HADOOP_CLIENT_CONF_DIR,$HADOOP_COMMON_HOME/*,$HADOOP_COMMON_HOME/lib/*,$HADOOP_HDFS_HOME/*,$HADOOP_HDFS_HOME/lib/*,$HADOOP_YARN_HOME/*,$HADOOP_YARN_HOME/lib/* yarn.resourcemanager.scheduler.class 
org.apache.hadoop.yarn.server.resourcemanager.scheduler.fair.FairScheduler yarn.scheduler.capacity.resource-calculator org.apache.hadoop.yarn.util.resource.DefaultResourceCalculator yarn.resourcemanager.max-completed-applications 10000 yarn.nodemanager.remote-app-log-dir /tmp/logs yarn.nodemanager.remote-app-log-dir-suffix logs yarn.resourcemanager.principal yarn/_HOST@PENTAHO.NET yarn.http.policy HTTPS_ONLY ================================================ FILE: kettle-plugins/hadoop-cluster/ui/src/test/resources/unsecured/core-site.xml ================================================ fs.defaultFS hdfs://svqxbdcn6cdh514un4.pentahoqa.com:8020 fs.trash.interval 1 io.compression.codecs org.apache.hadoop.io.compress.DefaultCodec,org.apache.hadoop.io.compress.GzipCodec,org.apache.hadoop.io.compress.BZip2Codec,org.apache.hadoop.io.compress.DeflateCodec,org.apache.hadoop.io.compress.SnappyCodec,org.apache.hadoop.io.compress.Lz4Codec hadoop.security.authentication simple hadoop.security.authorization false hadoop.rpc.protection authentication hadoop.security.auth_to_local DEFAULT hadoop.proxyuser.oozie.hosts * hadoop.proxyuser.oozie.groups * hadoop.proxyuser.mapred.hosts * hadoop.proxyuser.mapred.groups * hadoop.proxyuser.flume.hosts * hadoop.proxyuser.flume.groups * hadoop.proxyuser.HTTP.hosts * hadoop.proxyuser.HTTP.groups * hadoop.proxyuser.hive.hosts * hadoop.proxyuser.hive.groups * hadoop.proxyuser.hue.hosts * hadoop.proxyuser.hue.groups * hadoop.proxyuser.httpfs.hosts * hadoop.proxyuser.httpfs.groups * hadoop.proxyuser.hdfs.groups * hadoop.proxyuser.hdfs.hosts * hadoop.proxyuser.yarn.hosts * hadoop.proxyuser.yarn.groups * hadoop.security.group.mapping org.apache.hadoop.security.ShellBasedUnixGroupsMapping hadoop.security.instrumentation.requires.admin false net.topology.script.file.name /etc/hadoop/conf.cloudera.yarn/topology.py io.file.buffer.size 65536 hadoop.ssl.enabled false hadoop.ssl.require.client.cert false true hadoop.ssl.keystores.factory.class org.apache.hadoop.security.ssl.FileBasedKeyStoresFactory true hadoop.ssl.server.conf ssl-server.xml true hadoop.ssl.client.conf ssl-client.xml true ================================================ FILE: kettle-plugins/hadoop-cluster/ui/src/test/resources/unsecured/hive-site.xml ================================================ hive.metastore.uris thrift://svqxbdcn6cdh514un4.pentahoqa.com:9083 hive.metastore.client.socket.timeout 300 hive.metastore.warehouse.dir /user/hive/warehouse hive.warehouse.subdir.inherit.perms true hive.auto.convert.join true hive.auto.convert.join.noconditionaltask.size 20971520 hive.optimize.bucketmapjoin.sortedmerge false hive.smbjoin.cache.rows 10000 hive.server2.logging.operation.enabled true hive.server2.logging.operation.log.location /var/log/hive/operation_logs mapred.reduce.tasks -1 hive.exec.reducers.bytes.per.reducer 67108864 hive.exec.copyfile.maxsize 104857600 hive.exec.reducers.max 1099 hive.vectorized.groupby.checkinterval 4096 hive.vectorized.groupby.flush.percent 0.1 hive.compute.query.using.stats false hive.vectorized.execution.enabled true hive.vectorized.execution.reduce.enabled false hive.merge.mapfiles true hive.merge.mapredfiles false hive.cbo.enable false hive.fetch.task.conversion minimal hive.fetch.task.conversion.threshold 268435456 hive.limit.pushdown.memory.usage 0.1 hive.merge.sparkfiles true hive.merge.smallfiles.avgsize 16777216 hive.merge.size.per.task 268435456 hive.optimize.reducededuplication true hive.optimize.reducededuplication.min.reducer 4 hive.map.aggr true 
hive.map.aggr.hash.percentmemory 0.5 hive.optimize.sort.dynamic.partition false hive.execution.engine mr spark.executor.memory 1828926259 spark.driver.memory 966367641 spark.executor.cores 4 spark.yarn.driver.memoryOverhead 102 spark.yarn.executor.memoryOverhead 307 spark.dynamicAllocation.enabled true spark.dynamicAllocation.initialExecutors 1 spark.dynamicAllocation.minExecutors 1 spark.dynamicAllocation.maxExecutors 2147483647 hive.metastore.execute.setugi true hive.support.concurrency true hive.zookeeper.quorum svqxbdcn6cdh514un1.pentahoqa.com,svqxbdcn6cdh514un5.pentahoqa.com,svqxbdcn6cdh514un4.pentahoqa.com,svqxbdcn6cdh514un2.pentahoqa.com,svqxbdcn6cdh514un3.pentahoqa.com hive.zookeeper.client.port 2181 hive.zookeeper.namespace hive_zookeeper_namespace_hive hbase.zookeeper.quorum svqxbdcn6cdh514un1.pentahoqa.com,svqxbdcn6cdh514un5.pentahoqa.com,svqxbdcn6cdh514un4.pentahoqa.com,svqxbdcn6cdh514un2.pentahoqa.com,svqxbdcn6cdh514un3.pentahoqa.com hbase.zookeeper.property.clientPort 2181 hive.cluster.delegation.token.store.class org.apache.hadoop.hive.thrift.MemoryTokenStore hive.server2.enable.doAs true hive.server2.use.SSL false spark.shuffle.service.enabled true ================================================ FILE: kettle-plugins/hadoop-cluster/ui/src/test/resources/unsecured/oozie-default.xml ================================================ oozie.output.compression.codec gz The name of the compression codec to use. The implementation class for the codec needs to be specified through another property oozie.compression.codecs. You can specify a comma separated list of 'Codec_name'='Codec_class' for oozie.compression.codecs where codec class implements the interface org.apache.oozie.compression.CompressionCodec. If oozie.compression.codecs is not specified, gz codec implementation is used by default. oozie.external_monitoring.enable false If the oozie functional metrics needs to be exposed to the metrics-server backend, set it to true If set to true, the following properties has to be specified : oozie.metrics.server.name, oozie.metrics.host, oozie.metrics.prefix, oozie.metrics.report.interval.sec, oozie.metrics.port oozie.external_monitoring.type graphite The name of the server to which we want to send the metrics, would be graphite or ganglia. oozie.external_monitoring.address http://localhost:2020 oozie.external_monitoring.metricPrefix oozie oozie.external_monitoring.reporterIntervalSecs 60 oozie.jmx_monitoring.enable false If the oozie functional metrics needs to be exposed via JMX interface, set it to true. oozie.action.mapreduce.uber.jar.enable false If true, enables the oozie.mapreduce.uber.jar mapreduce workflow configuration property, which is used to specify an uber jar in HDFS. Submitting a workflow with an uber jar requires at least Hadoop 2.2.0 or 1.2.0. If false, workflows which specify the oozie.mapreduce.uber.jar configuration property will fail. oozie.action.dependency.deduplicate false If true, then Oozie will remove all the duplicates from the list of dependencies when they are passed to the jobtracker. Higher priority dependencies will remain as the following: Original list: "/a/a.jar#a.jar,/a/b.jar#b.jar,/b/a.jar,/b/b.jar,/c/d.jar" Deduplicated list: "/a/a.jar#a.jar,/a/b.jar#b.jar,/c/d.jar" With other words, priority order is: action jar > user workflow libs > action libs > system lib, where dependency with greater prio is used. oozie.processing.timezone UTC Oozie server timezone. Valid values are UTC and GMT(+/-)####, for example 'GMT+0530' would be India timezone. 
All dates parsed and genered dates by Oozie Coordinator/Bundle will be done in the specified timezone. The default value of 'UTC' should not be changed under normal circumtances. If for any reason is changed, note that GMT(+/-)#### timezones do not observe DST changes. oozie.base.url http://localhost:8080/oozie Base Oozie URL. oozie.system.id oozie-${user.name} The Oozie system ID. oozie.systemmode NORMAL System mode for Oozie at startup. oozie.delete.runtime.dir.on.shutdown true If the runtime directory should be kept after Oozie shutdowns down. oozie.services org.apache.oozie.service.SchedulerService, org.apache.oozie.service.MetricsInstrumentationService, org.apache.oozie.service.MemoryLocksService, org.apache.oozie.service.UUIDService, org.apache.oozie.service.ELService, org.apache.oozie.service.AuthorizationService, org.apache.oozie.service.UserGroupInformationService, org.apache.oozie.service.HadoopAccessorService, org.apache.oozie.service.JobsConcurrencyService, org.apache.oozie.service.URIHandlerService, org.apache.oozie.service.DagXLogInfoService, org.apache.oozie.service.SchemaService, org.apache.oozie.service.LiteWorkflowAppService, org.apache.oozie.service.JPAService, org.apache.oozie.service.StoreService, org.apache.oozie.service.DBLiteWorkflowStoreService, org.apache.oozie.service.CallbackService, org.apache.oozie.service.ActionService, org.apache.oozie.service.ShareLibService, org.apache.oozie.service.CallableQueueService, org.apache.oozie.service.ActionCheckerService, org.apache.oozie.service.RecoveryService, org.apache.oozie.service.PurgeService, org.apache.oozie.service.CoordinatorEngineService, org.apache.oozie.service.BundleEngineService, org.apache.oozie.service.DagEngineService, org.apache.oozie.service.CoordMaterializeTriggerService, org.apache.oozie.service.StatusTransitService, org.apache.oozie.service.PauseTransitService, org.apache.oozie.service.GroupsService, org.apache.oozie.service.ProxyUserService, org.apache.oozie.service.XLogStreamingService, org.apache.oozie.service.JvmPauseMonitorService, org.apache.oozie.service.SparkConfigurationService, org.apache.oozie.service.SchemaCheckerService All services to be created and managed by Oozie Services singleton. Class names must be separated by commas. oozie.services.ext To add/replace services defined in 'oozie.services' with custom implementations. Class names must be separated by commas. oozie.service.XLogStreamingService.buffer.len 4096 4K buffer for streaming the logs progressively oozie.service.XLogStreamingService.error.buffer.len 2048 2K buffer for streaming the error logs progressively oozie.service.XLogStreamingService.audit.buffer.len 3 Number of lines for streaming the audit logs progressively oozie.service.HCatAccessorService.jmsconnections default=java.naming.factory.initial#org.apache.activemq.jndi.ActiveMQInitialContextFactory;java.naming.provider.url#tcp://localhost:61616;connectionFactoryNames#ConnectionFactory Specify the map of endpoints to JMS configuration properties. In general, endpoint identifies the HCatalog server URL. "default" is used if no endpoint is mentioned in the query. If some JMS property is not defined, the system will use the property defined jndi.properties. jndi.properties files is retrieved from the application classpath. Mapping rules can also be provided for mapping Hcatalog servers to corresponding JMS providers. 
hcat://${1}.${2}.server.com:8020=java.naming.factory.initial#Dummy.Factory;java.naming.provider.url#tcp://broker.${2}:61616 oozie.service.HCatAccessorService.jms.use.canonical.hostname false The JMS messages published from a HCat server usually contains the canonical hostname of the HCat server in standalone mode or the canonical name of the VIP in a case of multiple nodes in a HA setup. This setting is used to translate the HCat server hostname or its aliases specified by the user in the HCat URIs of the coordinator dependencies to its canonical name so that they can be exactly matched with the JMS dependency availability notifications. oozie.service.JMSTopicService.topic.name default=${username} Topic options are ${username} or ${jobId} or a fixed string which can be specified as default or for a particular job type. For e.g To have a fixed string topic for workflows, coordinators and bundles, specify in the following comma-separated format: {jobtype1}={some_string1}, {jobtype2}={some_string2} where job type can be WORKFLOW, COORDINATOR or BUNDLE. e.g. Following defines topic for workflow job, workflow action, coordinator job, coordinator action, bundle job and bundle action WORKFLOW=workflow, COORDINATOR=coordinator, BUNDLE=bundle For jobs with no defined topic, default topic will be ${username} oozie.jms.producer.connection.properties java.naming.factory.initial#org.apache.activemq.jndi.ActiveMQInitialContextFactory;java.naming.provider.url#tcp://localhost:61616;connectionFactoryNames#ConnectionFactory oozie.service.JMSAccessorService.connectioncontext.impl org.apache.oozie.jms.DefaultConnectionContext Specifies the Connection Context implementation oozie.service.ConfigurationService.ignore.system.properties oozie.service.AuthorizationService.security.enabled Specifies "oozie.*" properties to cannot be overriden via Java system properties. Property names must be separted by commas. oozie.service.ConfigurationService.verify.available.properties true Specifies whether the available configurations check is enabled or not. oozie.service.SchedulerService.threads 10 The number of threads to be used by the SchedulerService to run deamon tasks. If maxed out, scheduled daemon tasks will be queued up and delayed until threads become available. oozie.service.AuthorizationService.authorization.enabled false Specifies whether security (user name/admin role) is enabled or not. If disabled any user can manage Oozie system and manage any job. oozie.service.AuthorizationService.default.group.as.acl false Enables old behavior where the User's default group is the job's ACL. oozie.serviceAuthorizationService.admin.users Comma separated list of users with admin access for the Authorization service. oozie.service.AuthorizationService.system.info.authorized.users Comma separated list of users authorized for web service calls to get system configuration. oozie.service.InstrumentationService.logging.interval 60 Interval, in seconds, at which instrumentation should be logged by the InstrumentationService. If set to 0 it will not log instrumentation data. oozie.service.PurgeService.older.than 30 Completed workflow jobs older than this value, in days, will be purged by the PurgeService. oozie.service.PurgeService.coord.older.than 7 Completed coordinator jobs older than this value, in days, will be purged by the PurgeService. oozie.service.PurgeService.bundle.older.than 7 Completed bundle jobs older than this value, in days, will be purged by the PurgeService. 
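For illustration, the comma-separated {jobtype}={string} format described above for oozie.service.JMSTopicService.topic.name can pin a fixed topic per job type; the value below simply reuses the example given in the property's own description, and any job type without an entry falls back to the ${username} topic:

  <configuration>
    <!-- fixed JMS topic names per job type -->
    <property>
      <name>oozie.service.JMSTopicService.topic.name</name>
      <value>WORKFLOW=workflow, COORDINATOR=coordinator, BUNDLE=bundle</value>
    </property>
  </configuration>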
oozie.service.PurgeService.purge.old.coord.action false Whether to purge completed workflows and their corresponding coordinator actions of long running coordinator jobs if the completed workflow jobs are older than the value specified in oozie.service.PurgeService.older.than. oozie.service.PurgeService.purge.limit 100 Batch size of individual DB operations used for building the list of items to be purged and performing actual purge. oozie.service.PurgeService.purge.interval 3600 Interval at which the purge service will run, in seconds. oozie.service.PurgeService.enable.command.line true Enable/Disable oozie admin purge command. By default, it is enabled. oozie.service.RecoveryService.wf.actions.older.than 120 Age of the actions which are eligible to be queued for recovery, in seconds. oozie.service.RecoveryService.wf.actions.created.time.interval 7 Created time period of the actions which are eligible to be queued for recovery in days. oozie.service.RecoveryService.callable.batch.size 10 This value determines the number of callable which will be batched together to be executed by a single thread. oozie.service.RecoveryService.push.dependency.interval 200 This value determines the delay for push missing dependency command queueing in Recovery Service oozie.service.RecoveryService.interval 60 Interval at which the RecoverService will run, in seconds. oozie.service.RecoveryService.coord.older.than 600 Age of the Coordinator jobs or actions which are eligible to be queued for recovery, in seconds. oozie.service.RecoveryService.bundle.older.than 600 Age of the Bundle jobs which are eligible to be queued for recovery, in seconds. oozie.service.CallableQueueService.queue.size 10000 Max callable queue size oozie.service.CallableQueueService.threads 10 Number of threads used for executing callables oozie.service.CallableQueueService.delayedcallable.threads 1 The number of threads where delayed tasks are executed. Upon expiration, the tasks are immediately inserted into the main queue to properly handle priorities. This means that no actual business logic is executed in this thread pool, so under normal circumstances, this value can be set to a low number. Note that this property is completely unrelated to oozie.service.SchedulerService.threads which tells how many scheduled background tasks can run in parallel at the same time (like PurgeService, StatusTransitService, etc). oozie.service.CallableQueueService.queue.newImpl true If set to true, then CallableQueueService will use a faster, less CPU-intensive queuing mechanism to execute asynchronous tasks internally. The old implementation generates noticeable CPU load even if Oozie is completely idle, especially when oozie.service.CallableQueueService.threads is set to a large number. The previous queuing mechanism is kept as a fallback option. This is an experimental feature in Oozie 5.1.0 that needs to be re-evaluated upon an upcoming minor release, meaning the old implementation and this feature flag will also be removed. oozie.service.CallableQueueService.queue.awaitTermination.timeout.seconds 30 Number of seconds while awaiting termination of ThreadPoolExecutor instances when CallableQueueService#destroy() is called, in seconds. The more elements you tend to have in your callable queue, the more you want CallableQueueService to wait before shutting down its thread pools. oozie.service.CallableQueueService.callable.concurrency 3 Maximum concurrency for a given callable type. 
Each command is a callable type (submit, start, run, signal, job, jobs, suspend,resume, etc). Each action type is a callable type (Map-Reduce, Pig, SSH, FS, sub-workflow, etc). All commands that use action executors (action-start, action-end, action-kill and action-check) use the action type as the callable type. oozie.service.CallableQueueService.callable.next.eligible true If true, when a callable in the queue has already reached max concurrency, Oozie continuously find next one which has not yet reach max concurrency. oozie.service.CallableQueueService.InterruptMapMaxSize 500 Maximum Size of the Interrupt Map, the interrupt element will not be inserted in the map if exceeded the size. oozie.service.CallableQueueService.InterruptTypes kill,resume,suspend,bundle_kill,bundle_resume,bundle_suspend,coord_kill,coord_change,coord_resume,coord_suspend Getting the types of XCommands that are considered to be of Interrupt type oozie.service.CoordMaterializeTriggerService.lookup.interval 300 Coordinator Job Lookup interval.(in seconds). oozie.service.CoordMaterializeTriggerService.materialization.window 3600 Coordinator Job Lookup command materialized each job for this next "window" duration oozie.service.CoordMaterializeTriggerService.callable.batch.size 10 This value determines the number of callable which will be batched together to be executed by a single thread. oozie.service.CoordMaterializeTriggerService.materialization.system.limit 50 This value determines the number of coordinator jobs to be materialized at a given time. oozie.service.coord.normal.default.timeout 120 Default timeout for a coordinator action input check (in minutes) for normal job. -1 means infinite timeout oozie.service.coord.default.max.timeout 86400 Default maximum timeout for a coordinator action input check (in minutes). 86400= 60days oozie.service.coord.input.check.requeue.interval 60000 Command re-queue interval for coordinator data input check (in millisecond). oozie.service.coord.input.check.requeue.interval.additional.delay 0 This value (in seconds) will be added into oozie.service.coord.input.check.requeue.interval and resulting value will be the requeue interval for the actions which are waiting for a long time without any input. oozie.service.coord.push.check.requeue.interval 600000 Command re-queue interval for push dependencies (in millisecond). oozie.service.coord.default.concurrency 1 Default concurrency for a coordinator job to determine how many maximum action should be executed at the same time. -1 means infinite concurrency. oozie.service.coord.default.throttle 12 Default throttle for a coordinator job to determine how many maximum action should be in WAITING state at the same time. oozie.service.coord.materialization.throttling.factor 0.05 Determine how many maximum actions should be in WAITING state for a single job at any time. The value is calculated by this factor X the total queue size. oozie.service.coord.check.maximum.frequency true When true, Oozie will reject any coordinators with a frequency faster than 5 minutes. It is not recommended to disable this check or submit coordinators with frequencies faster than 5 minutes: doing so can cause unintended behavior and additional system stress. 
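Putting the coordinator defaults just listed into property element form (the values shown are simply the documented defaults), and noting the arithmetic implied by the throttling factor: assuming the "total queue size" it refers to is oozie.service.CallableQueueService.queue.size (10000 above), a factor of 0.05 would cap a single job at roughly 500 WAITING actions:

  <configuration>
    <!-- input check timeout in minutes for a normal coordinator job; -1 means infinite -->
    <property>
      <name>oozie.service.coord.normal.default.timeout</name>
      <value>120</value>
    </property>
    <!-- maximum actions executing at the same time; -1 means infinite concurrency -->
    <property>
      <name>oozie.service.coord.default.concurrency</name>
      <value>1</value>
    </property>
    <!-- maximum actions allowed in WAITING state at the same time -->
    <property>
      <name>oozie.service.coord.default.throttle</name>
      <value>12</value>
    </property>
  </configuration>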
oozie.service.ELService.groups job-submit,workflow,wf-sla-submit,coord-job-submit-freq,coord-job-submit-nofuncs,coord-job-submit-data,coord-job-submit-instances,coord-sla-submit,coord-action-create,coord-action-create-inst,coord-sla-create,coord-action-start,coord-job-wait-timeout,bundle-submit,coord-job-submit-initial-instance List of groups for different ELServices oozie.service.ELService.constants.job-submit EL constant declarations, separated by commas, format is [PREFIX:]NAME=CLASS#CONSTANT. oozie.service.ELService.functions.job-submit EL functions declarations, separated by commas, format is [PREFIX:]NAME=CLASS#METHOD. oozie.service.ELService.ext.constants.job-submit EL constant declarations, separated by commas, format is [PREFIX:]NAME=CLASS#CONSTANT. This property is a convenience property to add extensions without having to include all the built in ones. oozie.service.ELService.ext.functions.job-submit EL functions declarations, separated by commas, format is [PREFIX:]NAME=CLASS#METHOD. This property is a convenience property to add extensions without having to include all the built in ones. oozie.service.ELService.constants.workflow KB=org.apache.oozie.util.ELConstantsFunctions#KB, MB=org.apache.oozie.util.ELConstantsFunctions#MB, GB=org.apache.oozie.util.ELConstantsFunctions#GB, TB=org.apache.oozie.util.ELConstantsFunctions#TB, PB=org.apache.oozie.util.ELConstantsFunctions#PB, RECORDS=org.apache.oozie.action.hadoop.HadoopELFunctions#RECORDS, MAP_IN=org.apache.oozie.action.hadoop.HadoopELFunctions#MAP_IN, MAP_OUT=org.apache.oozie.action.hadoop.HadoopELFunctions#MAP_OUT, REDUCE_IN=org.apache.oozie.action.hadoop.HadoopELFunctions#REDUCE_IN, REDUCE_OUT=org.apache.oozie.action.hadoop.HadoopELFunctions#REDUCE_OUT, GROUPS=org.apache.oozie.action.hadoop.HadoopELFunctions#GROUPS EL constant declarations, separated by commas, format is [PREFIX:]NAME=CLASS#CONSTANT. oozie.service.ELService.ext.constants.workflow EL constant declarations, separated by commas, format is [PREFIX:]NAME=CLASS#CONSTANT. This property is a convenience property to add extensions to the built in executors without having to include all the built in ones. 
oozie.service.ELService.functions.workflow firstNotNull=org.apache.oozie.util.ELConstantsFunctions#firstNotNull, concat=org.apache.oozie.util.ELConstantsFunctions#concat, replaceAll=org.apache.oozie.util.ELConstantsFunctions#replaceAll, appendAll=org.apache.oozie.util.ELConstantsFunctions#appendAll, trim=org.apache.oozie.util.ELConstantsFunctions#trim, timestamp=org.apache.oozie.util.ELConstantsFunctions#timestamp, urlEncode=org.apache.oozie.util.ELConstantsFunctions#urlEncode, toJsonStr=org.apache.oozie.util.ELConstantsFunctions#toJsonStr, toPropertiesStr=org.apache.oozie.util.ELConstantsFunctions#toPropertiesStr, toConfigurationStr=org.apache.oozie.util.ELConstantsFunctions#toConfigurationStr, wf:id=org.apache.oozie.DagELFunctions#wf_id, wf:name=org.apache.oozie.DagELFunctions#wf_name, wf:appPath=org.apache.oozie.DagELFunctions#wf_appPath, wf:conf=org.apache.oozie.DagELFunctions#wf_conf, wf:user=org.apache.oozie.DagELFunctions#wf_user, wf:group=org.apache.oozie.DagELFunctions#wf_group, wf:callback=org.apache.oozie.DagELFunctions#wf_callback, wf:transition=org.apache.oozie.DagELFunctions#wf_transition, wf:lastErrorNode=org.apache.oozie.DagELFunctions#wf_lastErrorNode, wf:errorCode=org.apache.oozie.DagELFunctions#wf_errorCode, wf:errorMessage=org.apache.oozie.DagELFunctions#wf_errorMessage, wf:run=org.apache.oozie.DagELFunctions#wf_run, wf:actionData=org.apache.oozie.DagELFunctions#wf_actionData, wf:actionExternalId=org.apache.oozie.DagELFunctions#wf_actionExternalId, wf:actionTrackerUri=org.apache.oozie.DagELFunctions#wf_actionTrackerUri, wf:actionExternalStatus=org.apache.oozie.DagELFunctions#wf_actionExternalStatus, hadoop:counters=org.apache.oozie.action.hadoop.HadoopELFunctions#hadoop_counters, hadoop:conf=org.apache.oozie.action.hadoop.HadoopELFunctions#hadoop_conf, fs:exists=org.apache.oozie.action.hadoop.FsELFunctions#fs_exists, fs:isDir=org.apache.oozie.action.hadoop.FsELFunctions#fs_isDir, fs:dirSize=org.apache.oozie.action.hadoop.FsELFunctions#fs_dirSize, fs:fileSize=org.apache.oozie.action.hadoop.FsELFunctions#fs_fileSize, fs:blockSize=org.apache.oozie.action.hadoop.FsELFunctions#fs_blockSize, hcat:exists=org.apache.oozie.coord.HCatELFunctions#hcat_exists EL functions declarations, separated by commas, format is [PREFIX:]NAME=CLASS#METHOD. oozie.service.WorkflowAppService.WorkflowDefinitionMaxLength 100000 The maximum length of the workflow definition in bytes An error will be reported if the length exceeds the given maximum oozie.service.ELService.ext.functions.workflow EL functions declarations, separated by commas, format is [PREFIX:]NAME=CLASS#METHOD. This property is a convenience property to add extensions to the built in executors without having to include all the built in ones. oozie.service.ELService.constants.wf-sla-submit MINUTES=org.apache.oozie.util.ELConstantsFunctions#SUBMIT_MINUTES, HOURS=org.apache.oozie.util.ELConstantsFunctions#SUBMIT_HOURS, DAYS=org.apache.oozie.util.ELConstantsFunctions#SUBMIT_DAYS EL constant declarations, separated by commas, format is [PREFIX:]NAME=CLASS#CONSTANT. oozie.service.ELService.ext.constants.wf-sla-submit EL constant declarations, separated by commas, format is [PREFIX:]NAME=CLASS#CONSTANT. This property is a convenience property to add extensions to the built in executors without having to include all the built in ones. oozie.service.ELService.functions.wf-sla-submit EL functions declarations, separated by commas, format is [PREFIX:]NAME=CLASS#METHOD. 
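A minimal sketch of registering an additional workflow EL function through the convenience ext property described above, following the [PREFIX:]NAME=CLASS#METHOD format; the myfn prefix and the com.example.oozie.MyELFunctions class are hypothetical names used only for illustration:

  <configuration>
    <property>
      <name>oozie.service.ELService.ext.functions.workflow</name>
      <!-- hypothetical extension: exposes myfn:today() backed by a static method -->
      <value>myfn:today=com.example.oozie.MyELFunctions#today</value>
    </property>
  </configuration>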
oozie.service.ELService.ext.functions.wf-sla-submit EL functions declarations, separated by commas, format is [PREFIX:]NAME=CLASS#METHOD. This property is a convenience property to add extensions to the built in executors without having to include all the built in ones. l oozie.service.ELService.constants.coord-job-submit-freq EL constant declarations, separated by commas, format is [PREFIX:]NAME=CLASS#CONSTANT. oozie.service.ELService.ext.constants.coord-job-submit-freq EL constant declarations, separated by commas, format is [PREFIX:]NAME=CLASS#CONSTANT. This property is a convenience property to add extensions to the built in executors without having to include all the built in ones. oozie.service.ELService.functions.coord-job-submit-freq coord:days=org.apache.oozie.coord.CoordELFunctions#ph1_coord_days, coord:months=org.apache.oozie.coord.CoordELFunctions#ph1_coord_months, coord:hours=org.apache.oozie.coord.CoordELFunctions#ph1_coord_hours, coord:minutes=org.apache.oozie.coord.CoordELFunctions#ph1_coord_minutes, coord:endOfDays=org.apache.oozie.coord.CoordELFunctions#ph1_coord_endOfDays, coord:endOfMonths=org.apache.oozie.coord.CoordELFunctions#ph1_coord_endOfMonths, coord:endOfWeeks=org.apache.oozie.coord.CoordELFunctions#ph1_coord_endOfWeeks, coord:conf=org.apache.oozie.coord.CoordELFunctions#coord_conf, coord:user=org.apache.oozie.coord.CoordELFunctions#coord_user, hadoop:conf=org.apache.oozie.action.hadoop.HadoopELFunctions#hadoop_conf EL functions declarations, separated by commas, format is [PREFIX:]NAME=CLASS#METHOD. oozie.service.ELService.functions.coord-job-submit-initial-instance ${oozie.service.ELService.functions.coord-job-submit-nofuncs}, coord:dateOffset=org.apache.oozie.coord.CoordELFunctions#ph2_coord_dateOffset, coord:dateTzOffset=org.apache.oozie.coord.CoordELFunctions#ph2_coord_dateTzOffset EL functions for coord job submit initial instance, separated by commas, format is [PREFIX:]NAME=CLASS#METHOD. oozie.service.ELService.ext.functions.coord-job-submit-freq EL functions declarations, separated by commas, format is [PREFIX:]NAME=CLASS#METHOD. This property is a convenience property to add extensions to the built in executors without having to include all the built in ones. oozie.service.ELService.constants.coord-job-wait-timeout EL functions declarations, separated by commas, format is [PREFIX:]NAME=CLASS#METHOD. oozie.service.ELService.ext.constants.coord-job-wait-timeout EL functions declarations, separated by commas, format is [PREFIX:]NAME=CLASS#METHOD. This property is a convenience property to add extensions without having to include all the built in ones. oozie.service.ELService.functions.coord-job-wait-timeout coord:days=org.apache.oozie.coord.CoordELFunctions#ph1_coord_days, coord:months=org.apache.oozie.coord.CoordELFunctions#ph1_coord_months, coord:hours=org.apache.oozie.coord.CoordELFunctions#ph1_coord_hours, coord:minutes=org.apache.oozie.coord.CoordELFunctions#ph1_coord_minutes, hadoop:conf=org.apache.oozie.action.hadoop.HadoopELFunctions#hadoop_conf EL functions declarations, separated by commas, format is [PREFIX:]NAME=CLASS#METHOD. oozie.service.ELService.ext.functions.coord-job-wait-timeout EL functions declarations, separated by commas, format is [PREFIX:]NAME=CLASS#METHOD. This property is a convenience property to add extensions without having to include all the built in ones. 
oozie.service.ELService.constants.coord-job-submit-nofuncs MINUTE=org.apache.oozie.coord.CoordELConstants#SUBMIT_MINUTE, HOUR=org.apache.oozie.coord.CoordELConstants#SUBMIT_HOUR, DAY=org.apache.oozie.coord.CoordELConstants#SUBMIT_DAY, MONTH=org.apache.oozie.coord.CoordELConstants#SUBMIT_MONTH, YEAR=org.apache.oozie.coord.CoordELConstants#SUBMIT_YEAR EL constant declarations, separated by commas, format is [PREFIX:]NAME=CLASS#CONSTANT. oozie.service.ELService.ext.constants.coord-job-submit-nofuncs EL constant declarations, separated by commas, format is [PREFIX:]NAME=CLASS#CONSTANT. This property is a convenience property to add extensions to the built in executors without having to include all the built in ones. oozie.service.ELService.functions.coord-job-submit-nofuncs coord:conf=org.apache.oozie.coord.CoordELFunctions#coord_conf, coord:user=org.apache.oozie.coord.CoordELFunctions#coord_user, hadoop:conf=org.apache.oozie.action.hadoop.HadoopELFunctions#hadoop_conf EL functions declarations, separated by commas, format is [PREFIX:]NAME=CLASS#METHOD. oozie.service.ELService.ext.functions.coord-job-submit-nofuncs EL functions declarations, separated by commas, format is [PREFIX:]NAME=CLASS#METHOD. This property is a convenience property to add extensions to the built in executors without having to include all the built in ones. oozie.service.ELService.constants.coord-job-submit-instances EL constant declarations, separated by commas, format is [PREFIX:]NAME=CLASS#CONSTANT. oozie.service.ELService.ext.constants.coord-job-submit-instances EL constant declarations, separated by commas, format is [PREFIX:]NAME=CLASS#CONSTANT. This property is a convenience property to add extensions to the built in executors without having to include all the built in ones. 
oozie.service.ELService.functions.coord-job-submit-instances coord:hoursInDay=org.apache.oozie.coord.CoordELFunctions#ph1_coord_hoursInDay_echo, coord:daysInMonth=org.apache.oozie.coord.CoordELFunctions#ph1_coord_daysInMonth_echo, coord:tzOffset=org.apache.oozie.coord.CoordELFunctions#ph1_coord_tzOffset_echo, coord:current=org.apache.oozie.coord.CoordELFunctions#ph1_coord_current_echo, coord:currentRange=org.apache.oozie.coord.CoordELFunctions#ph1_coord_currentRange_echo, coord:offset=org.apache.oozie.coord.CoordELFunctions#ph1_coord_offset_echo, coord:latest=org.apache.oozie.coord.CoordELFunctions#ph1_coord_latest_echo, coord:latestRange=org.apache.oozie.coord.CoordELFunctions#ph1_coord_latestRange_echo, coord:future=org.apache.oozie.coord.CoordELFunctions#ph1_coord_future_echo, coord:futureRange=org.apache.oozie.coord.CoordELFunctions#ph1_coord_futureRange_echo, coord:formatTime=org.apache.oozie.coord.CoordELFunctions#ph1_coord_formatTime_echo, coord:epochTime=org.apache.oozie.coord.CoordELFunctions#ph1_coord_epochTime_echo, coord:conf=org.apache.oozie.coord.CoordELFunctions#coord_conf, coord:user=org.apache.oozie.coord.CoordELFunctions#coord_user, coord:absolute=org.apache.oozie.coord.CoordELFunctions#ph1_coord_absolute_echo, coord:endOfMonths=org.apache.oozie.coord.CoordELFunctions#ph1_coord_endOfMonths_echo, coord:endOfWeeks=org.apache.oozie.coord.CoordELFunctions#ph1_coord_endOfWeeks_echo, coord:endOfDays=org.apache.oozie.coord.CoordELFunctions#ph1_coord_endOfDays_echo, hadoop:conf=org.apache.oozie.action.hadoop.HadoopELFunctions#hadoop_conf, coord:dateOffset=org.apache.oozie.coord.CoordELFunctions#ph1_coord_dateOffset_echo, coord:dateTzOffset=org.apache.oozie.coord.CoordELFunctions#ph1_coord_dateTzOffset_echo EL functions declarations, separated by commas, format is [PREFIX:]NAME=CLASS#METHOD. oozie.service.ELService.ext.functions.coord-job-submit-instances EL functions declarations, separated by commas, format is [PREFIX:]NAME=CLASS#METHOD. This property is a convenience property to add extensions to the built in executors without having to include all the built in ones. oozie.service.ELService.constants.coord-job-submit-data EL constant declarations, separated by commas, format is [PREFIX:]NAME=CLASS#CONSTANT. oozie.service.ELService.ext.constants.coord-job-submit-data EL constant declarations, separated by commas, format is [PREFIX:]NAME=CLASS#CONSTANT. This property is a convenience property to add extensions to the built in executors without having to include all the built in ones. 
oozie.service.ELService.functions.coord-job-submit-data coord:dataIn=org.apache.oozie.coord.CoordELFunctions#ph1_coord_dataIn_echo, coord:dataOut=org.apache.oozie.coord.CoordELFunctions#ph1_coord_dataOut_echo, coord:nominalTime=org.apache.oozie.coord.CoordELFunctions#ph1_coord_nominalTime_echo_wrap, coord:actualTime=org.apache.oozie.coord.CoordELFunctions#ph1_coord_actualTime_echo_wrap, coord:dateOffset=org.apache.oozie.coord.CoordELFunctions#ph1_coord_dateOffset_echo, coord:dateTzOffset=org.apache.oozie.coord.CoordELFunctions#ph1_coord_dateTzOffset_echo, coord:formatTime=org.apache.oozie.coord.CoordELFunctions#ph1_coord_formatTime_echo, coord:epochTime=org.apache.oozie.coord.CoordELFunctions#ph1_coord_epochTime_echo, coord:actionId=org.apache.oozie.coord.CoordELFunctions#ph1_coord_actionId_echo, coord:name=org.apache.oozie.coord.CoordELFunctions#ph1_coord_name_echo, coord:conf=org.apache.oozie.coord.CoordELFunctions#coord_conf, coord:user=org.apache.oozie.coord.CoordELFunctions#coord_user, coord:databaseIn=org.apache.oozie.coord.HCatELFunctions#ph1_coord_databaseIn_echo, coord:databaseOut=org.apache.oozie.coord.HCatELFunctions#ph1_coord_databaseOut_echo, coord:tableIn=org.apache.oozie.coord.HCatELFunctions#ph1_coord_tableIn_echo, coord:tableOut=org.apache.oozie.coord.HCatELFunctions#ph1_coord_tableOut_echo, coord:dataInPartitionFilter=org.apache.oozie.coord.HCatELFunctions#ph1_coord_dataInPartitionFilter_echo, coord:dataInPartitionMin=org.apache.oozie.coord.HCatELFunctions#ph1_coord_dataInPartitionMin_echo, coord:dataInPartitionMax=org.apache.oozie.coord.HCatELFunctions#ph1_coord_dataInPartitionMax_echo, coord:dataInPartitions=org.apache.oozie.coord.HCatELFunctions#ph1_coord_dataInPartitions_echo, coord:dataOutPartitions=org.apache.oozie.coord.HCatELFunctions#ph1_coord_dataOutPartitions_echo, coord:dataOutPartitionValue=org.apache.oozie.coord.HCatELFunctions#ph1_coord_dataOutPartitionValue_echo, hadoop:conf=org.apache.oozie.action.hadoop.HadoopELFunctions#hadoop_conf EL functions declarations, separated by commas, format is [PREFIX:]NAME=CLASS#METHOD. oozie.service.ELService.ext.functions.coord-job-submit-data EL functions declarations, separated by commas, format is [PREFIX:]NAME=CLASS#METHOD. This property is a convenience property to add extensions to the built in executors without having to include all the built in ones. oozie.service.ELService.constants.coord-sla-submit MINUTES=org.apache.oozie.coord.CoordELConstants#SUBMIT_MINUTES, HOURS=org.apache.oozie.coord.CoordELConstants#SUBMIT_HOURS, DAYS=org.apache.oozie.coord.CoordELConstants#SUBMIT_DAYS EL constant declarations, separated by commas, format is [PREFIX:]NAME=CLASS#CONSTANT. oozie.service.ELService.ext.constants.coord-sla-submit EL constant declarations, separated by commas, format is [PREFIX:]NAME=CLASS#CONSTANT. This property is a convenience property to add extensions to the built in executors without having to include all the built in ones. 
oozie.service.ELService.functions.bundle-submit bundle:conf=org.apache.oozie.bundle.BundleELFunctions#bundle_conf oozie.service.ELService.functions.coord-sla-submit coord:dataOut=org.apache.oozie.coord.CoordELFunctions#ph1_coord_dataOut_echo, coord:nominalTime=org.apache.oozie.coord.CoordELFunctions#ph1_coord_nominalTime_echo_fixed, coord:actualTime=org.apache.oozie.coord.CoordELFunctions#ph1_coord_actualTime_echo_wrap, coord:dateOffset=org.apache.oozie.coord.CoordELFunctions#ph1_coord_dateOffset_echo, coord:dateTzOffset=org.apache.oozie.coord.CoordELFunctions#ph1_coord_dateTzOffset_echo, coord:formatTime=org.apache.oozie.coord.CoordELFunctions#ph1_coord_formatTime_echo, coord:epochTime=org.apache.oozie.coord.CoordELFunctions#ph1_coord_epochTime_echo, coord:actionId=org.apache.oozie.coord.CoordELFunctions#ph1_coord_actionId_echo, coord:name=org.apache.oozie.coord.CoordELFunctions#ph1_coord_name_echo, coord:conf=org.apache.oozie.coord.CoordELFunctions#coord_conf, coord:user=org.apache.oozie.coord.CoordELFunctions#coord_user, coord:databaseOut=org.apache.oozie.coord.HCatELFunctions#ph1_coord_databaseOut_echo, coord:tableOut=org.apache.oozie.coord.HCatELFunctions#ph1_coord_tableOut_echo, coord:dataOutPartitions=org.apache.oozie.coord.HCatELFunctions#ph1_coord_dataOutPartitions_echo, coord:dataOutPartitionValue=org.apache.oozie.coord.HCatELFunctions#ph1_coord_dataOutPartitionValue_echo, hadoop:conf=org.apache.oozie.action.hadoop.HadoopELFunctions#hadoop_conf EL functions declarations, separated by commas, format is [PREFIX:]NAME=CLASS#METHOD. oozie.service.ELService.ext.functions.coord-sla-submit EL functions declarations, separated by commas, format is [PREFIX:]NAME=CLASS#METHOD. This property is a convenience property to add extensions to the built in executors without having to include all the built in ones. oozie.service.ELService.constants.coord-action-create EL constant declarations, separated by commas, format is [PREFIX:]NAME=CLASS#CONSTANT. oozie.service.ELService.ext.constants.coord-action-create EL constant declarations, separated by commas, format is [PREFIX:]NAME=CLASS#CONSTANT. This property is a convenience property to add extensions to the built in executors without having to include all the built in ones. 
oozie.service.ELService.functions.coord-action-create coord:hoursInDay=org.apache.oozie.coord.CoordELFunctions#ph2_coord_hoursInDay, coord:daysInMonth=org.apache.oozie.coord.CoordELFunctions#ph2_coord_daysInMonth, coord:tzOffset=org.apache.oozie.coord.CoordELFunctions#ph2_coord_tzOffset, coord:current=org.apache.oozie.coord.CoordELFunctions#ph2_coord_current, coord:currentRange=org.apache.oozie.coord.CoordELFunctions#ph2_coord_currentRange, coord:offset=org.apache.oozie.coord.CoordELFunctions#ph2_coord_offset, coord:latest=org.apache.oozie.coord.CoordELFunctions#ph2_coord_latest_echo, coord:latestRange=org.apache.oozie.coord.CoordELFunctions#ph2_coord_latestRange_echo, coord:future=org.apache.oozie.coord.CoordELFunctions#ph2_coord_future_echo, coord:futureRange=org.apache.oozie.coord.CoordELFunctions#ph2_coord_futureRange_echo, coord:actionId=org.apache.oozie.coord.CoordELFunctions#ph2_coord_actionId, coord:name=org.apache.oozie.coord.CoordELFunctions#ph2_coord_name, coord:formatTime=org.apache.oozie.coord.CoordELFunctions#ph2_coord_formatTime, coord:epochTime=org.apache.oozie.coord.CoordELFunctions#ph2_coord_epochTime, coord:conf=org.apache.oozie.coord.CoordELFunctions#coord_conf, coord:user=org.apache.oozie.coord.CoordELFunctions#coord_user, coord:absolute=org.apache.oozie.coord.CoordELFunctions#ph2_coord_absolute_echo, coord:endOfMonths=org.apache.oozie.coord.CoordELFunctions#ph2_coord_endOfMonths_echo, coord:endOfWeeks=org.apache.oozie.coord.CoordELFunctions#ph2_coord_endOfWeeks_echo, coord:endOfDays=org.apache.oozie.coord.CoordELFunctions#ph2_coord_endOfDays_echo, coord:absoluteRange=org.apache.oozie.coord.CoordELFunctions#ph2_coord_absolute_range, hadoop:conf=org.apache.oozie.action.hadoop.HadoopELFunctions#hadoop_conf EL functions declarations, separated by commas, format is [PREFIX:]NAME=CLASS#METHOD. oozie.service.ELService.ext.functions.coord-action-create EL functions declarations, separated by commas, format is [PREFIX:]NAME=CLASS#METHOD. This property is a convenience property to add extensions to the built in executors without having to include all the built in ones. oozie.service.ELService.constants.coord-action-create-inst EL constant declarations, separated by commas, format is [PREFIX:]NAME=CLASS#CONSTANT. oozie.service.ELService.ext.constants.coord-action-create-inst EL constant declarations, separated by commas, format is [PREFIX:]NAME=CLASS#CONSTANT. This property is a convenience property to add extensions to the built in executors without having to include all the built in ones. 
oozie.service.ELService.functions.coord-action-create-inst coord:hoursInDay=org.apache.oozie.coord.CoordELFunctions#ph2_coord_hoursInDay, coord:daysInMonth=org.apache.oozie.coord.CoordELFunctions#ph2_coord_daysInMonth, coord:tzOffset=org.apache.oozie.coord.CoordELFunctions#ph2_coord_tzOffset, coord:current=org.apache.oozie.coord.CoordELFunctions#ph2_coord_current_echo, coord:currentRange=org.apache.oozie.coord.CoordELFunctions#ph2_coord_currentRange_echo, coord:offset=org.apache.oozie.coord.CoordELFunctions#ph2_coord_offset_echo, coord:latest=org.apache.oozie.coord.CoordELFunctions#ph2_coord_latest_echo, coord:latestRange=org.apache.oozie.coord.CoordELFunctions#ph2_coord_latestRange_echo, coord:future=org.apache.oozie.coord.CoordELFunctions#ph2_coord_future_echo, coord:futureRange=org.apache.oozie.coord.CoordELFunctions#ph2_coord_futureRange_echo, coord:formatTime=org.apache.oozie.coord.CoordELFunctions#ph2_coord_formatTime, coord:epochTime=org.apache.oozie.coord.CoordELFunctions#ph2_coord_epochTime, coord:conf=org.apache.oozie.coord.CoordELFunctions#coord_conf, coord:user=org.apache.oozie.coord.CoordELFunctions#coord_user, coord:absolute=org.apache.oozie.coord.CoordELFunctions#ph2_coord_absolute_echo, coord:absoluteRange=org.apache.oozie.coord.CoordELFunctions#ph2_coord_absolute_range, coord:endOfMonths=org.apache.oozie.coord.CoordELFunctions#ph2_coord_endOfMonths_echo, coord:endOfWeeks=org.apache.oozie.coord.CoordELFunctions#ph2_coord_endOfWeeks_echo, coord:endOfDays=org.apache.oozie.coord.CoordELFunctions#ph2_coord_endOfDays_echo, hadoop:conf=org.apache.oozie.action.hadoop.HadoopELFunctions#hadoop_conf, coord:dateOffset=org.apache.oozie.coord.CoordELFunctions#ph2_coord_dateOffset, coord:dateTzOffset=org.apache.oozie.coord.CoordELFunctions#ph2_coord_dateTzOffset EL functions declarations, separated by commas, format is [PREFIX:]NAME=CLASS#METHOD. oozie.service.ELService.ext.functions.coord-action-create-inst EL functions declarations, separated by commas, format is [PREFIX:]NAME=CLASS#METHOD. This property is a convenience property to add extensions to the built in executors without having to include all the built in ones. oozie.service.ELService.constants.coord-sla-create EL constant declarations, separated by commas, format is [PREFIX:]NAME=CLASS#CONSTANT. oozie.service.ELService.ext.constants.coord-sla-create MINUTES=org.apache.oozie.coord.CoordELConstants#SUBMIT_MINUTES, HOURS=org.apache.oozie.coord.CoordELConstants#SUBMIT_HOURS, DAYS=org.apache.oozie.coord.CoordELConstants#SUBMIT_DAYS EL constant declarations, separated by commas, format is [PREFIX:]NAME=CLASS#CONSTANT. This property is a convenience property to add extensions to the built in executors without having to include all the built in ones. 
oozie.service.ELService.functions.coord-sla-create coord:dataOut=org.apache.oozie.coord.CoordELFunctions#ph3_coord_dataOut, coord:nominalTime=org.apache.oozie.coord.CoordELFunctions#ph2_coord_nominalTime, coord:actualTime=org.apache.oozie.coord.CoordELFunctions#ph2_coord_actualTime, coord:dateOffset=org.apache.oozie.coord.CoordELFunctions#ph2_coord_dateOffset, coord:dateTzOffset=org.apache.oozie.coord.CoordELFunctions#ph2_coord_dateTzOffset, coord:formatTime=org.apache.oozie.coord.CoordELFunctions#ph2_coord_formatTime, coord:epochTime=org.apache.oozie.coord.CoordELFunctions#ph2_coord_epochTime, coord:actionId=org.apache.oozie.coord.CoordELFunctions#ph2_coord_actionId, coord:name=org.apache.oozie.coord.CoordELFunctions#ph2_coord_name, coord:conf=org.apache.oozie.coord.CoordELFunctions#coord_conf, coord:user=org.apache.oozie.coord.CoordELFunctions#coord_user, coord:databaseOut=org.apache.oozie.coord.HCatELFunctions#ph3_coord_databaseOut, coord:tableOut=org.apache.oozie.coord.HCatELFunctions#ph3_coord_tableOut, coord:dataOutPartitions=org.apache.oozie.coord.HCatELFunctions#ph3_coord_dataOutPartitions, coord:dataOutPartitionValue=org.apache.oozie.coord.HCatELFunctions#ph3_coord_dataOutPartitionValue, hadoop:conf=org.apache.oozie.action.hadoop.HadoopELFunctions#hadoop_conf EL functions declarations, separated by commas, format is [PREFIX:]NAME=CLASS#METHOD. oozie.service.ELService.ext.functions.coord-sla-create EL functions declarations, separated by commas, format is [PREFIX:]NAME=CLASS#METHOD. This property is a convenience property to add extensions to the built in executors without having to include all the built in ones. oozie.service.ELService.constants.coord-action-start EL constant declarations, separated by commas, format is [PREFIX:]NAME=CLASS#CONSTANT. oozie.service.ELService.ext.constants.coord-action-start EL constant declarations, separated by commas, format is [PREFIX:]NAME=CLASS#CONSTANT. This property is a convenience property to add extensions to the built in executors without having to include all the built in ones. 
oozie.service.ELService.functions.coord-action-start coord:hoursInDay=org.apache.oozie.coord.CoordELFunctions#ph3_coord_hoursInDay, coord:daysInMonth=org.apache.oozie.coord.CoordELFunctions#ph3_coord_daysInMonth, coord:tzOffset=org.apache.oozie.coord.CoordELFunctions#ph3_coord_tzOffset, coord:latest=org.apache.oozie.coord.CoordELFunctions#ph3_coord_latest, coord:latestRange=org.apache.oozie.coord.CoordELFunctions#ph3_coord_latestRange, coord:future=org.apache.oozie.coord.CoordELFunctions#ph3_coord_future, coord:futureRange=org.apache.oozie.coord.CoordELFunctions#ph3_coord_futureRange, coord:dataIn=org.apache.oozie.coord.CoordELFunctions#ph3_coord_dataIn, coord:dataOut=org.apache.oozie.coord.CoordELFunctions#ph3_coord_dataOut, coord:nominalTime=org.apache.oozie.coord.CoordELFunctions#ph3_coord_nominalTime, coord:actualTime=org.apache.oozie.coord.CoordELFunctions#ph3_coord_actualTime, coord:dateOffset=org.apache.oozie.coord.CoordELFunctions#ph3_coord_dateOffset, coord:dateTzOffset=org.apache.oozie.coord.CoordELFunctions#ph3_coord_dateTzOffset, coord:formatTime=org.apache.oozie.coord.CoordELFunctions#ph3_coord_formatTime, coord:epochTime=org.apache.oozie.coord.CoordELFunctions#ph3_coord_epochTime, coord:actionId=org.apache.oozie.coord.CoordELFunctions#ph3_coord_actionId, coord:name=org.apache.oozie.coord.CoordELFunctions#ph3_coord_name, coord:conf=org.apache.oozie.coord.CoordELFunctions#coord_conf, coord:user=org.apache.oozie.coord.CoordELFunctions#coord_user, coord:databaseIn=org.apache.oozie.coord.HCatELFunctions#ph3_coord_databaseIn, coord:databaseOut=org.apache.oozie.coord.HCatELFunctions#ph3_coord_databaseOut, coord:tableIn=org.apache.oozie.coord.HCatELFunctions#ph3_coord_tableIn, coord:tableOut=org.apache.oozie.coord.HCatELFunctions#ph3_coord_tableOut, coord:dataInPartitionFilter=org.apache.oozie.coord.HCatELFunctions#ph3_coord_dataInPartitionFilter, coord:dataInPartitionMin=org.apache.oozie.coord.HCatELFunctions#ph3_coord_dataInPartitionMin, coord:dataInPartitionMax=org.apache.oozie.coord.HCatELFunctions#ph3_coord_dataInPartitionMax, coord:dataInPartitions=org.apache.oozie.coord.HCatELFunctions#ph3_coord_dataInPartitions, coord:dataOutPartitions=org.apache.oozie.coord.HCatELFunctions#ph3_coord_dataOutPartitions, coord:dataOutPartitionValue=org.apache.oozie.coord.HCatELFunctions#ph3_coord_dataOutPartitionValue, hadoop:conf=org.apache.oozie.action.hadoop.HadoopELFunctions#hadoop_conf EL functions declarations, separated by commas, format is [PREFIX:]NAME=CLASS#METHOD. oozie.service.ELService.ext.functions.coord-action-start EL functions declarations, separated by commas, format is [PREFIX:]NAME=CLASS#METHOD. This property is a convenience property to add extensions to the built in executors without having to include all the built in ones. oozie.service.ELService.latest-el.use-current-time false Determine whether to use the current time to determine the latest dependency or the action creation time. This is for backward compatibility with older oozie behaviour. oozie.service.UUIDService.generator counter random : generated UUIDs will be random strings. counter: generated UUIDs generated will be a counter postfixed with the system startup time. oozie.service.DBLiteWorkflowStoreService.status.metrics.collection.interval 5 Workflow Status metrics collection interval in minutes. oozie.service.DBLiteWorkflowStoreService.status.metrics.window 3600 Workflow Status metrics collection window in seconds. Workflow status will be instrumented for the window. 
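The "counter" generator for oozie.service.UUIDService.generator is described as a counter postfixed with the system startup time. The sketch below only illustrates that idea; the exact separator and formatting are assumptions, and this is not the actual UUIDService implementation.

import java.util.concurrent.atomic.AtomicLong;

// Sketch of the "counter" id style: increasing counter + system startup time suffix.
public class CounterIdSketch {
  private static final long STARTUP_TIME = System.currentTimeMillis();
  private static final AtomicLong COUNTER = new AtomicLong();

  static String nextId() {
    return COUNTER.incrementAndGet() + "-" + STARTUP_TIME;
  }

  public static void main(String[] args) {
    System.out.println(nextId());
    System.out.println(nextId());
  }
}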
oozie.db.schema.name oozie Oozie DataBase Name oozie.db.import.batch.size 1000 How many entities are imported in a single transaction by the Oozie DB import CLI tool to avoid OutOfMemoryErrors. oozie.service.JPAService.create.db.schema false Creates Oozie DB. If set to true, it creates the DB schema if it does not exist. If the DB schema exists is a NOP. If set to false, it does not create the DB schema. If the DB schema does not exist it fails start up. oozie.service.JPAService.validate.db.connection true Validates DB connections from the DB connection pool. If the 'oozie.service.JPAService.create.db.schema' property is set to true, this property is ignored. oozie.service.JPAService.validate.db.connection.eviction.interval 300000 Validates DB connections from the DB connection pool. When validate db connection 'TestWhileIdle' is true, the number of milliseconds to sleep between runs of the idle object evictor thread. oozie.service.JPAService.validate.db.connection.eviction.num 10 Validates DB connections from the DB connection pool. When validate db connection 'TestWhileIdle' is true, the number of objects to examine during each run of the idle object evictor thread. oozie.service.JPAService.connection.data.source org.apache.oozie.util.db.BasicDataSourceWrapper DataSource to be used for connection pooling. If you want the property openJpa.connectionProperties="DriverClassName=..." to have a real effect, set this to org.apache.oozie.util.db.BasicDataSourceWrapper. A DBCP bug (https://issues.apache.org/jira/browse/DBCP-333) prevents otherwise the JDBC driver setting to have a real effect while using custom class loader. oozie.service.JPAService.connection.properties DataSource connection properties. oozie.service.JPAService.jdbc.driver org.apache.derby.jdbc.EmbeddedDriver JDBC driver class. oozie.service.JPAService.jdbc.url jdbc:derby:${oozie.data.dir}/${oozie.db.schema.name}-db;create=true JDBC URL. oozie.service.JPAService.jdbc.username sa DB user name. oozie.service.JPAService.jdbc.password DB user password. IMPORTANT: if password is emtpy leave a 1 space string, the service trims the value, if empty Configuration assumes it is NULL. IMPORTANT: if the StoreServicePasswordService is active, it will reset this value with the value given in the console. oozie.service.JPAService.pool.max.active.conn 10 Max number of connections. oozie.service.JPAService.openjpa.BrokerImpl non-finalizing The default OpenJPAEntityManager implementation automatically closes itself during instance finalization. This guards against accidental resource leaks that may occur if a developer fails to explicitly close EntityManagers when finished with them, but it also incurs a scalability bottleneck, since the JVM must perform synchronization during instance creation, and since the finalizer thread will have more instances to monitor. To avoid this overhead, set the openjpa.BrokerImpl configuration property to non-finalizing. To use default implementation set it to empty space. oozie.service.JPAService.retry.initial-wait-time.ms 100 Initial wait time in milliseconds between the first failed database operation and the re-attempted operation. The wait time is doubled at each retry. oozie.service.JPAService.retry.maximum-wait-time.ms 30000 Maximum wait time between database retry attempts. oozie.service.JPAService.retry.max-retries 10 Maximum number of retries for a failed database operation. 
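The oozie.service.JPAService.jdbc.* defaults above point at an embedded Derby database. A minimal sketch, outside of Oozie, that opens a connection with the same driver class and URL pattern; the path "/tmp/oozie-data/oozie-db" is a made-up stand-in for ${oozie.data.dir}/${oozie.db.schema.name}-db.

import java.sql.Connection;
import java.sql.DriverManager;

// Opens an embedded Derby database, mirroring the JPAService JDBC defaults.
public class DerbyConnectionSketch {
  public static void main(String[] args) throws Exception {
    Class.forName("org.apache.derby.jdbc.EmbeddedDriver");      // oozie.service.JPAService.jdbc.driver
    String url = "jdbc:derby:/tmp/oozie-data/oozie-db;create=true"; // jdbc.url pattern
    try (Connection conn = DriverManager.getConnection(url, "sa", "")) {
      System.out.println("Connected: " + !conn.isClosed());
    }
  }
}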
oozie.service.SchemaService.wf.schemas oozie-common-1.0.xsd, oozie-workflow-0.1.xsd,oozie-workflow-0.2.xsd,oozie-workflow-0.2.5.xsd,oozie-workflow-0.3.xsd,oozie-workflow-0.4.xsd, oozie-workflow-0.4.5.xsd,oozie-workflow-0.5.xsd,oozie-workflow-1.0.xsd, shell-action-0.1.xsd,shell-action-0.2.xsd,shell-action-0.3.xsd,shell-action-1.0.xsd, email-action-0.1.xsd,email-action-0.2.xsd, hive-action-0.2.xsd,hive-action-0.3.xsd,hive-action-0.4.xsd,hive-action-0.5.xsd,hive-action-0.6.xsd,hive-action-1.0.xsd, sqoop-action-0.2.xsd,sqoop-action-0.3.xsd,sqoop-action-0.4.xsd,sqoop-action-1.0.xsd, ssh-action-0.1.xsd,ssh-action-0.2.xsd, distcp-action-0.1.xsd,distcp-action-0.2.xsd,distcp-action-1.0.xsd, oozie-sla-0.1.xsd,oozie-sla-0.2.xsd, hive2-action-0.1.xsd,hive2-action-0.2.xsd,hive2-action-1.0.xsd, spark-action-0.1.xsd,spark-action-0.2.xsd,spark-action-1.0.xsd, git-action-1.0.xsd List of schemas for workflows (separated by commas). oozie.service.SchemaService.wf.ext.schemas List of additional schemas for workflows (separated by commas). oozie.service.SchemaService.coord.schemas oozie-coordinator-0.1.xsd,oozie-coordinator-0.2.xsd,oozie-coordinator-0.3.xsd,oozie-coordinator-0.4.xsd, oozie-coordinator-0.5.xsd,oozie-sla-0.1.xsd,oozie-sla-0.2.xsd List of schemas for coordinators (separated by commas). oozie.service.SchemaService.coord.ext.schemas List of additional schemas for coordinators (separated by commas). oozie.service.SchemaService.bundle.schemas oozie-bundle-0.1.xsd,oozie-bundle-0.2.xsd List of schemas for bundles (separated by commas). oozie.service.SchemaService.bundle.ext.schemas List of additional schemas for bundles (separated by commas). oozie.service.SchemaService.sla.schemas gms-oozie-sla-0.1.xsd,oozie-sla-0.2.xsd List of schemas for semantic validation for GMS SLA (separated by commas). oozie.service.SchemaService.sla.ext.schemas List of additional schemas for semantic validation for GMS SLA (separated by commas). oozie.service.CallbackService.base.url ${oozie.base.url}/callback Base callback URL used by ActionExecutors. oozie.service.CallbackService.early.requeue.max.retries 5 If Oozie receives a callback too early (while the action is in PREP state), it will requeue the command this many times to give the action time to transition to RUNNING. oozie.servlet.CallbackServlet.max.data.len 2048 Max size in characters for the action completion data output. oozie.external.stats.max.size -1 Max size in bytes for action stats. -1 means infinite value. oozie.JobCommand.job.console.url ${oozie.base.url}?job= Base console URL for a workflow job. oozie.service.ActionService.executor.classes org.apache.oozie.action.decision.DecisionActionExecutor, org.apache.oozie.action.hadoop.JavaActionExecutor, org.apache.oozie.action.hadoop.FsActionExecutor, org.apache.oozie.action.hadoop.MapReduceActionExecutor, org.apache.oozie.action.hadoop.PigActionExecutor, org.apache.oozie.action.hadoop.HiveActionExecutor, org.apache.oozie.action.hadoop.ShellActionExecutor, org.apache.oozie.action.hadoop.SqoopActionExecutor, org.apache.oozie.action.hadoop.DistcpActionExecutor, org.apache.oozie.action.hadoop.Hive2ActionExecutor, org.apache.oozie.action.ssh.SshActionExecutor, org.apache.oozie.action.oozie.SubWorkflowActionExecutor, org.apache.oozie.action.email.EmailActionExecutor, org.apache.oozie.action.hadoop.SparkActionExecutor, org.apache.oozie.action.hadoop.GitActionExecutor List of ActionExecutors classes (separated by commas). Only action types with associated executors can be used in workflows. 
oozie.service.ActionService.executor.ext.classes List of ActionExecutors extension classes (separated by commas). Only action types with associated executors can be used in workflows. This property is a convenience property to add extensions to the built in executors without having to include all the built in ones. oozie.service.ActionCheckerService.action.check.interval 60 The frequency at which the ActionCheckService will run. oozie.service.ActionCheckerService.action.check.delay 600 The time, in seconds, between an ActionCheck for the same action. oozie.service.ActionCheckerService.callable.batch.size 10 This value determines the number of actions which will be batched together to be executed by a single thread. oozie.service.StatusTransitService.statusTransit.interval 60 The frequency in seconds at which the StatusTransitService will run. oozie.service.StatusTransitService.backward.support.for.coord.status false true, if coordinator job submits using 'uri:oozie:coordinator:0.1' and wants to keep Oozie 2.x status transit. if set true, 1. SUCCEEDED state in coordinator job means materialization done. 2. No DONEWITHERROR state in coordinator job 3. No PAUSED or PREPPAUSED state in coordinator job 4. PREPSUSPENDED becomes SUSPENDED in coordinator job oozie.service.StatusTransitService.backward.support.for.states.without.error true true, if you want to keep Oozie 3.2 status transit. Change it to false for Oozie 4.x releases. if set true, No states like RUNNINGWITHERROR, SUSPENDEDWITHERROR and PAUSEDWITHERROR for coordinator and bundle oozie.service.PauseTransitService.PauseTransit.interval 60 The frequency in seconds at which the PauseTransitService will run. oozie.action.max.output.data 2048 Max size in characters for output data. oozie.action.fs.glob.max 50000 Maximum number of globbed files. oozie.action.launcher.am.restart.kill.childjobs true Multiple instances of launcher jobs can happen due to RM non-work preserving recovery on RM restart, AM recovery due to crashes or AM network connectivity loss. This could also lead to orphaned child jobs of the old AM attempts leading to conflicting runs. This kills child jobs of previous attempts using YARN application tags. oozie.action.spark.setup.hadoop.conf.dir false Oozie action.xml (oozie.action.conf.xml) contains all the hadoop configuration and user provided configurations. This property will allow users to copy Oozie action.xml as hadoop *-site configurations files. The advantage is, user need not to manage these files into spark sharelib. If user wants to manage the hadoop configurations themselves, it should should disable it. oozie.action.shell.setup.hadoop.conf.dir false The Shell action is commonly used to run programs that rely on HADOOP_CONF_DIR (e.g. hive, beeline, sqoop, etc). With YARN, HADOO_CONF_DIR is set to the NodeManager's copies of Hadoop's *-site.xml files, which can be problematic because (a) they are for meant for the NM, not necessarily clients, and (b) they won't have any of the configs that Oozie, or the user through Oozie, sets. When this property is set to true, The Shell action will prepare the *-site.xml files based on the correct config and set HADOOP_CONF_DIR to point to it. Setting it to false will make Oozie leave HADOOP_CONF_DIR alone. This can also be set at the Action level by putting it in the Shell Action's configuration section, which also has priorty. That all said, it's recommended to use the appropriate action type when possible. 
oozie.action.shell.setup.hadoop.conf.dir.write.log4j.properties true Toggle to control if a log4j.properties file should be written into the configuration directory prepared when oozie.action.shell.setup.hadoop.conf.dir is enabled. This is used to control logging behavior of log4j using commands run within the shell action script, and to ensure logging does not impact output data capture if leaked to stdout. Content of the written file is determined by the value of oozie.action.shell.setup.hadoop.conf.dir.log4j.content. oozie.action.shell.setup.hadoop.conf.dir.log4j.content log4j.rootLogger=INFO,console log4j.appender.console=org.apache.logging.log4j.core.appender.ConsoleAppender log4j.appender.console.target=System.err log4j.appender.console.layout=org.apache.logging.log4j.core.layout.PatternLayout log4j.appender.console.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %p %c{2}: %m%n The value to write into a log4j.properties file under the config directory created when oozie.action.shell.setup.hadoop.conf.dir and oozie.action.shell.setup.hadoop.conf.dir.write.log4j.properties properties are both enabled. The values must be properly newline separated and in format expected by Log4J. Trailing and preceding whitespaces will be trimmed when reading this property. This is used to control logging behavior of log4j using commands run within the shell action script. oozie.action.shell.max-print-size-kb 128 When an oozie shell action starts, the shell script will be printed. Scripts larger than the size configured here (in KiB) will not be printed. If this value is less than or equal to zero, the script will not be printed. oozie.action.launcher.yarn.timeline-service.enabled false Enables/disables getting delegation tokens for ATS for the launcher job in YARN/Hadoop 2.6 (no effect in Hadoop 1) for all action types by default if tez-site.xml is present in distributed cache. This can be overridden on a per-action basis by setting oozie.launcher.yarn.timeline-service.enabled in an action's configuration section in a workflow. oozie.action.pig.log.expandedscript true Log the expanded pig script in launcher stdout log oozie.action.rootlogger.log.level INFO Logging level for root logger oozie.action.retries.max 3 The number of retries for executing an action in case of failure oozie.action.retry.interval 10 The interval between retries of an action in case of failure oozie.action.retry.policy periodic Retry policy of an action in case of failure. Possible values are periodic/exponential oozie.action.ssh.delete.remote.tmp.dir true If set to true, it will delete temporary directory at the end of execution of ssh action. oozie.action.ssh.http.command curl Command to use for callback to oozie, normally is 'curl' or 'wget'. The command must available in PATH environment variable of the USER@HOST box shell. oozie.action.ssh.http.command.post.options --data-binary @#stdout --request POST --header "content-type:text/plain" The callback command POST options. Used when the ouptut of the ssh action is captured. oozie.action.ssh.allow.user.at.host true Specifies whether the user specified by the ssh action is allowed or is to be replaced by the Job user oozie.action.ssh.check.retries.max 3 Maximal retry count for ssh action status check oozie.action.ssh.check.initial.retry.wait.time 3000 init wait time that the first retry check needs to wait oozie.action.subworkflow.max.depth 50 The maximum depth for subworkflows. 
For example, if set to 3, then a workflow can start subwf1, which can start subwf2, which can start subwf3; but if subwf3 tries to start subwf4, then the action will fail. This is helpful in preventing errant workflows from starting infintely recursive subworkflows. oozie.service.HadoopAccessorService.kerberos.enabled false Indicates if Oozie is configured to use Kerberos. local.realm LOCALHOST Kerberos Realm used by Oozie and Hadoop. Using 'local.realm' to be aligned with Hadoop configuration oozie.service.HadoopAccessorService.keytab.file ${user.home}/oozie.keytab Location of the Oozie user keytab file. oozie.service.HadoopAccessorService.kerberos.principal ${user.name}/localhost@${local.realm} Kerberos principal for Oozie service. oozie.service.HadoopAccessorService.jobTracker.whitelist Whitelisted job tracker for Oozie service. oozie.service.HadoopAccessorService.nameNode.whitelist Whitelisted job tracker for Oozie service. oozie.service.HadoopAccessorService.hadoop.configurations *=hadoop-conf Comma separated AUTHORITY=HADOOP_CONF_DIR, where AUTHORITY is the HOST:PORT of the Hadoop service (JobTracker, YARN, HDFS). The wildcard '*' configuration is used when there is no exact match for an authority. The HADOOP_CONF_DIR contains the relevant Hadoop *-site.xml files. If the path is relative is looked within the Oozie configuration directory; though the path can be absolute (i.e. to point to Hadoop client conf/ directories in the local filesystem. oozie.service.HadoopAccessorService.action.configurations *=action-conf Comma separated AUTHORITY=ACTION_CONF_DIR, where AUTHORITY is the HOST:PORT of the Hadoop MapReduce service (JobTracker, YARN). The wildcard '*' configuration is used when there is no exact match for an authority. The ACTION_CONF_DIR may contain ACTION.xml files where ACTION is the action type ('java', 'map-reduce', 'pig', 'hive', 'sqoop', etc.). If the ACTION.xml file exists, its properties will be used as defaults properties for the action. If the path is relative is looked within the Oozie configuration directory; though the path can be absolute (i.e. to point to Hadoop client conf/ directories in the local filesystem. oozie.service.HadoopAccessorService.action.configurations.load.default.resources true true means that default and site xml files of hadoop (core-default, core-site, hdfs-default, hdfs-site, mapred-default, mapred-site, yarn-default, yarn-site) are parsed into actionConf on Oozie server. false means that site xml files are not loaded on server, instead loaded on launcher node. This is only done for pig and hive actions which handle loading those files automatically from the classpath on launcher task. It defaults to true. oozie.service.HadoopAccessorService.fs.s3a You can configure custom s3a file system properties globally. Value shall be a comma separated list of key=value pairs. For example: fs.s3a.fast.upload.buffer=bytebuffer,fs.s3a.impl.disable.cache=true Limitation: the custom file system properties cannot contain comma neither in key nor in value. oozie.credentials.credentialclasses A list of credential class mapping for CredentialsProvider oozie.credentials.skip false This determines if Oozie should skip getting credentials from the credential providers. This can be overwritten at a job-level or action-level. 
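oozie.service.HadoopAccessorService.fs.s3a takes a comma separated list of key=value pairs. The sketch below is only an illustration of parsing such a value (not Oozie code); as the description notes, neither keys nor values may themselves contain commas.

import java.util.LinkedHashMap;
import java.util.Map;

// Parses a comma separated "key=value" list into a map.
public class KeyValueListSketch {
  static Map<String, String> parse(String value) {
    Map<String, String> props = new LinkedHashMap<>();
    for (String pair : value.split(",")) {
      int eq = pair.indexOf('=');
      if (eq > 0) {
        props.put(pair.substring(0, eq).trim(), pair.substring(eq + 1).trim());
      }
    }
    return props;
  }

  public static void main(String[] args) {
    System.out.println(parse("fs.s3a.fast.upload.buffer=bytebuffer,fs.s3a.impl.disable.cache=true"));
  }
}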
oozie.actions.main.classnames distcp=org.apache.hadoop.tools.DistCp A list of class name mapping for Action classes oozie.service.WorkflowAppService.system.libpath /user/${user.name}/share/lib System library path to use for workflow applications. This path is added to workflow application if their job properties sets the property 'oozie.use.system.libpath' to true. oozie.command.default.lock.timeout 5000 Default timeout (in milliseconds) for commands for acquiring an exclusive lock on an entity. oozie.command.default.requeue.delay 10000 Default time (in milliseconds) for commands that are requeued for delayed execution. oozie.service.LiteWorkflowStoreService.user.retry.max 3 Automatic retry max count for workflow action is 3 in default. oozie.service.LiteWorkflowStoreService.user.retry.inteval 10 Automatic retry interval for workflow action is in minutes and the default value is 10 minutes. oozie.service.LiteWorkflowStoreService.user.retry.policy periodic Automatic retry policy for workflow action. Possible values are periodic or exponential, periodic being the default. oozie.service.LiteWorkflowStoreService.user.retry.error.code JA008,JA009,JA017,JA018,JA019,FS009,FS008,FS014 Automatic retry interval for workflow action is handled for these specified error code: FS009, FS008 is file exists error when using chmod in fs action. FS014 is permission error in fs action JA018 is output directory exists error in workflow map-reduce action. JA019 is error while executing distcp action. JA017 is job not exists error in action executor. JA008 is FileNotFoundException in action executor. JA009 is IOException in action executor. ALL is the any kind of error in action executor. oozie.service.LiteWorkflowStoreService.user.retry.error.code.ext Automatic retry interval for workflow action is handled for these specified extra error code: ALL is the any kind of error in action executor. oozie.service.LiteWorkflowStoreService.node.def.version _oozie_inst_v_2 NodeDef default version, _oozie_inst_v_0, _oozie_inst_v_1 or _oozie_inst_v_2 oozie.authentication.type simple Defines authentication used for Oozie HTTP endpoint. Supported values are: simple | kerberos | #AUTHENTICATION_HANDLER_CLASSNAME# oozie.server.authentication.type ${oozie.authentication.type} Defines authentication used for Oozie server communicating to other Oozie server over HTTP(s). Supported values are: simple | kerberos | #AUTHENTICATOR_CLASSNAME# oozie.server.connection.timeout.seconds 180 Defines connection timeout used for Oozie server communicating to other Oozie server over HTTP(s). Default is 3 min. oozie.authentication.token.validity 36000 Indicates how long (in seconds) an authentication token is valid before it has to be renewed. oozie.authentication.cookie.domain The domain to use for the HTTP cookie that stores the authentication token. In order to authentiation to work correctly across multiple hosts the domain must be correctly set. oozie.authentication.simple.anonymous.allowed true Indicates if anonymous requests are allowed when using 'simple' authentication. oozie.authentication.kerberos.principal HTTP/localhost@${local.realm} Indicates the Kerberos principal to be used for HTTP endpoint. The principal MUST start with 'HTTP/' as per Kerberos HTTP SPNEGO specification. oozie.authentication.kerberos.keytab ${oozie.service.HadoopAccessorService.keytab.file} Location of the keytab file with the credentials for the principal. Referring to the same keytab file Oozie uses for its Kerberos credentials for Hadoop. 
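A minimal submission sketch using the standard Oozie Java client API (org.apache.oozie.client.OozieClient), assuming an Oozie server on the default port from this file (11000). It sets oozie.use.system.libpath=true so the system library path described above is added to the workflow; the HDFS path and user name are placeholders.

import java.util.Properties;
import org.apache.oozie.client.OozieClient;

// Submits a workflow with the shared system libpath enabled.
public class SubmitWithSystemLibpath {
  public static void main(String[] args) throws Exception {
    OozieClient client = new OozieClient("http://localhost:11000/oozie");

    Properties conf = client.createConfiguration();
    conf.setProperty(OozieClient.APP_PATH, "hdfs://namenode:8020/user/demo/my-wf");  // placeholder path
    conf.setProperty(OozieClient.USER_NAME, "demo");                                 // placeholder user
    conf.setProperty("oozie.use.system.libpath", "true");

    String jobId = client.run(conf);  // submit and start the workflow job
    System.out.println("Submitted workflow job " + jobId);
  }
}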
oozie.authentication.kerberos.name.rules DEFAULT The kerberos names rules is to resolve kerberos principal names, refer to Hadoop's KerberosName for more details. oozie.coord.execution.none.tolerance 1 Default time tolerance in minutes after action nominal time for an action to be skipped when execution order is "NONE" oozie.coord.actions.default.length 1000 Default number of coordinator actions to be retrieved by the info command oozie.validate.ForkJoin true If true, fork and join should be validated at wf submission time. oozie.workflow.parallel.fork.action.start true Determines how Oozie processes starting of forked actions. If true, forked actions and their job submissions are done in parallel which is best for performance. If false, they are submitted sequentially. oozie.coord.action.get.all.attributes false Setting to true is not recommended as coord job/action info will bring all columns of the action in memory. Set it true only if backward compatibility for action/job info is required. oozie.service.HadoopAccessorService.supported.filesystems hdfs,hftp,webhdfs Enlist the different filesystems supported for federation. If wildcard "*" is specified, then ALL file schemes will be allowed. oozie.service.URIHandlerService.uri.handlers org.apache.oozie.dependency.FSURIHandler Enlist the different uri handlers supported for data availability checks. oozie.notification.url.connection.timeout 10000 Defines the timeout, in milliseconds, for Oozie HTTP notification callbacks. Oozie does HTTP notifications for workflow jobs which set the 'oozie.wf.action.notification.url', 'oozie.wf.worklfow.notification.url' and/or 'oozie.coord.action.notification.url' properties in their job.properties. Refer to section '5 Oozie Notifications' in the Workflow specification for details. oozie.hadoop-2.0.2-alpha.workaround.for.distributed.cache false Due to a bug in Hadoop 2.0.2-alpha, MAPREDUCE-4820, launcher jobs fail to set the distributed cache for the action job because the local JARs are implicitly included triggering a duplicate check. This flag removes the distributed cache files for the action as they'll be included from the local JARs of the JobClient (MRApps) submitting the action job from the launcher. oozie.service.EventHandlerService.filter.app.types workflow_job, coordinator_action The app-types among workflow/coordinator/bundle job/action for which for which events system is enabled. oozie.service.EventHandlerService.event.queue org.apache.oozie.event.MemoryEventQueue The implementation for EventQueue in use by the EventHandlerService. oozie.service.EventHandlerService.event.listeners org.apache.oozie.jms.JMSJobEventListener oozie.service.EventHandlerService.queue.size 10000 Maximum number of events to be contained in the event queue. oozie.service.EventHandlerService.worker.interval 30 The default interval (seconds) at which the worker threads will be scheduled to run and process events. oozie.service.EventHandlerService.batch.size 10 The batch size for batched draining per thread from the event queue. oozie.service.EventHandlerService.worker.threads 3 Number of worker threads to be scheduled to run and process events. oozie.sla.service.SLAService.capacity 5000 Maximum number of sla records to be contained in the memory structure. oozie.sla.service.SLAService.alert.events END_MISS Default types of SLA events for being alerted of. oozie.sla.service.SLAService.calculator.impl org.apache.oozie.sla.SLACalculatorMemory The implementation for SLACalculator in use by the SLAService. 
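oozie.notification.url.connection.timeout governs the HTTP callback timeout in milliseconds. The sketch below is not Oozie's notification code; it only shows a plain HTTP callback issued with that 10000 ms timeout, against a hypothetical endpoint and job id.

import java.net.HttpURLConnection;
import java.net.URL;

// Issues an HTTP notification callback with the documented connection timeout.
public class NotificationCallbackSketch {
  public static void main(String[] args) throws Exception {
    URL url = new URL("http://example.com/oozie-notify?jobId=0000001-demo-W&status=SUCCEEDED");
    HttpURLConnection conn = (HttpURLConnection) url.openConnection();
    conn.setConnectTimeout(10000);   // oozie.notification.url.connection.timeout
    conn.setReadTimeout(10000);
    conn.setRequestMethod("GET");
    System.out.println("Callback response code: " + conn.getResponseCode());
    conn.disconnect();
  }
}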
oozie.sla.service.SLAService.job.event.latency 90000 Time in milliseconds to account of latency of getting the job status event to compare against and decide sla miss/met oozie.sla.service.SLAService.check.interval 30 Time interval, in seconds, at which SLA Worker will be scheduled to run oozie.sla.disable.alerts.older.than 48 Time threshold, in HOURS, for disabling SLA alerting for jobs whose nominal time is older than this. oozie.sla.service.SLAService.maximum.retry.count 3 Number of times an SLA calculator status will be tried to get updated when any database related error occurs. It's possible that multiple WorkflowJobBean / CoordActionBean instances being inserted won't have SLACalcStatus entries inside SLACalculatorMemory#slaMap by the time written to database, and thus, no SLA will be tracked. In those rare cases, preconfigured maximum retry count can be extended. oozie.zookeeper.connection.string localhost:2181 Comma-separated values of host:port pairs of the ZooKeeper servers. oozie.zookeeper.namespace oozie The namespace to use. All of the Oozie Servers that are planning on talking to each other should have the same namespace. oozie.zookeeper.connection.timeout 180 Default ZK connection timeout (in sec). oozie.zookeeper.session.timeout 300 Default ZK session timeout (in sec). If connection is lost even after retry, then Oozie server will shutdown itself if oozie.zookeeper.server.shutdown.ontimeout is true. oozie.zookeeper.max.retries 10 Maximum number of times to retry. oozie.zookeeper.server.shutdown.ontimeout true If true, Oozie server will shutdown itself on ZK connection timeout. oozie.service.ZKLocksService.lock.release.retry.time.limit.minutes 30 On exception while releasing the lock, Oozie will exponentially retry till specified minutes before giving up. oozie.http.hostname 0.0.0.0 Oozie server host name. The network interface Oozie server binds to as an IP address or a hostname. Most users won't need to change this setting from the default value. oozie.http.port 11000 Oozie server port. oozie.http.request.header.size 65536 Oozie HTTP request header size. oozie.http.response.header.size 65536 Oozie HTTP response header size. oozie.https.port 11443 Oozie ssl server port. oozie.https.enabled false Controls whether SSL encryption is enabled. oozie.https.truststore.file Path to a TrustStore file. oozie.https.keystore.file Path to a KeyStore file. oozie.https.keystore.pass Password to the KeyStore. oozie.https.include.protocols TLSv1.1,TLSv1.2,TLSv1.3 Enabled TLS protocols. oozie.https.exclude.protocols Disabled TLS protocols. oozie.https.include.cipher.suites List of Cipher suites to include. oozie.https.exclude.cipher.suites TLS_ECDHE_RSA_WITH_RC4_128_SHA,SSL_DHE_RSA_EXPORT_WITH_DES40_CBC_SHA,SSL_RSA_WITH_DES_CBC_SHA,SSL_DHE_RSA_WITH_DES_CBC_SHA,SSL_RSA_EXPORT_WITH_RC4_40_MD5,SSL_RSA_EXPORT_WITH_DES40_CBC_SHA,SSL_RSA_WITH_RC4_128_MD5 List of weak Cipher suites to exclude. oozie.hsts.max.age.seconds 31536000 Strict Transport Security max age in seconds if SSL is enabled. Ideally it is set to one year (31536000 sec). oozie.jsp.tmp.dir /tmp Temporary directory for compiling JSP pages. oozie.server.threadpool.max.threads 150 Controls the threadpool size for the Oozie Server (if using embbedded Jetty) oozie.service.ShareLibService.mapping.file Sharelib mapping files contains list of key=value, where key will be the sharelib name for the action and value is a comma separated list of DFS or local filesystem directories or jar files. Example. 
oozie.pig_10=hdfs:///share/lib/pig/pig-0.10.1/lib/ oozie.pig=hdfs:///share/lib/pig/pig-0.11.1/lib/ oozie.distcp=hdfs:///share/lib/hadoop-2.2.0/share/hadoop/tools/lib/hadoop-distcp-2.2.0.jar oozie.hive=file:///usr/local/oozie/share/lib/hive/ oozie.service.ShareLibService.fail.fast.on.startup false Fails server starup if sharelib initilzation fails. oozie.service.ShareLibService.purge.interval 1 How often, in days, Oozie should check for old ShareLibs and LauncherLibs to purge from HDFS. oozie.service.ShareLibService.temp.sharelib.retention.days 7 ShareLib retention time in days. oozie.action.ship.launcher.jar false Specifies whether launcher jar is shipped or not. oozie.action.jobinfo.enable false JobInfo will contain information of bundle, coordinator, workflow and actions. If enabled, hadoop job will have property(oozie.job.info) which value is multiple key/value pair separated by ",". This information can be used for analytics like how many oozie jobs are submitted for a particular period, what is the total number of failed pig jobs, etc from mapreduce job history logs and configuration. User can also add custom workflow property to jobinfo by adding property which prefix with "oozie.job.info." Eg. oozie.job.info="bundle.id=,bundle.name=,coord.name=,coord.nominal.time=,coord.name=,wf.id=, wf.name=,action.name=,action.type=,launcher=true" oozie.service.XLogStreamingService.max.log.scan.duration -1 Max log scan duration in hours. If log scan request end_date - start_date > value, then exception is thrown to reduce the scan duration. -1 indicate no limit. oozie.service.XLogStreamingService.actionlist.max.log.scan.duration -1 Max log scan duration in hours for coordinator job when list of actions are specified. If log streaming request end_date - start_date > value, then exception is thrown to reduce the scan duration. -1 indicate no limit. This setting is separate from max.log.scan.duration as we want to allow higher durations when actions are specified. oozie.service.JvmPauseMonitorService.warn-threshold.ms 10000 The JvmPauseMonitorService runs a thread that repeatedly tries to detect when the JVM pauses, which could indicate that the JVM or host machine is overloaded or other problems. This thread sleeps for 500ms; if it sleeps for significantly longer, then there is likely a problem. This property specifies the threadshold for when Oozie should log a WARN level message; there is also a counter named "jvm.pause.warn-threshold". oozie.service.JvmPauseMonitorService.info-threshold.ms 1000 The JvmPauseMonitorService runs a thread that repeatedly tries to detect when the JVM pauses, which could indicate that the JVM or host machine is overloaded or other problems. This thread sleeps for 500ms; if it sleeps for significantly longer, then there is likely a problem. This property specifies the threadshold for when Oozie should log an INFO level message; there is also a counter named "jvm.pause.info-threshold". oozie.service.ZKLocksService.locks.reaper.threshold 300 The frequency at which the ChildReaper will run. Duration should be in sec. Default is 5 min. oozie.service.ZKLocksService.locks.reaper.threads 2 Number of fixed threads used by ChildReaper to delete empty locks. oozie.service.AbandonedCoordCheckerService.check.interval 1440 Interval, in minutes, at which AbandonedCoordCheckerService should run. oozie.service.AbandonedCoordCheckerService.check.delay 60 Delay, in minutes, at which AbandonedCoordCheckerService should run. 
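The JvmPauseMonitorService description above explains the mechanism: a thread sleeps for 500 ms and, if it wakes up much later than requested, a pause is assumed. The sketch below illustrates that idea with the default info (1000 ms) and warn (10000 ms) thresholds; it is not Oozie's implementation.

// Detects long JVM pauses by comparing requested vs. actual sleep time.
public class PauseDetectorSketch {
  public static void main(String[] args) throws InterruptedException {
    final long sleepMs = 500;
    final long infoThresholdMs = 1000;   // oozie.service.JvmPauseMonitorService.info-threshold.ms
    final long warnThresholdMs = 10000;  // oozie.service.JvmPauseMonitorService.warn-threshold.ms
    while (!Thread.currentThread().isInterrupted()) {
      long start = System.currentTimeMillis();
      Thread.sleep(sleepMs);
      long extra = System.currentTimeMillis() - start - sleepMs;  // time beyond the requested sleep
      if (extra > warnThresholdMs) {
        System.out.println("WARN: detected JVM pause of ~" + extra + " ms");
      } else if (extra > infoThresholdMs) {
        System.out.println("INFO: detected JVM pause of ~" + extra + " ms");
      }
    }
  }
}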
oozie.service.AbandonedCoordCheckerService.failure.limit 25 Failure limit. A job is considered to be abandoned/faulty if total number of actions in failed/timedout/suspended >= "Failure limit" and there are no succeeded action. oozie.service.AbandonedCoordCheckerService.kill.jobs false If true, AbandonedCoordCheckerService will kill abandoned coords. oozie.service.AbandonedCoordCheckerService.job.older.than 2880 In minutes, job will be considered as abandoned/faulty if job is older than this value. oozie.notification.proxy System level proxy setting for job notifications. oozie.wf.rerun.disablechild false By setting this option, workflow rerun will be disabled if parent workflow or coordinator exist and it will only rerun through parent. oozie.use.system.libpath false Default value of oozie.use.system.libpath. If user haven't specified =oozie.use.system.libpath= in the job.properties and this value is true and Oozie will include sharelib jars for workflow. oozie.service.PauseTransitService.callable.batch.size 10 This value determines the number of callable which will be batched together to be executed by a single thread. oozie.configuration.substitute.depth 20 This value determines the depth of substitution in configurations. If set -1, No limitation on substitution. oozie.service.SparkConfigurationService.spark.configurations *=spark-conf Comma separated AUTHORITY=SPARK_CONF_DIR, where AUTHORITY is the HOST:PORT of the ResourceManager of a YARN cluster. The wildcard '*' configuration is used when there is no exact match for an authority. The SPARK_CONF_DIR contains the relevant spark-defaults.conf properties file. If the path is relative is looked within the Oozie configuration directory; though the path can be absolute. This is only used when the Spark master is set to either "yarn-client" or "yarn-cluster". oozie.service.SparkConfigurationService.spark.configurations.blacklist spark.yarn.jar,spark.yarn.jars Comma separated list of properties to ignore from any Spark configurations specified in oozie.service.SparkConfigurationService.spark.configurations property. oozie.service.SparkConfigurationService.spark.configurations.ignore.spark.yarn.jar true Deprecated. Use oozie.service.SparkConfigurationService.spark.configurations.blacklist instead. If true, Oozie will ignore the "spark.yarn.jar" property from any Spark configurations specified in oozie.service.SparkConfigurationService.spark.configurations. If false, Oozie will not ignore it. It is recommended to leave this as true because it can interfere with the jars in the Spark sharelib. oozie.email.attachment.enabled true This value determines whether to support email attachment of a file on HDFS. Set it false if there is any security concern. oozie.email.smtp.host localhost The host where the email action may find the SMTP server. oozie.email.smtp.port 25 The port to connect to for the SMTP server, for email actions. oozie.email.smtp.auth false Boolean property that toggles if authentication is to be done or not when using email actions. oozie.email.smtp.starttls.enable false Boolean property that toggles if use TLS in communication or not. oozie.email.smtp.username If authentication is enabled for email actions, the username to login as (to the SMTP server). oozie.email.smtp.password If authentication is enabled for email actions, the password to login with (to the SMTP server). oozie.email.from.address oozie@localhost The from address to be used for mailing all emails done via the email action. 
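A minimal sketch of how the oozie.email.smtp.* defaults above map onto the standard JavaMail API. This is not Oozie's EmailActionExecutor; the recipient address, subject and body are placeholders, and it assumes an SMTP server is actually listening on localhost:25.

import java.util.Properties;
import javax.mail.Message;
import javax.mail.Session;
import javax.mail.Transport;
import javax.mail.internet.InternetAddress;
import javax.mail.internet.MimeMessage;

// Sends a mail using the same host/port/auth/TLS settings as the defaults above.
public class SmtpSketch {
  public static void main(String[] args) throws Exception {
    Properties props = new Properties();
    props.put("mail.smtp.host", "localhost");        // oozie.email.smtp.host
    props.put("mail.smtp.port", "25");               // oozie.email.smtp.port
    props.put("mail.smtp.auth", "false");            // oozie.email.smtp.auth
    props.put("mail.smtp.starttls.enable", "false"); // oozie.email.smtp.starttls.enable

    Session session = Session.getInstance(props);
    MimeMessage msg = new MimeMessage(session);
    msg.setFrom(new InternetAddress("oozie@localhost"));  // oozie.email.from.address
    msg.setRecipient(Message.RecipientType.TO, new InternetAddress("admin@example.com"));
    msg.setSubject("Workflow finished");
    msg.setText("Example body");
    Transport.send(msg);
  }
}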
oozie.email.smtp.socket.timeout.ms 10000 The timeout to apply over all SMTP server socket operations done during the email action. oozie.actions.default.name-node The default value to use for the <name-node> element in applicable action types. This value will be used when neither the action itself nor the global section specifies a <name-node>. As expected, it should be of the form "hdfs://HOST:PORT". oozie.actions.default.job-tracker The default value to use for the <job-tracker> element in applicable action types. This value will be used when neither the action itself nor the global section specifies a <job-tracker>. As expected, it should be of the form "HOST:PORT". oozie.actions.default.resource-manager The default value to use for the <resource-manager> element in applicable action types. This value will be used when neither the action itself nor the global section specifies a <resource-managerr>. As expected, it should be of the form "HOST:PORT". If both oozie.actions.default.job-tracker and oozie.actions.default.resource-manager are specified, oozie.actions.default.resource-manager takes precedence. oozie.service.SchemaCheckerService.check.interval 168 This is the interval at which Oozie will check the database schema, in hours. A zero or negative value will disable the checker. oozie.service.SchemaCheckerService.ignore.extras false When set to false, the schema checker will consider extra (unused) tables, columns, and indexes to be incorrect. When set to true, these will be ignored. oozie.hcat.uri.regex.pattern ([a-z]+://[\w\.\-]+:\d+[,]*)+/\w+/\w+/?[\w+=;\-]* Regex pattern for HCat URIs. The regex can be modified by users as per requirement for parsing/splitting the HCat URIs. oozie.action.null.args.allowed true When set to true, empty arguments (like <arg></arg>) will be passed as "null" to the main method of a given action. That is, the args[] array will contain "null" elements. When set to false, then "nulls" are removed. oozie.javax.xml.parsers.DocumentBuilderFactory org.apache.xerces.jaxp.DocumentBuilderFactoryImpl Oozie will set the javax.xml.parsers.DocumentBuilderFactory Java System Property to this value. This helps speed up XML handling because the JVM doesn't have to search for the proper class every time. An empty or whitespace value skips setting the System Property. The default implementation that Oozie uses is Xerces. Most users should not have to change this. oozie.graphviz.timeout.seconds 60 The default number of seconds Graphviz graph generation will timeout. oozie.launcher.default.vcores 1 The default number of vcores that are allocated for the Launcher AMs oozie.launcher.default.memory.mb 2048 The default amount of memory in MBs that is allocated for the Launcher AMs oozie.launcher.default.priority 0 The default YARN priority of the Launcher AM oozie.launcher.default.queue default The default YARN queue where the Launcher AM is placed oozie.launcher.default.max.attempts 2 The default YARN maximal attempt count of the Launcher AM oozie.launcher.override true Whether oozie.launcher.override.* and oozie.launcher.prepend.* parameters have to be considered when submitting a YARN LauncherAM. That is, existing MapReduce v1, MapReduce v2, or YARN parameters used in the action configuration should be populated to the Application Master launcher configuration, or not. Generally, first <launcher/> tag specific user settings, then YARN configuration settings, then MapReduce v2, and at last, MapReduce v1 properties are copied to launcher configuration. 
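A quick check (not Oozie code) that a typical HCat partition URI matches the default oozie.hcat.uri.regex.pattern listed above; the server, database, table and partition values are made up for the example.

import java.util.regex.Pattern;

// Verifies a sample HCat URI against the default HCat URI regex.
public class HcatUriPatternCheck {
  public static void main(String[] args) {
    String pattern = "([a-z]+://[\\w\\.\\-]+:\\d+[,]*)+/\\w+/\\w+/?[\\w+=;\\-]*";
    String uri = "hcat://hcat.example.com:9083/mydb/mytable/dt=20121212;region=us";
    System.out.println(Pattern.matches(pattern, uri));  // expected: true
  }
}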
oozie.launcher.override.max.attempts mapreduce.map.maxattempts,mapred.map.max.attempts A comma separated list of MapReduce v1 and MapReduce v2 properties to override the max attempts of the MapReduce Application Master. The first one that is found will be used. oozie.launcher.override.memory.mb yarn.app.mapreduce.am.resource.mb,mapreduce.map.memory.mb,mapred.job.map.memory.mb A comma separated list of MapReduce v1, MapReduce v2, and YARN properties to override the memory amount in MB of the MapReduce Application Master. The first one that is found will be used. oozie.launcher.override.vcores yarn.app.mapreduce.am.resource.cpu-vcores,mapreduce.map.cpu.vcores A comma separated list of MapReduce v1, MapReduce v2, and YARN properties to override the CPU vcore count of the MapReduce Application Master. The first one that is found will be used. oozie.launcher.override.log.level mapreduce.map.log.level,mapred.map.child.log.level A comma separated list of MapReduce v1, MapReduce v2, and YARN properties to override the logging level of the MapReduce Application Master. The first one that is found will be used. oozie.launcher.override.javaopts yarn.app.mapreduce.am.command-opts,mapreduce.map.java.opts,mapred.child.java.opts A comma separated list of MapReduce v1, MapReduce v2, and YARN properties to override MapReduce Application Master JVM options. The first one that is found will be used. oozie.launcher.prepend.javaopts yarn.app.mapreduce.am.admin-command-opts A comma separated list of YARN properties to prepend to MapReduce Application Master JVM options. The first one that is found will be prepended to the list of JVM options. oozie.launcher.override.env yarn.app.mapreduce.am.env,mapreduce.map.env,mapred.child.env A comma separated list of MapReduce v1, MapReduce v2, and YARN properties to override MapReduce Application Master environment variable settings. The first one that is found will be used. oozie.launcher.prepend.env yarn.app.mapreduce.am.admin.user.env A comma separated list of YARN properties to prepend to MapReduce Application Master environment settings. The first one that is found will be prepended to the list of environment settings. oozie.launcher.override.priority mapreduce.job.priority,mapred.job.priority A comma separated list of MapReduce v1 and MapReduce v2 to override MapReduce Application Master job priority. The first one that is found will be used. oozie.launcher.override.queue mapreduce.job.queuename,mapred.job.queue.name A comma separated list of MapReduce v1 and MapReduce v2 properties to override MapReduce Application Master job queue name. The first one that is found will be used. oozie.launcher.override.view.acl mapreduce.job.acl-view-job A comma separated list of MapReduce v1 and MapReduce v2 properties to override MapReduce View ACL settings. The first one that is found will be used. oozie.launcher.override.modify.acl mapreduce.job.acl-modify-job A comma separated list of MapReduce v1 and MapReduce v2 properties to override MapReduce Modify ACL settings. The first one that is found will be used. oozie.action.mapreduce.needed.for.distcp true Whether to add MapReduce jars to the DistCp action's classpath's by default. oozie.action.mapreduce.needed.for.hive true Whether to add MapReduce jars to the Hive action's classpath's by default. oozie.action.mapreduce.needed.for.hive2 true Whether to add MapReduce jars to the Hive2 action's classpath's by default. 
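The oozie.launcher.override.* properties all apply a "first one that is found will be used" rule over a comma separated key list. The sketch below only illustrates that lookup order against a plain map standing in for the action configuration; it is not Oozie's launcher code.

import java.util.LinkedHashMap;
import java.util.Map;

// Returns the value of the first key in the list that is present in the action configuration.
public class FirstFoundOverrideSketch {
  static String firstFound(Map<String, String> actionConf, String commaSeparatedKeys) {
    for (String key : commaSeparatedKeys.split(",")) {
      String value = actionConf.get(key.trim());
      if (value != null) {
        return value;
      }
    }
    return null;  // fall back to the launcher default when none of the keys is set
  }

  public static void main(String[] args) {
    Map<String, String> actionConf = new LinkedHashMap<>();
    actionConf.put("mapreduce.map.memory.mb", "4096");  // only the MRv2 name is set

    String memory = firstFound(actionConf,
        "yarn.app.mapreduce.am.resource.mb,mapreduce.map.memory.mb,mapred.job.map.memory.mb");
    System.out.println("launcher memory.mb = " + memory);  // 4096
  }
}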
oozie.action.mapreduce.needed.for.java true Whether to add MapReduce jars to the Java action's classpath's by default. oozie.action.mapreduce.needed.for.map-reduce true Whether to add MapReduce jars to the Map-Reduce action's classpath's by default. oozie.action.mapreduce.needed.for.pig true Whether to add MapReduce jars to the Pig action's classpath's by default. oozie.action.mapreduce.needed.for.sqoop true Whether to add MapReduce jars to the Sqoop action's classpath's by default. oozie.action.sqoop.shellsplitter false Whether to use shell splitter instead of the space-based tokenizer during sqoop command splitting. oozie.fluent-job-api.generated.path /user/${user.name}/oozie-fluent-job-api-generated HDFS path to store workflow / coordinator / bundle definitions generated by fluent-job-api artifact. The XML files are first generated out of the fluent-job-api JARs submitted by the user at command line, then stored under this HDFS folder structure for later retrieval / resubmit / check. Note that the submitting user needs r/w permissions under this HDFS folder. Note further that this folder structure, when does not exist, will be created. ================================================ FILE: kettle-plugins/hadoop-cluster/ui/src/test/resources/unsecured/yarn-site.xml ================================================ yarn.acl.enable true yarn.admin.acl * yarn.resourcemanager.address svqxbdcn6cdh514un3.pentahoqa.com:8032 yarn.resourcemanager.admin.address svqxbdcn6cdh514un3.pentahoqa.com:8033 yarn.resourcemanager.scheduler.address svqxbdcn6cdh514un3.pentahoqa.com:8030 yarn.resourcemanager.resource-tracker.address svqxbdcn6cdh514un3.pentahoqa.com:8031 yarn.resourcemanager.webapp.address svqxbdcn6cdh514un3.pentahoqa.com:8088 yarn.resourcemanager.webapp.https.address svqxbdcn6cdh514un3.pentahoqa.com:8090 yarn.resourcemanager.client.thread-count 50 yarn.resourcemanager.scheduler.client.thread-count 50 yarn.resourcemanager.admin.client.thread-count 1 yarn.scheduler.minimum-allocation-mb 1024 yarn.scheduler.increment-allocation-mb 512 yarn.scheduler.maximum-allocation-mb 3784 yarn.scheduler.minimum-allocation-vcores 1 yarn.scheduler.increment-allocation-vcores 1 yarn.scheduler.maximum-allocation-vcores 4 yarn.resourcemanager.amliveliness-monitor.interval-ms 1000 yarn.am.liveness-monitor.expiry-interval-ms 600000 yarn.resourcemanager.am.max-attempts 2 yarn.resourcemanager.container.liveness-monitor.interval-ms 600000 yarn.resourcemanager.nm.liveness-monitor.interval-ms 1000 yarn.nm.liveness-monitor.expiry-interval-ms 600000 yarn.resourcemanager.resource-tracker.client.thread-count 50 yarn.application.classpath $HADOOP_CLIENT_CONF_DIR,$HADOOP_CONF_DIR,$HADOOP_COMMON_HOME/*,$HADOOP_COMMON_HOME/lib/*,$HADOOP_HDFS_HOME/*,$HADOOP_HDFS_HOME/lib/*,$HADOOP_YARN_HOME/*,$HADOOP_YARN_HOME/lib/* yarn.resourcemanager.scheduler.class org.apache.hadoop.yarn.server.resourcemanager.scheduler.fair.FairScheduler yarn.resourcemanager.max-completed-applications 10000 yarn.nodemanager.remote-app-log-dir /tmp/logs yarn.nodemanager.remote-app-log-dir-suffix logs ================================================ FILE: kettle-plugins/hbase/assemblies/plugin/pom.xml ================================================ 4.0.0 hbase-assemblies pentaho 11.1.0.0-SNAPSHOT pdi-hbase-plugin pom PDI HBase Plugin Distribution ${project.basedir}/src/main/resources ${project.build.directory}/assembly pentaho pdi-hbase-core ${project.version} ================================================ FILE: 
kettle-plugins/hbase/assemblies/plugin/src/assembly/assembly.xml ================================================ zip zip ${resources.directory} . true ${assembly.dir} . . pentaho:pdi-hbase-core:jar false runtime . false false pentaho:pdi-hbase-core:jar runtime false lib pentaho:pdi-hbase-core:* pentaho:pentaho-big-data-kettle-plugins-hbase-meta ================================================ FILE: kettle-plugins/hbase/assemblies/plugin/src/main/resources/version.xml ================================================ ${project.version} ================================================ FILE: kettle-plugins/hbase/assemblies/pom.xml ================================================ 4.0.0 pentaho pentaho-big-data-kettle-plugins-hbase 11.1.0.0-SNAPSHOT hbase-assemblies pom PDI HBase Plugin Assemblies plugin ================================================ FILE: kettle-plugins/hbase/core/pom.xml ================================================ 4.0.0 pentaho pentaho-big-data-kettle-plugins-hbase 11.1.0.0-SNAPSHOT pdi-hbase-core PDI Hbase Core site src/main/resources false src/main/resources-filtered true org.pentaho shim-api ${pentaho-hadoop-shims.version} provided pentaho pentaho-big-data-kettle-plugins-common-ui ${project.version} provided pentaho pentaho-big-data-impl-cluster ${project.version} org.pentaho pentaho-hadoop-shims-common-services-api ${project.version} pentaho pentaho-big-data-impl-cluster ${project.version} org.pentaho pentaho-hadoop-shims-common-services-api ${project.version} pentaho-kettle kettle-core ${pdi.version} provided org.pentaho.di.plugins pentaho-metastore-locator-api ${pdi.version} provided pentaho-kettle kettle-engine ${pdi.version} provided pentaho-kettle kettle-ui-swt ${pdi.version} provided junit junit ${dependency.junit.revision} test org.mockito mockito-core ${mockito.version} test org.mockito mockito-inline ${mockito-inline.version} test pentaho-kettle kettle-engine ${pdi.version} tests test pentaho pentaho-big-data-legacy ${project.version} test pentaho pentaho-big-data-legacy-core ${project.version} compile pentaho pentaho-big-data-legacy-core ${project.version} compile pentaho pentaho-big-data-legacy-core ${project.version} provided pentaho pentaho-big-data-kettle-plugins-hbase-meta ${project.version} compile pentaho-kettle kettle-core ${pdi.version} tests test org.pentaho.hadoop.shims pentaho-hadoop-shims-common-base ${pentaho-hadoop-shims.version} test ================================================ FILE: kettle-plugins/hbase/core/src/main/java/org/pentaho/big/data/kettle/plugins/hbase/FilterDefinition.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. 
* * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.hbase; import org.pentaho.hadoop.shim.api.hbase.mapping.ColumnFilter; import org.pentaho.di.core.exception.KettleValueException; import org.pentaho.di.core.injection.Injection; import org.pentaho.di.core.injection.InjectionTypeConverter; public class FilterDefinition { @Injection( name = "ALIAS", group = "FILTER" ) private String alias; @Injection( name = "FIELD_TYPE", group = "FILTER" ) private String fieldType; @Injection( name = "COMPARISON_TYPE", group = "FILTER", converter = ComparisonTypeConverter.class ) private ColumnFilter.ComparisonType comparisonType; @Injection( name = "SIGNED_COMPARISON", group = "FILTER" ) private boolean signedComparison; @Injection( name = "COMPARISON_VALUE", group = "FILTER" ) private String constant; @Injection( name = "FORMAT", group = "FILTER" ) private String format; public String getAlias() { return alias; } public void setAlias( String alias ) { this.alias = alias; } public String getFieldType() { return fieldType; } public void setFieldType( String fieldType ) { this.fieldType = fieldType; } public ColumnFilter.ComparisonType getComparisonType() { return comparisonType; } public void setComparisonType( ColumnFilter.ComparisonType comparisonType ) { this.comparisonType = comparisonType; } public boolean isSignedComparison() { return signedComparison; } public void setSignedComparison( boolean signedComparison ) { this.signedComparison = signedComparison; } public String getConstant() { return constant; } public void setConstant( String constant ) { this.constant = constant; } public String getFormat() { return format; } public void setFormat( String format ) { this.format = format; } public static class ComparisonTypeConverter extends InjectionTypeConverter { @Override public ColumnFilter.ComparisonType string2enum( Class enumClass, String value ) throws KettleValueException { return ColumnFilter.ComparisonType.stringToOpp( value ); } } } ================================================ FILE: kettle-plugins/hbase/core/src/main/java/org/pentaho/big/data/kettle/plugins/hbase/HBaseConnectionException.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.hbase; /** * @author Tatsiana_Kasiankova * */ public class HBaseConnectionException extends Exception { private static final long serialVersionUID = -6215675067801506240L; public HBaseConnectionException( String message, Throwable cause ) { super( message, cause ); } } ================================================ FILE: kettle-plugins/hbase/core/src/main/java/org/pentaho/big/data/kettle/plugins/hbase/HbaseUtil.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. 
* * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.hbase; import org.pentaho.di.core.util.StringUtil; public class HbaseUtil { public static final String HBASE_NAMESPACE_DELIMITER = ":"; public static final String HBASE_DEFAULT_NAMESPACE = "default"; private HbaseUtil() { } public static String parseNamespaceFromTableName( String tableName ) { return parseNamespaceFromTableName( tableName, HBASE_DEFAULT_NAMESPACE ); } public static String parseNamespaceFromTableName( String tableName, String defaultNamespaceIfNoneSpecified ) { String nameSpace = null; if ( tableName.contains( HBASE_NAMESPACE_DELIMITER ) ) { nameSpace = tableName.substring( 0, tableName.indexOf( HBASE_NAMESPACE_DELIMITER ) ).trim(); } if ( nameSpace == null || nameSpace.isEmpty() ) { return defaultNamespaceIfNoneSpecified; } else { return nameSpace; } } public static String parseQualifierFromTableName( String tableName ) { if ( tableName.contains( HBASE_NAMESPACE_DELIMITER ) ) { return tableName.substring( tableName.indexOf( HBASE_NAMESPACE_DELIMITER ) + 1 ).trim(); } else { return tableName.trim(); } } /** * Force the namespace on the qualifier received. If the qualifier already has a namespace, it is replaced by the * one supplied. * * @param namespace * @param qualifier * @return */ public static String expandTableName( String namespace, String qualifier ) { if ( namespace == null || namespace.isEmpty() || qualifier == null ) { throw new IllegalArgumentException( "Namespace must have a value, qualifier must not be null" ); } if ( qualifier.indexOf( HBASE_NAMESPACE_DELIMITER ) > -1 ) { return namespace + HBASE_NAMESPACE_DELIMITER + qualifier .substring( qualifier.indexOf( HBASE_NAMESPACE_DELIMITER ) + 1 ); } return namespace + HBASE_NAMESPACE_DELIMITER + qualifier; } /** * Returns a fully qualified table name. If the incoming name already has a namespace it is honored, otherwise it * is prefixed with the default namespace. * * @param qualifier * @return namespace:qualifier */ public static String expandTableName( String qualifier ) { if ( qualifier == null ) { return HBASE_DEFAULT_NAMESPACE + HBASE_NAMESPACE_DELIMITER; } int pos = qualifier.indexOf( HBASE_NAMESPACE_DELIMITER ); if ( pos > 0 ) { return qualifier; } if ( pos == 0 ) { return HBASE_DEFAULT_NAMESPACE + qualifier; } return HBASE_DEFAULT_NAMESPACE + HBASE_NAMESPACE_DELIMITER + qualifier; } public static String expandLegacyTableNameOnLoad( String qualifier ) { if ( qualifier == null ) { return expandTableName( "" ); } int pos = Math.min( positionOfString( qualifier, StringUtil.UNIX_OPEN ), positionOfString( qualifier, StringUtil.WINDOWS_OPEN ) ); if ( pos == qualifier.length() ) { // No variables in qualifier return expandTableName( qualifier ); } int delimPos = qualifier.indexOf( HBASE_NAMESPACE_DELIMITER ); if ( delimPos > -1 && delimPos < pos ) { // hard delimiter exists before the variables, so ok to parse return expandTableName( qualifier ); } // variable could be the namespace, or not, we can't tell without substitution return qualifier; } private static int positionOfString( String target, String search ) { int pos = target.indexOf( search ); if ( pos == -1 ) { return target.length(); } return pos; } } ================================================ FILE: kettle-plugins/hbase/core/src/main/java/org/pentaho/big/data/kettle/plugins/hbase/MappingDefinition.java ================================================ /*!
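A few worked examples for the HbaseUtil helpers above; the table names are illustrative, and the expected results shown in the comments follow directly from the methods:

import org.pentaho.big.data.kettle.plugins.hbase.HbaseUtil;

public class HbaseUtilSketch {
  public static void main( String[] args ) {
    // No namespace supplied: the "default" namespace is assumed.
    System.out.println( HbaseUtil.expandTableName( "weblogs" ) );                  // default:weblogs
    System.out.println( HbaseUtil.parseNamespaceFromTableName( "weblogs" ) );      // default
    // An explicit namespace is split off at the ':' delimiter.
    System.out.println( HbaseUtil.parseNamespaceFromTableName( "ops:weblogs" ) );  // ops
    System.out.println( HbaseUtil.parseQualifierFromTableName( "ops:weblogs" ) );  // weblogs
    // The two-argument form forces the supplied namespace, replacing any existing one.
    System.out.println( HbaseUtil.expandTableName( "prod", "ops:weblogs" ) );      // prod:weblogs
  }
}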
****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.hbase; import java.util.List; import org.pentaho.di.core.injection.Injection; import org.pentaho.di.core.injection.InjectionDeep; public class MappingDefinition { @Injection( name = "TABLE_NAME", group = "MAPPING" ) private String tableName; @Injection( name = "MAPPING_NAME", group = "MAPPING" ) private String mappingName; @InjectionDeep private List mappingColumns; public String getTableName() { return tableName; } public void setTableName( String tableName ) { this.tableName = tableName; } public String getMappingName() { return mappingName; } public void setMappingName( String mappingName ) { this.mappingName = mappingName; } public List getMappingColumns() { return mappingColumns; } public void setMappingColumns( List mappingColumns ) { this.mappingColumns = mappingColumns; } public static class MappingColumn { @Injection( name = "MAPPING_ALIAS", group = "MAPPING" ) private String alias; @Injection( name = "MAPPING_KEY", group = "MAPPING" ) private boolean key; @Injection( name = "MAPPING_COLUMN_FAMILY", group = "MAPPING" ) private String columnFamily; @Injection( name = "MAPPING_COLUMN_NAME", group = "MAPPING" ) private String columnName; @Injection( name = "MAPPING_TYPE", group = "MAPPING" ) private String type; @Injection( name = "MAPPING_INDEXED_VALUES", group = "MAPPING" ) private String indexedValues; public String getAlias() { return alias; } public void setAlias( String alias ) { this.alias = alias; } public boolean isKey() { return key; } public void setKey( boolean key ) { this.key = key; } public String getColumnFamily() { return columnFamily; } public void setColumnFamily( String columnFamily ) { this.columnFamily = columnFamily; } public String getColumnName() { return columnName; } public void setColumnName( String columnName ) { this.columnName = columnName; } public String getType() { return type; } public void setType( String type ) { this.type = type; } public String getIndexedValues() { return indexedValues; } public void setIndexedValues( String indexedValues ) { this.indexedValues = indexedValues; } } } ================================================ FILE: kettle-plugins/hbase/core/src/main/java/org/pentaho/big/data/kettle/plugins/hbase/NamedClusterLoadSaveUtil.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. 
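A minimal sketch of building the MappingDefinition bean above by hand (metadata injection over the MAPPING group normally populates it); the table, mapping and column values are illustrative:

import java.util.ArrayList;
import java.util.List;

import org.pentaho.big.data.kettle.plugins.hbase.MappingDefinition;

public class MappingDefinitionSketch {
  public static MappingDefinition exampleMapping() {
    MappingDefinition def = new MappingDefinition();
    def.setTableName( "default:weblogs" );    // illustrative table name
    def.setMappingName( "weblogs-mapping" );  // illustrative mapping name

    MappingDefinition.MappingColumn keyColumn = new MappingDefinition.MappingColumn();
    keyColumn.setAlias( "key" );
    keyColumn.setKey( true );
    keyColumn.setType( "String" );            // illustrative key type

    MappingDefinition.MappingColumn ipColumn = new MappingDefinition.MappingColumn();
    ipColumn.setAlias( "client_ip" );
    ipColumn.setColumnFamily( "data" );       // illustrative column family
    ipColumn.setColumnName( "ip" );           // illustrative column qualifier
    ipColumn.setType( "String" );

    List<MappingDefinition.MappingColumn> columns = new ArrayList<>();
    columns.add( keyColumn );
    columns.add( ipColumn );
    def.setMappingColumns( columns );
    return def;
  }
}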
* * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.hbase; import org.apache.commons.lang.StringUtils; import org.pentaho.hadoop.shim.api.cluster.NamedCluster; import org.pentaho.hadoop.shim.api.cluster.NamedClusterService; import org.pentaho.di.core.exception.KettleException; import org.pentaho.di.core.logging.LogChannelInterface; import org.pentaho.di.core.util.Utils; import org.pentaho.di.core.xml.XMLHandler; import org.pentaho.di.repository.ObjectId; import org.pentaho.di.repository.Repository; import org.pentaho.metastore.api.IMetaStore; import org.pentaho.metastore.api.exceptions.MetaStoreException; import org.w3c.dom.Node; /** * Created by bryan on 1/19/16. */ public class NamedClusterLoadSaveUtil { public static final String CLUSTER_NAME = "cluster_name"; public static final String ZOOKEEPER_HOSTS = "zookeeper_hosts"; public static final String ZOOKEEPER_PORT = "zookeeper_port"; public NamedCluster loadClusterConfig( NamedClusterService namedClusterService, ObjectId id_jobentry, Repository rep, IMetaStore metaStore, Node entrynode, LogChannelInterface logChannelInterface ) { // load from system first, then fall back to copy stored with job (AbstractMeta) NamedCluster nc = null; String clusterName = null; try { // attempt to load from named cluster if ( entrynode != null ) { clusterName = XMLHandler.getTagValue( entrynode, CLUSTER_NAME ); //$NON-NLS-1$ } else if ( rep != null ) { clusterName = rep.getJobEntryAttributeString( id_jobentry, CLUSTER_NAME ); //$NON-NLS-1$ //$NON-NLS-2$ } if ( !StringUtils.isEmpty( clusterName ) ) { nc = namedClusterService.getNamedClusterByName( clusterName, metaStore ); } if ( nc != null ) { return nc; } } catch ( Throwable t ) { logChannelInterface.logDebug( t.getMessage(), t ); } nc = namedClusterService.getClusterTemplate(); if ( !StringUtils.isEmpty( clusterName ) ) { nc.setName( clusterName ); } if ( entrynode != null ) { // load default values for cluster & legacy fallback nc.setZooKeeperHost( XMLHandler.getTagValue( entrynode, ZOOKEEPER_HOSTS ) ); //$NON-NLS-1$ nc.setZooKeeperPort( XMLHandler.getTagValue( entrynode, ZOOKEEPER_PORT ) ); //$NON-NLS-1$ } else if ( rep != null ) { // load default values for cluster & legacy fallback try { nc.setZooKeeperHost( rep.getJobEntryAttributeString( id_jobentry, ZOOKEEPER_HOSTS ) ); nc.setZooKeeperPort( rep.getJobEntryAttributeString( id_jobentry, ZOOKEEPER_PORT ) ); //$NON-NLS-1$ } catch ( KettleException ke ) { logChannelInterface.logError( ke.getMessage(), ke ); } } return nc; } public void getXml( StringBuilder retval, NamedClusterService namedClusterService, NamedCluster namedCluster, IMetaStore metaStore, LogChannelInterface logChannelInterface ) { String namedClusterName = namedCluster.getName(); String m_zookeeperHosts = namedCluster.getZooKeeperHost(); String m_zookeeperPort = namedCluster.getZooKeeperPort(); if ( !StringUtils.isEmpty( namedClusterName ) ) { retval.append( "\n " ) .append( XMLHandler.addTagValue( CLUSTER_NAME, namedClusterName ) ); //$NON-NLS-1$ //$NON-NLS-2$ try { if ( metaStore != null && namedClusterService.contains( namedClusterName, metaStore ) ) { // pull config from NamedCluster NamedCluster nc = namedClusterService.read( namedClusterName, metaStore ); if ( nc != null ) { m_zookeeperHosts = nc.getZooKeeperHost(); m_zookeeperPort = nc.getZooKeeperPort(); } } } catch ( MetaStoreException e ) { logChannelInterface.logDebug( e.getMessage(), e ); } } if ( !Utils.isEmpty( 
m_zookeeperHosts ) ) { retval.append( "\n " ).append( XMLHandler.addTagValue( ZOOKEEPER_HOSTS, m_zookeeperHosts ) ); } if ( !Utils.isEmpty( m_zookeeperPort ) ) { retval.append( "\n " ).append( XMLHandler.addTagValue( ZOOKEEPER_PORT, m_zookeeperPort ) ); } } public void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_transformation, ObjectId id_step, NamedClusterService namedClusterService, NamedCluster namedCluster, LogChannelInterface logChannelInterface ) throws KettleException { String namedClusterName = namedCluster.getName(); String m_zookeeperHosts = namedCluster.getZooKeeperHost(); String m_zookeeperPort = namedCluster.getZooKeeperPort(); if ( !StringUtils.isEmpty( namedClusterName ) ) { rep.saveStepAttribute( id_transformation, id_step, CLUSTER_NAME, namedClusterName ); //$NON-NLS-1$ try { if ( namedClusterService.contains( namedClusterName, metaStore ) ) { // pull config from NamedCluster NamedCluster nc = namedClusterService.read( namedClusterName, metaStore ); m_zookeeperHosts = nc.getZooKeeperHost(); m_zookeeperPort = nc.getZooKeeperPort(); } } catch ( MetaStoreException e ) { logChannelInterface.logDebug( e.getMessage(), e ); } } if ( !Utils.isEmpty( m_zookeeperHosts ) ) { rep.saveStepAttribute( id_transformation, id_step, 0, ZOOKEEPER_HOSTS, m_zookeeperHosts ); } if ( !Utils.isEmpty( m_zookeeperPort ) ) { rep.saveStepAttribute( id_transformation, id_step, 0, ZOOKEEPER_PORT, m_zookeeperPort ); } } } ================================================ FILE: kettle-plugins/hbase/core/src/main/java/org/pentaho/big/data/kettle/plugins/hbase/ServiceStatus.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.hbase; /** * Helper class that shows HBaseService status of a Step. */ public class ServiceStatus { public static ServiceStatus OK = new ServiceStatus(); private boolean ok = true; private Exception exception; private ServiceStatus() { } private ServiceStatus( Exception exception ) { this.ok = false; this.exception = exception; } public boolean isOk() { return ok; } public Exception getException() { return exception; } public static ServiceStatus notOk( Exception e ) { return new ServiceStatus( e ); } } ================================================ FILE: kettle-plugins/hbase/core/src/main/java/org/pentaho/big/data/kettle/plugins/hbase/input/HBaseInput.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. 
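The NamedClusterLoadSaveUtil above round-trips named-cluster settings through a step or job entry's XML. A sketch of that behaviour with illustrative values; only the three tag names come from the constants in the class:

// Serialization: getXml(...) appends a fragment along these lines to the step XML
// (cluster name and ZooKeeper values are illustrative):
//
//   <cluster_name>devCluster</cluster_name>
//   <zookeeper_hosts>zk1.example.com,zk2.example.com</zookeeper_hosts>
//   <zookeeper_port>2181</zookeeper_port>
//
// Deserialization: loadClusterConfig(...) first resolves cluster_name against the MetaStore
// through NamedClusterService; only when that lookup fails does it fall back to the cluster
// template, populated with the legacy zookeeper_hosts / zookeeper_port values stored with the
// step or job entry.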
* * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.hbase.input; import java.io.IOException; import java.util.ArrayList; import java.util.List; import java.util.Map; import org.pentaho.hadoop.shim.api.cluster.NamedClusterServiceLocator; import org.pentaho.big.data.kettle.plugins.hbase.mapping.HBaseRowToKettleTuple; import org.pentaho.big.data.kettle.plugins.hbase.mapping.MappingAdmin; import org.pentaho.hadoop.shim.api.hbase.ByteConversionUtil; import org.pentaho.hadoop.shim.api.hbase.HBaseConnection; import org.pentaho.hadoop.shim.api.hbase.HBaseService; import org.pentaho.hadoop.shim.api.hbase.Result; import org.pentaho.hadoop.shim.api.hbase.mapping.Mapping; import org.pentaho.hadoop.shim.api.hbase.meta.HBaseValueMetaInterface; import org.pentaho.hadoop.shim.api.hbase.meta.HBaseValueMetaInterfaceFactory; import org.pentaho.hadoop.shim.api.hbase.table.HBaseTable; import org.pentaho.hadoop.shim.api.hbase.table.ResultScanner; import org.pentaho.hadoop.shim.api.hbase.table.ResultScannerBuilder; import org.pentaho.di.core.Const; import org.pentaho.di.core.exception.KettleException; import org.pentaho.di.core.row.RowMeta; import org.pentaho.di.core.row.ValueMetaInterface; import org.pentaho.di.core.util.StringUtil; import org.pentaho.di.i18n.BaseMessages; import org.pentaho.di.trans.Trans; import org.pentaho.di.trans.TransMeta; import org.pentaho.di.trans.step.BaseStep; import org.pentaho.di.trans.step.StepDataInterface; import org.pentaho.di.trans.step.StepInterface; import org.pentaho.di.trans.step.StepMeta; import org.pentaho.di.trans.step.StepMetaInterface; /** * Class providing an input step for reading data from an HBase table according to meta data mapping info stored in a * separate HBase table called "pentaho_mappings". See org.pentaho.hbase.mapping.Mapping for details on the meta data * format. 
* * @author Mark Hall (mhall{[at]}pentaho{[dot]}com) */ public class HBaseInput extends BaseStep implements StepInterface { private final NamedClusterServiceLocator namedClusterServiceLocator; protected HBaseInputMeta m_meta; protected HBaseInputData m_data; private HBaseService hBaseService; private HBaseTable m_hbAdminTable; private ResultScanner resultScanner; private HBaseValueMetaInterfaceFactory hBaseValueMetaInterfaceFactory; public HBaseInput( StepMeta stepMeta, StepDataInterface stepDataInterface, int copyNr, TransMeta transMeta, Trans trans, NamedClusterServiceLocator namedClusterServiceLocator ) { super( stepMeta, stepDataInterface, copyNr, transMeta, trans ); this.namedClusterServiceLocator = namedClusterServiceLocator; } /** Connection/admin object for interacting with HBase */ protected HBaseConnection m_hbAdmin; /** Byte utilities */ protected ByteConversionUtil m_bytesUtil; /** The mapping admin object for interacting with mapping information */ protected MappingAdmin m_mappingAdmin; /** The mapping information to use in order to decode HBase column values */ protected Mapping m_tableMapping; /** Information from the mapping */ protected Map m_columnsMappedByAlias; /** User-selected columns from the mapping (null indicates output all columns) */ protected List m_userOutputColumns; /** * Used when decoding columns to tuples */ protected HBaseRowToKettleTuple m_tupleHandler; @Override public boolean processRow( StepMetaInterface smi, StepDataInterface sdi ) throws KettleException { if ( first ) { first = false; m_meta = (HBaseInputMeta) smi; m_data = (HBaseInputData) sdi; // Get the connection to HBase try { List connectionMessages = new ArrayList(); hBaseService = namedClusterServiceLocator.getService( m_meta.getNamedCluster(), HBaseService.class ); m_hbAdmin = hBaseService.getHBaseConnection( this, environmentSubstitute( m_meta.getCoreConfigURL() ), environmentSubstitute( m_meta.getDefaultConfigURL() ), log ); m_bytesUtil = hBaseService.getByteConversionUtil(); hBaseValueMetaInterfaceFactory = hBaseService.getHBaseValueMetaInterfaceFactory(); if ( connectionMessages.size() > 0 ) { for ( String m : connectionMessages ) { logBasic( m ); } } } catch ( Exception ex ) { throw new KettleException( BaseMessages.getString( HBaseInputMeta.PKG, "HBaseInput.Error.UnableToObtainConnection" ), ex ); } try { m_mappingAdmin = new MappingAdmin( m_hbAdmin ); } catch ( Exception ex ) { throw new KettleException( BaseMessages.getString( HBaseInputMeta.PKG, "HBaseInput.Error.UnableToCreateAMappingAdminConnection" ), ex ); } // check on the existence and readiness of the target table String sourceName = environmentSubstitute( m_meta.getSourceTableName() ); if ( StringUtil.isEmpty( sourceName ) ) { throw new KettleException( BaseMessages.getString( HBaseInputMeta.PKG, "HBaseInput.TableName.Missing" ) ); } HBaseTable hBaseTable; try { hBaseTable = m_hbAdmin.getTable( sourceName ); } catch ( IOException e ) { throw new KettleException( BaseMessages.getString( HBaseInputMeta.PKG, "HBaseInput.Error.CantGetTable", sourceName ), e ); } try { if ( !hBaseTable.exists() ) { throw new KettleException( BaseMessages.getString( HBaseInputMeta.PKG, "HBaseInput.Error.SourceTableDoesNotExist", sourceName ) ); } if ( hBaseTable.disabled() || !hBaseTable.available() ) { throw new KettleException( BaseMessages.getString( HBaseInputMeta.PKG, "HBaseInput.Error.SourceTableIsNotAvailable", sourceName ) ); } } catch ( Exception ex ) { throw new KettleException( BaseMessages.getString( HBaseInputMeta.PKG, 
"HBaseInput.Error.AvailabilityReadinessProblem", sourceName ), ex ); } if ( m_meta.getMapping() != null && Const.isEmpty( m_meta.getSourceMappingName() ) ) { // use embedded mapping m_tableMapping = m_meta.getMapping(); } else { // Otherwise get mapping details for the source table from HBase if ( Const.isEmpty( m_meta.getSourceMappingName() ) ) { throw new KettleException( BaseMessages.getString( HBaseInputMeta.PKG, "HBaseInput.Error.NoMappingName" ) ); } try { m_tableMapping = m_mappingAdmin.getMapping( environmentSubstitute( m_meta.getSourceTableName() ), environmentSubstitute( m_meta.getSourceMappingName() ) ); } catch ( Exception ex ) { throw new KettleException( BaseMessages.getString( HBaseInputMeta.PKG, "HBaseInput.Error.UnableToRetrieveMapping", environmentSubstitute( m_meta.getSourceMappingName() ), environmentSubstitute( m_meta.getSourceTableName() ) ), ex ); } } HBaseValueMetaInterface vm2 = hBaseValueMetaInterfaceFactory .createHBaseValueMetaInterface( null, null, m_tableMapping.getKeyName(), getKettleTypeByKeyType( m_tableMapping.getKeyType() ), -1, -1 ); vm2.setKey( true ); try { m_tableMapping.addMappedColumn( vm2, m_tableMapping.isTupleMapping() ); } catch ( Exception exception ) { exception.printStackTrace(); } m_columnsMappedByAlias = m_tableMapping.getMappedColumns(); if ( m_tableMapping.isTupleMapping() ) { m_tupleHandler = new HBaseRowToKettleTuple( m_bytesUtil ); } // conversion mask to use for user specified key values in range scan. // This can come from user-specified field information OR it can be // provided in the keyStart/keyStop values by suffixing the value with // "@converionMask" String dateOrNumberConversionMaskForKey = null; // if there are any user-chosen output fields in the meta data then // check them against table mapping. All selected fields must be present // in the mapping m_userOutputColumns = m_meta.getOutputFields(); if ( m_userOutputColumns != null && m_userOutputColumns.size() > 0 ) { for ( HBaseValueMetaInterface vm : m_userOutputColumns ) { if ( !vm.isKey() ) { if ( m_columnsMappedByAlias.get( vm.getAlias() ) == null ) { throw new KettleException( BaseMessages.getString( HBaseInputMeta.PKG, "HBaseInput.Error.UnableToFindUserSelectedColumn", vm.getAlias(), m_tableMapping.getFriendlyName() ) ); } } else { dateOrNumberConversionMaskForKey = vm.getConversionMask(); } } } try { m_hbAdminTable = m_hbAdmin.getTable( sourceName ); } catch ( Exception ex ) { throw new KettleException( BaseMessages.getString( HBaseInputMeta.PKG, "HBaseInput.Error.UnableToSetSourceTableForScan" ), ex ); } ResultScannerBuilder scannerBuilder = m_hbAdminTable .createScannerBuilder( m_tableMapping, dateOrNumberConversionMaskForKey, m_meta.getKeyStartValue(), m_meta.getKeyStopValue(), m_meta.getScannerCacheSize(), log, this ); // LIMIT THE SCAN TO JUST THE COLUMNS IN THE MAPPING // User-selected output columns? 
if ( m_userOutputColumns != null && m_userOutputColumns.size() > 0 && !m_tableMapping.isTupleMapping() ) { HBaseInputData.setScanColumns( scannerBuilder, m_userOutputColumns, m_tableMapping ); } // set any filters if ( m_meta.getColumnFilters() != null && m_meta.getColumnFilters().size() > 0 ) { HBaseInputData.setScanFilters( scannerBuilder, m_meta.getColumnFilters(), m_meta.getMatchAnyFilter(), m_columnsMappedByAlias, this ); } if ( !isStopped() ) { try { resultScanner = scannerBuilder.build(); } catch ( Exception e ) { throw new KettleException( BaseMessages.getString( HBaseInputMeta.PKG, "HBaseInput.Error.UnableToExecuteSourceTableScan" ), e ); } // set up the output fields (using the mapping) m_data.setOutputRowMeta( new RowMeta() ); m_meta.getFields( getTransMeta().getBowl(), m_data.getOutputRowMeta(), getStepname(), null, null, this, repository, metaStore ); } } Result next = null; if ( !isStopped() ) { try { next = resultScanner.next(); } catch ( Exception e ) { throw new KettleException( e.getMessage(), e ); } } if ( next == null ) { try { m_hbAdminTable.close(); m_hbAdmin.close(); } catch ( Exception e ) { throw new KettleException( BaseMessages.getString( HBaseInputMeta.PKG, "HBaseInput.Error.ProblemClosingConnection", e.getMessage() ), e ); } setOutputDone(); return false; } if ( m_tableMapping.isTupleMapping() ) { List tupleRows = HBaseInputData.getTupleOutputRows( hBaseService, next, m_userOutputColumns, m_columnsMappedByAlias, m_tableMapping, m_tupleHandler, m_data.getOutputRowMeta() ); for ( Object[] tuple : tupleRows ) { putRow( m_data.getOutputRowMeta(), tuple ); } return true; } else { Object[] outRowData = HBaseInputData.getOutputRow( next, m_userOutputColumns, m_columnsMappedByAlias, m_tableMapping, m_data .getOutputRowMeta() ); putRow( m_data.getOutputRowMeta(), outRowData ); return true; } } @Override public boolean init( StepMetaInterface smi, StepDataInterface sdi ) { if ( super.init( smi, sdi ) ) { HBaseInputMeta meta = (HBaseInputMeta) smi; try { // Set Embedded NamedCluter MetatStore Provider Key so that it can be passed to VFS if ( getTransMeta().getNamedClusterEmbedManager() != null ) { getTransMeta().getNamedClusterEmbedManager().passEmbeddedMetastoreKey( getTransMeta(), getTransMeta().getEmbeddedMetastoreProviderKey() ); meta.applyInjection( this ); } return true; } catch ( KettleException e ) { logError( "Error while injecting properties", e ); } } return false; } public static int getKettleTypeByKeyType( Mapping.KeyType keyType ) { if ( keyType == null ) { return ValueMetaInterface.TYPE_NONE; } switch ( keyType ) { case BINARY: return ValueMetaInterface.TYPE_BINARY; case STRING: return ValueMetaInterface.TYPE_STRING; case UNSIGNED_LONG: case UNSIGNED_INTEGER: case LONG: case INTEGER: return ValueMetaInterface.TYPE_NUMBER; case UNSIGNED_DATE: case DATE: return ValueMetaInterface.TYPE_DATE; default: return ValueMetaInterface.TYPE_NONE; } } /* * (non-Javadoc) * * @see org.pentaho.di.trans.step.BaseStep#setStopped(boolean) */ @Override public void setStopped( boolean stopped ) { if ( isStopped() && stopped == true ) { return; } super.setStopped( stopped ); if ( stopped && m_hbAdmin != null ) { logBasic( BaseMessages.getString( HBaseInputMeta.PKG, "HBaseInput.ClosingConnection" ) ); try { m_hbAdmin.close(); } catch ( IOException ex ) { logError( BaseMessages.getString( HBaseInputMeta.PKG, "HBaseInput.Error.ProblemClosingConnection1", ex ) ); } } } } ================================================ FILE: 
kettle-plugins/hbase/core/src/main/java/org/pentaho/big/data/kettle/plugins/hbase/input/HBaseInputData.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.hbase.input; import org.pentaho.big.data.kettle.plugins.hbase.mapping.HBaseRowToKettleTuple; import org.pentaho.hadoop.shim.api.hbase.HBaseService; import org.pentaho.hadoop.shim.api.hbase.mapping.ColumnFilter; import org.pentaho.hadoop.shim.api.hbase.mapping.Mapping; import org.pentaho.hadoop.shim.api.hbase.meta.HBaseValueMetaInterface; import org.pentaho.hadoop.shim.api.hbase.Result; import org.pentaho.hadoop.shim.api.hbase.table.ResultScannerBuilder; import org.pentaho.di.core.Const; import org.pentaho.di.core.exception.KettleException; import org.pentaho.di.core.row.RowDataUtil; import org.pentaho.di.core.row.RowMetaInterface; import org.pentaho.di.core.variables.VariableSpace; import org.pentaho.di.i18n.BaseMessages; import org.pentaho.di.trans.step.BaseStepData; import org.pentaho.di.trans.step.StepDataInterface; import java.net.MalformedURLException; import java.net.URL; import java.util.Collection; import java.util.List; import java.util.Map; import java.util.Set; /** * Class providing an input step for reading data from an HBase table according to meta data mapping info stored in a * separate HBase table called "pentaho_mappings". See org.pentaho.hbase.mapping.Mapping for details on the meta data * format. * * @author Mark Hall (mhall{[at]}pentaho{[dot]}com) * @version $Revision$ * */ public class HBaseInputData extends BaseStepData implements StepDataInterface { /** The output data format */ protected RowMetaInterface m_outputRowMeta; /** * Get the output row format * * @return the output row format */ public RowMetaInterface getOutputRowMeta() { return m_outputRowMeta; } /** * Set the output row format * * @param rmi * the output row format */ public void setOutputRowMeta( RowMetaInterface rmi ) { m_outputRowMeta = rmi; } /** * Utility method to covert a string to a URL object. * * @param pathOrURL * file or http URL as a string * @return a URL * @throws MalformedURLException * if there is a problem with the URL. */ public static URL stringToURL( String pathOrURL ) throws MalformedURLException { URL result = null; if ( !Const.isEmpty( pathOrURL ) ) { if ( pathOrURL.toLowerCase().startsWith( "http://" ) || pathOrURL.toLowerCase().startsWith( "file://" ) ) { result = new URL( pathOrURL ); } else { String c = "file://" + pathOrURL; result = new URL( c ); } } return result; } /** * Set the specific columns to be returned by the scan. 
* * @param resultScannerBuilder * the resultScannerBuilder * @param limitCols * the columns to limit the scan to * @param tableMapping * the mapping information * @throws KettleException * if a problem occurs */ public static void setScanColumns( ResultScannerBuilder resultScannerBuilder, List limitCols, Mapping tableMapping ) throws KettleException { for ( HBaseValueMetaInterface currentCol : limitCols ) { if ( !currentCol.isKey() ) { String colFamilyName = currentCol.getColumnFamily(); String qualifier = currentCol.getColumnName(); boolean binaryColName = false; if ( qualifier.startsWith( "@@@binary@@@" ) ) { qualifier = qualifier.replace( "@@@binary@@@", "" ); binaryColName = true; } try { resultScannerBuilder.addColumnToScan( colFamilyName, qualifier, binaryColName ); } catch ( Exception ex ) { throw new KettleException( BaseMessages.getString( HBaseInputMeta.PKG, "HBaseInput.Error.UnableToAddColumnToScan" ), ex ); } } } } /** * Set column filters to apply server-side to the scan results. * * @param resultScannerBuilder * the resultScannerBuilder * @param columnFilters * the column filters to apply * @param matchAnyFilter * if true then a row will be returned if any of the filters match (otherwise all have to match) * @param columnsMappedByAlias * the columns defined in the mapping * @param vars * variables to use * @throws KettleException * if a problem occurs */ public static void setScanFilters( ResultScannerBuilder resultScannerBuilder, Collection columnFilters, boolean matchAnyFilter, Map columnsMappedByAlias, VariableSpace vars ) throws KettleException { for ( ColumnFilter cf : columnFilters ) { String fieldAliasS = vars.environmentSubstitute( cf.getFieldAlias() ); HBaseValueMetaInterface mappedCol = columnsMappedByAlias.get( fieldAliasS ); if ( mappedCol == null ) { throw new KettleException( BaseMessages.getString( HBaseInputMeta.PKG, "HBaseInput.Error.ColumnFilterIsNotInTheMapping", fieldAliasS ) ); } // check the type (if set in the ColumnFilter) against the type // of this field in the mapping String fieldTypeS = vars.environmentSubstitute( cf.getFieldType() ); if ( !Const.isEmpty( fieldTypeS ) ) { if ( !mappedCol.getHBaseTypeDesc().equalsIgnoreCase( fieldTypeS ) ) { throw new KettleException( BaseMessages.getString( HBaseInputMeta.PKG, "HBaseInput.Error.FieldTypeMismatch", fieldTypeS, fieldAliasS, mappedCol.getHBaseTypeDesc() ) ); } } try { resultScannerBuilder.addColumnFilterToScan( cf, mappedCol, vars, matchAnyFilter ); } catch ( Exception ex ) { throw new KettleException( BaseMessages.getString( HBaseInputMeta.PKG, "HBaseInput.Error.UnableToAddColumnFilterToScan" ), ex ); } } } /** * Convert/decode the current hbase row into a list of "tuple" kettle rows * * @param hBaseService * the hBaseService * @param result * the result to use * @param userOutputColumns * user-specified subset of columns (if any) from the mapping * @param columnsMappedByAlias * columns in the mapping keyed by alias * @param tableMapping * the mapping to use * @param tupleHandler * the HBaseRowToKettleTuple to delegate to * @param outputRowMeta * the outgoing row meta * @return a list of kettle rows * @throws KettleException * if a problem occurs */ public static List getTupleOutputRows( HBaseService hBaseService, Result result, List userOutputColumns, Map columnsMappedByAlias, Mapping tableMapping, HBaseRowToKettleTuple tupleHandler, RowMetaInterface outputRowMeta ) throws KettleException { if ( userOutputColumns != null && userOutputColumns.size() > 0 ) { return 
tupleHandler.hbaseRowToKettleTupleMode( result, tableMapping, userOutputColumns, outputRowMeta ); } else { return tupleHandler.hbaseRowToKettleTupleMode( hBaseService.getHBaseValueMetaInterfaceFactory(), result, tableMapping, columnsMappedByAlias, outputRowMeta ); } } /** * Convert/decode the current hbase row into a kettle row * * @param result * the result to use * @param userOutputColumns * user-specified subset of columns (if any) from the mapping * @param columnsMappedByAlias * columns in the mapping keyed by alias * @param tableMapping * the mapping to use * @param outputRowMeta * the outgoing row meta * @return a kettle row * @throws KettleException * if a problem occurs */ public static Object[] getOutputRow( Result result, List userOutputColumns, Map columnsMappedByAlias, Mapping tableMapping, RowMetaInterface outputRowMeta ) throws KettleException { int size = ( userOutputColumns != null && userOutputColumns.size() > 0 ) ? userOutputColumns.size() : tableMapping.numMappedColumns() + 1; // + 1 for the key Object[] outputRowData = RowDataUtil.allocateRowData( size ); // User-selected output columns? if ( userOutputColumns != null && userOutputColumns.size() > 0 ) { for ( HBaseValueMetaInterface currentCol : userOutputColumns ) { if ( currentCol.isKey() ) { byte[] rawKey = null; try { rawKey = result.getRow(); } catch ( Exception e ) { throw new KettleException( e ); } Object decodedKey = tableMapping.decodeKeyValue( rawKey ); int keyIndex = outputRowMeta.indexOfValue( currentCol.getAlias() ); outputRowData[keyIndex] = decodedKey; } else { String colFamilyName = currentCol.getColumnFamily(); String qualifier = currentCol.getColumnName(); boolean binaryColName = false; if ( qualifier.startsWith( "@@@binary@@@" ) ) { qualifier = qualifier.replace( "@@@binary@@@", "" ); // assume hex encoded binaryColName = true; } byte[] kv = null; try { kv = result.getValue( colFamilyName, qualifier, binaryColName ); } catch ( Exception e ) { throw new KettleException( e ); } int outputIndex = outputRowMeta.indexOfValue( currentCol.getAlias() ); if ( outputIndex < 0 ) { throw new KettleException( BaseMessages.getString( HBaseInputMeta.PKG, "HBaseInput.Error.ColumnNotDefinedInOutput", currentCol.getAlias() ) ); } Object decodedVal = currentCol.decodeColumnValue( ( kv == null ) ? 
null : kv ); outputRowData[outputIndex] = decodedVal; } } } else { // do the key first byte[] rawKey = null; try { rawKey = result.getRow(); } catch ( Exception e ) { throw new KettleException( e ); } Object decodedKey = tableMapping.decodeKeyValue( rawKey ); int keyIndex = outputRowMeta.indexOfValue( tableMapping.getKeyName() ); outputRowData[keyIndex] = decodedKey; Set aliasSet = columnsMappedByAlias.keySet(); for ( String name : aliasSet ) { HBaseValueMetaInterface currentCol = columnsMappedByAlias.get( name ); String colFamilyName = currentCol.getColumnFamily(); String qualifier = currentCol.getColumnName(); if ( currentCol.isKey() ) { // skip key as it has already been processed // and is not in the scan's columns continue; } boolean binaryColName = false; if ( qualifier.startsWith( "@@@binary@@@" ) ) { qualifier = qualifier.replace( "@@@binary@@@", "" ); // assume hex encoded binaryColName = true; } byte[] kv = null; try { kv = result.getValue( colFamilyName, qualifier, binaryColName ); } catch ( Exception e ) { throw new KettleException( e ); } int outputIndex = outputRowMeta.indexOfValue( name ); if ( outputIndex < 0 ) { throw new KettleException( BaseMessages.getString( HBaseInputMeta.PKG, "HBaseInput.Error.ColumnNotDefinedInOutput", name ) ); } Object decodedVal = currentCol.decodeColumnValue( ( kv == null ) ? null : kv ); outputRowData[outputIndex] = decodedVal; } } return outputRowData; } } ================================================ FILE: kettle-plugins/hbase/core/src/main/java/org/pentaho/big/data/kettle/plugins/hbase/input/HBaseInputDialog.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. 
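Both setScanColumns(...) and getOutputRow(...) above treat a column qualifier that begins with the "@@@binary@@@" marker as a binary, hex-encoded column name. A small self-contained sketch of that convention; the qualifier value is illustrative:

public class BinaryQualifierSketch {
  private static final String BINARY_MARKER = "@@@binary@@@";

  public static void main( String[] args ) {
    String qualifier = "@@@binary@@@4b4559";   // hex-encoded qualifier (illustrative)
    boolean binaryColName = false;
    if ( qualifier.startsWith( BINARY_MARKER ) ) {
      qualifier = qualifier.replace( BINARY_MARKER, "" );
      binaryColName = true;                    // the column name is the decoded bytes, not the literal string
    }
    System.out.println( qualifier + " binary=" + binaryColName );  // prints: 4b4559 binary=true
  }
}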
* * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.hbase.input; import org.apache.commons.lang.StringUtils; import org.eclipse.jface.dialogs.MessageDialog; import org.eclipse.swt.SWT; import org.eclipse.swt.custom.CCombo; import org.eclipse.swt.custom.CTabFolder; import org.eclipse.swt.custom.CTabItem; import org.eclipse.swt.events.ModifyEvent; import org.eclipse.swt.events.ModifyListener; import org.eclipse.swt.events.SelectionAdapter; import org.eclipse.swt.events.SelectionEvent; import org.eclipse.swt.events.ShellAdapter; import org.eclipse.swt.events.ShellEvent; import org.eclipse.swt.graphics.Cursor; import org.eclipse.swt.layout.FormAttachment; import org.eclipse.swt.layout.FormData; import org.eclipse.swt.layout.FormLayout; import org.eclipse.swt.widgets.Button; import org.eclipse.swt.widgets.Composite; import org.eclipse.swt.widgets.Display; import org.eclipse.swt.widgets.Event; import org.eclipse.swt.widgets.FileDialog; import org.eclipse.swt.widgets.Group; import org.eclipse.swt.widgets.Label; import org.eclipse.swt.widgets.Listener; import org.eclipse.swt.widgets.MessageBox; import org.eclipse.swt.widgets.Shell; import org.eclipse.swt.widgets.TableItem; import org.eclipse.swt.widgets.Text; import org.pentaho.di.core.annotations.PluginDialog; import org.pentaho.big.data.kettle.plugins.hbase.HbaseUtil; import org.pentaho.hadoop.shim.api.cluster.NamedCluster; import org.pentaho.hadoop.shim.api.cluster.NamedClusterService; import org.pentaho.hadoop.shim.api.cluster.NamedClusterServiceLocator; import org.pentaho.hadoop.shim.api.cluster.ClusterInitializationException; import org.pentaho.big.data.kettle.plugins.hbase.ServiceStatus; import org.pentaho.big.data.kettle.plugins.hbase.mapping.ConfigurationProducer; import org.pentaho.big.data.kettle.plugins.hbase.mapping.MappingAdmin; import org.pentaho.big.data.kettle.plugins.hbase.mapping.MappingEditor; import org.pentaho.big.data.plugins.common.ui.NamedClusterWidgetImpl; import org.pentaho.hadoop.shim.api.hbase.ByteConversionUtil; import org.pentaho.hadoop.shim.api.hbase.HBaseConnection; import org.pentaho.hadoop.shim.api.hbase.HBaseService; import org.pentaho.hadoop.shim.api.hbase.mapping.ColumnFilter; import org.pentaho.hadoop.shim.api.hbase.mapping.ColumnFilterFactory; import org.pentaho.hadoop.shim.api.hbase.mapping.Mapping; import org.pentaho.hadoop.shim.api.hbase.meta.HBaseValueMetaInterface; import org.pentaho.hadoop.shim.api.hbase.meta.HBaseValueMetaInterfaceFactory; import org.pentaho.di.core.Const; import org.pentaho.di.core.Props; import org.pentaho.di.core.row.ValueMeta; import org.pentaho.di.core.row.ValueMetaInterface; import org.pentaho.di.core.row.value.ValueMetaBase; import org.pentaho.di.i18n.BaseMessages; import org.pentaho.di.trans.TransMeta; import org.pentaho.di.trans.step.BaseStepMeta; import org.pentaho.di.trans.step.StepDialogInterface; import org.pentaho.di.ui.core.dialog.ErrorDialog; import org.pentaho.di.ui.core.gui.GUIResource; import org.pentaho.di.ui.core.widget.ColumnInfo; import org.pentaho.di.ui.core.widget.ComboValuesSelectionListener; import org.pentaho.di.ui.core.widget.TableView; import org.pentaho.di.ui.core.widget.TextVar; import org.pentaho.di.ui.trans.step.BaseStepDialog; import org.pentaho.runtime.test.RuntimeTester; import org.pentaho.runtime.test.action.RuntimeTestActionService; import java.io.IOException; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import 
java.util.Map; import java.util.Set; /** * Dialog class for HBaseInput * * @author Mark Hall (mhall{[at]}pentaho{[dot]}com) */ @PluginDialog( id = "HBaseInput", image = "HB.svg", pluginType = PluginDialog.PluginType.JOBENTRY, documentationUrl = "Products/HBase_Input" ) public class HBaseInputDialog extends BaseStepDialog implements StepDialogInterface, ConfigurationProducer { /** various UI bits and pieces for the dialog */ private Label m_stepnameLabel; private Text m_stepnameText; // The tabs of the dialog private CTabFolder m_wTabFolder; private CTabItem m_wConfigTab; private CTabItem m_wFilterTab; private CTabItem m_editorTab; NamedClusterWidgetImpl namedClusterWidget; // Core config line private Button m_coreConfigBut; private TextVar m_coreConfigText; // Default config line private Button m_defaultConfigBut; private TextVar m_defaultConfigText; private final HBaseInputMeta m_currentMeta; private final HBaseInputMeta m_originalMeta; private final HBaseInputMeta m_configurationMeta; // Table name line private Button m_mappedTableNamesBut; private CCombo m_mappedTableNamesCombo; // Mapping name line private Button m_mappingNamesBut; private CCombo m_mappingNamesCombo; /** Store the mapping information in the step's meta data */ private Button m_storeMappingInStepMetaData; // Key start line private TextVar m_keyStartText; // Key stop line private TextVar m_keyStopText; // Rows to be cached by Scanner private TextVar m_scanCacheText; // Key as a column // private Button m_includeKey; // Key information private String m_keyName; private Mapping.KeyType m_keyType; private Label m_keyInfo; private Button m_getKeyInfoBut; // Fields table widget private TableView m_fieldsView; // filters fields widget private TableView m_filtersView; private ColumnInfo m_filterAliasCI; private Button m_matchAllBut; private Button m_matchAnyBut; // mapping editor composite private MappingEditor m_mappingEditor; // cached copy of the mapped columns private Map m_mappedColumns; // lookup map for indexed columns private Map m_indexedLookup = new HashMap<>(); private final NamedClusterService namedClusterService; private final RuntimeTestActionService runtimeTestActionService; private final RuntimeTester runtimeTester; private final NamedClusterServiceLocator namedClusterServiceLocator; public HBaseInputDialog( Shell parent, Object in, TransMeta tr, String name ) { super( parent, (BaseStepMeta) in, tr, name ); m_currentMeta = (HBaseInputMeta) in; m_originalMeta = (HBaseInputMeta) m_currentMeta.clone(); m_configurationMeta = (HBaseInputMeta) m_currentMeta.clone(); namedClusterService = m_currentMeta.getNamedClusterService(); runtimeTestActionService = m_currentMeta.getRuntimeTestActionService(); runtimeTester = m_currentMeta.getRuntimeTester(); namedClusterServiceLocator = m_currentMeta.getNamedClusterServiceLocator(); } public String open() { Shell parent = getParent(); Display display = parent.getDisplay(); shell = new Shell( parent, SWT.DIALOG_TRIM | SWT.RESIZE | SWT.MIN | SWT.MAX ); props.setLook( shell ); setShellImage( shell, m_currentMeta ); // used to listen to a text field (m_wStepname) ModifyListener lsMod = new ModifyListener() { public void modifyText( ModifyEvent e ) { m_currentMeta.setChanged(); } }; changed = m_currentMeta.hasChanged(); FormLayout formLayout = new FormLayout(); formLayout.marginWidth = Const.FORM_MARGIN; formLayout.marginHeight = Const.FORM_MARGIN; shell.setLayout( formLayout ); shell.setText( Messages.getString( "HBaseInputDialog.Shell.Title" ) ); int middle = props.getMiddlePct(); 
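// Layout convention used throughout this dialog: 'middle' is the split point (a percentage of
// the dialog width) between labels and their input widgets, and the 'margin' defined next is the
// standard spacing; in the FormData attachments below, labels are anchored to the left of that
// split and the corresponding widgets to the right of it.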
int margin = Const.MARGIN; // Stepname line m_stepnameLabel = new Label( shell, SWT.RIGHT ); m_stepnameLabel.setText( Messages.getString( "HBaseInputDialog.StepName.Label" ) ); props.setLook( m_stepnameLabel ); FormData fd = new FormData(); fd.left = new FormAttachment( 0, 0 ); fd.right = new FormAttachment( middle, -margin ); fd.top = new FormAttachment( 0, margin ); m_stepnameLabel.setLayoutData( fd ); m_stepnameText = new Text( shell, SWT.SINGLE | SWT.LEFT | SWT.BORDER ); m_stepnameText.setText( stepname ); props.setLook( m_stepnameText ); m_stepnameText.addModifyListener( lsMod ); // format the text field fd = new FormData(); fd.left = new FormAttachment( middle, 0 ); fd.top = new FormAttachment( 0, margin ); fd.right = new FormAttachment( 100, 0 ); m_stepnameText.setLayoutData( fd ); m_wTabFolder = new CTabFolder( shell, SWT.BORDER ); props.setLook( m_wTabFolder, Props.WIDGET_STYLE_TAB ); m_wTabFolder.setSimple( false ); // Start of the config tab m_wConfigTab = new CTabItem( m_wTabFolder, SWT.NONE ); m_wConfigTab.setText( Messages.getString( "HBaseInputDialog.ConfigTab.TabTitle" ) ); Composite wConfigComp = new Composite( m_wTabFolder, SWT.NONE ); props.setLook( wConfigComp ); FormLayout configLayout = new FormLayout(); configLayout.marginWidth = 3; configLayout.marginHeight = 3; wConfigComp.setLayout( configLayout ); Label namedClusterLab = new Label( wConfigComp, SWT.RIGHT ); namedClusterLab.setText( Messages.getString( "HBaseInputDialog.NamedCluster.Label" ) ); namedClusterLab.setToolTipText( Messages.getString( "HBaseInputDialog.NamedCluster.TipText" ) ); props.setLook( namedClusterLab ); fd = new FormData(); fd.left = new FormAttachment( 0, 0 ); fd.top = new FormAttachment( 0, 10 ); fd.right = new FormAttachment( middle, -margin ); namedClusterLab.setLayoutData( fd ); namedClusterWidget = new NamedClusterWidgetImpl( wConfigComp, false, namedClusterService, runtimeTestActionService, runtimeTester, false ); namedClusterWidget.initiate(); props.setLook( namedClusterWidget ); fd = new FormData(); fd.right = new FormAttachment( 100, 0 ); fd.top = new FormAttachment( 0, 0 ); fd.left = new FormAttachment( middle, 0 ); namedClusterWidget.setLayoutData( fd ); // core config line Label coreConfigLab = new Label( wConfigComp, SWT.RIGHT ); coreConfigLab.setText( Messages.getString( "HBaseInputDialog.CoreConfig.Label" ) ); coreConfigLab.setToolTipText( Messages.getString( "HBaseInputDialog.CoreConfig.TipText" ) ); props.setLook( coreConfigLab ); fd = new FormData(); fd.left = new FormAttachment( 0, 0 ); fd.top = new FormAttachment( namedClusterWidget, margin ); fd.right = new FormAttachment( middle, -margin ); coreConfigLab.setLayoutData( fd ); m_coreConfigBut = new Button( wConfigComp, SWT.PUSH | SWT.CENTER ); props.setLook( m_coreConfigBut ); m_coreConfigBut.setText( Messages.getString( "System.Button.Browse" ) ); fd = new FormData(); fd.right = new FormAttachment( 100, 0 ); fd.top = new FormAttachment( namedClusterWidget, 0 ); m_coreConfigBut.setLayoutData( fd ); m_coreConfigBut.addSelectionListener( new SelectionAdapter() { @Override public void widgetSelected( SelectionEvent e ) { FileDialog dialog = new FileDialog( shell, SWT.OPEN ); String[] extensions = null; String[] filterNames = null; extensions = new String[ 2 ]; filterNames = new String[ 2 ]; extensions[ 0 ] = "*.xml"; filterNames[ 0 ] = Messages.getString( "HBaseInputDialog.FileType.XML" ); extensions[ 1 ] = "*"; filterNames[ 1 ] = Messages.getString( "System.FileType.AllFiles" ); dialog.setFilterExtensions( extensions ); if ( 
dialog.open() != null ) { m_coreConfigText.setText( dialog.getFilterPath() + System.getProperty( "file.separator" ) + dialog.getFileName() ); } } } ); m_coreConfigText = new TextVar( transMeta, wConfigComp, SWT.SINGLE | SWT.LEFT | SWT.BORDER ); props.setLook( m_coreConfigText ); m_coreConfigText.addModifyListener( lsMod ); // set the tool tip to the contents with any env variables expanded m_coreConfigText.addModifyListener( new ModifyListener() { public void modifyText( ModifyEvent e ) { m_coreConfigText.setToolTipText( transMeta.environmentSubstitute( m_coreConfigText.getText() ) ); } } ); fd = new FormData(); fd.left = new FormAttachment( middle, 0 ); fd.top = new FormAttachment( namedClusterWidget, margin ); fd.right = new FormAttachment( m_coreConfigBut, -margin ); m_coreConfigText.setLayoutData( fd ); // default config line Label defaultConfigLab = new Label( wConfigComp, SWT.RIGHT ); defaultConfigLab.setText( Messages.getString( "HBaseInputDialog.DefaultConfig.Label" ) ); defaultConfigLab.setToolTipText( Messages.getString( "HBaseInputDialog.DefaultConfig.TipText" ) ); props.setLook( defaultConfigLab ); fd = new FormData(); fd.left = new FormAttachment( 0, 0 ); fd.top = new FormAttachment( m_coreConfigText, margin ); fd.right = new FormAttachment( middle, -margin ); defaultConfigLab.setLayoutData( fd ); m_defaultConfigBut = new Button( wConfigComp, SWT.PUSH | SWT.CENTER ); props.setLook( m_defaultConfigBut ); m_defaultConfigBut.setText( Messages.getString( "System.Button.Browse" ) ); fd = new FormData(); fd.right = new FormAttachment( 100, 0 ); fd.top = new FormAttachment( m_coreConfigText, 0 ); m_defaultConfigBut.setLayoutData( fd ); m_defaultConfigBut.addSelectionListener( new SelectionAdapter() { @Override public void widgetSelected( SelectionEvent e ) { FileDialog dialog = new FileDialog( shell, SWT.OPEN ); String[] extensions = null; String[] filterNames = null; extensions = new String[ 2 ]; filterNames = new String[ 2 ]; extensions[ 0 ] = "*.xml"; filterNames[ 0 ] = Messages.getString( "HBaseInputDialog.FileType.XML" ); extensions[ 1 ] = "*"; filterNames[ 1 ] = Messages.getString( "System.FileType.AllFiles" ); dialog.setFilterExtensions( extensions ); if ( dialog.open() != null ) { m_defaultConfigText.setText( dialog.getFilterPath() + System.getProperty( "file.separator" ) + dialog.getFileName() ); } } } ); m_defaultConfigText = new TextVar( transMeta, wConfigComp, SWT.SINGLE | SWT.LEFT | SWT.BORDER ); props.setLook( m_defaultConfigText ); m_defaultConfigText.addModifyListener( lsMod ); // set the tool tip to the contents with any env variables expanded m_defaultConfigText.addModifyListener( new ModifyListener() { public void modifyText( ModifyEvent e ) { m_defaultConfigText.setToolTipText( transMeta.environmentSubstitute( m_defaultConfigText.getText() ) ); } } ); fd = new FormData(); fd.left = new FormAttachment( middle, 0 ); fd.top = new FormAttachment( m_coreConfigText, margin ); fd.right = new FormAttachment( m_defaultConfigBut, -margin ); m_defaultConfigText.setLayoutData( fd ); // table name Label tableNameLab = new Label( wConfigComp, SWT.RIGHT ); tableNameLab.setText( Messages.getString( "HBaseInputDialog.TableName.Label" ) ); tableNameLab.setToolTipText( Messages.getString( "HBaseInputDialog.TableName.TipText" ) ); props.setLook( tableNameLab ); fd = new FormData(); fd.left = new FormAttachment( 0, 0 ); fd.top = new FormAttachment( m_defaultConfigText, margin ); fd.right = new FormAttachment( middle, -margin ); tableNameLab.setLayoutData( fd ); m_mappedTableNamesBut = 
new Button( wConfigComp, SWT.PUSH | SWT.CENTER ); props.setLook( m_mappedTableNamesBut ); m_mappedTableNamesBut.setText( Messages.getString( "HBaseInputDialog.TableName.Button" ) ); fd = new FormData(); fd.right = new FormAttachment( 100, 0 ); fd.top = new FormAttachment( m_defaultConfigText, 0 ); m_mappedTableNamesBut.setLayoutData( fd ); m_mappedTableNamesCombo = new CCombo( wConfigComp, SWT.BORDER ); props.setLook( m_mappedTableNamesCombo ); m_mappedTableNamesCombo.addModifyListener( new ModifyListener() { public void modifyText( ModifyEvent e ) { m_currentMeta.setChanged(); m_mappedTableNamesCombo.setToolTipText( transMeta.environmentSubstitute( m_mappedTableNamesCombo.getText() ) ); } } ); fd = new FormData(); fd.left = new FormAttachment( middle, 0 ); fd.top = new FormAttachment( m_defaultConfigText, margin ); fd.right = new FormAttachment( m_mappedTableNamesBut, -margin ); m_mappedTableNamesCombo.setLayoutData( fd ); m_mappedTableNamesBut.addSelectionListener( new SelectionAdapter() { @Override public void widgetSelected( SelectionEvent e ) { setupMappedTableNames(); if ( m_mappedTableNamesCombo.getItemCount() > 0 ) { m_mappedTableNamesCombo.setListVisible( true ); } } } ); // mapping name Label mappingNameLab = new Label( wConfigComp, SWT.RIGHT ); mappingNameLab.setText( Messages.getString( "HBaseInputDialog.MappingName.Label" ) ); mappingNameLab.setToolTipText( Messages.getString( "HBaseInputDialog.MappingName.TipText" ) ); props.setLook( mappingNameLab ); fd = new FormData(); fd.left = new FormAttachment( 0, 0 ); fd.top = new FormAttachment( m_mappedTableNamesCombo, margin ); fd.right = new FormAttachment( middle, -margin ); mappingNameLab.setLayoutData( fd ); m_mappingNamesBut = new Button( wConfigComp, SWT.PUSH | SWT.CENTER ); props.setLook( m_mappingNamesBut ); m_mappingNamesBut.setText( Messages.getString( "HBaseInputDialog.MappingName.Button" ) ); fd = new FormData(); fd.right = new FormAttachment( 100, 0 ); fd.top = new FormAttachment( m_mappedTableNamesCombo, 0 ); m_mappingNamesBut.setLayoutData( fd ); m_mappingNamesBut.addSelectionListener( new SelectionAdapter() { @Override public void widgetSelected( SelectionEvent e ) { setupMappingNamesForTable( false ); if ( m_mappingNamesCombo.getItemCount() > 0 ) { m_mappingNamesCombo.setListVisible( true ); } } } ); m_mappingNamesCombo = new CCombo( wConfigComp, SWT.BORDER ); props.setLook( m_mappingNamesCombo ); m_mappingNamesCombo.addModifyListener( new ModifyListener() { public void modifyText( ModifyEvent e ) { m_currentMeta.setChanged(); // checkKeyInformation(true); m_mappingNamesCombo.setToolTipText( transMeta.environmentSubstitute( m_mappingNamesCombo.getText() ) ); m_storeMappingInStepMetaData.setSelection( false ); } } ); fd = new FormData(); fd.left = new FormAttachment( middle, 0 ); fd.top = new FormAttachment( m_mappedTableNamesCombo, margin ); fd.right = new FormAttachment( m_mappingNamesBut, -margin ); m_mappingNamesCombo.setLayoutData( fd ); // store mapping in meta data Label storeMapping = new Label( wConfigComp, SWT.RIGHT ); storeMapping.setText( BaseMessages.getString( HBaseInputMeta.PKG, "HBaseInputDialog.StoreMapping.Label" ) ); storeMapping.setToolTipText( BaseMessages.getString( HBaseInputMeta.PKG, "HBaseInputDialog.StoreMapping.TipText" ) ); props.setLook( storeMapping ); fd = new FormData(); fd.left = new FormAttachment( 0, 0 ); fd.top = new FormAttachment( m_mappingNamesCombo, margin ); fd.right = new FormAttachment( middle, -margin ); storeMapping.setLayoutData( fd ); m_storeMappingInStepMetaData = new 
Button( wConfigComp, SWT.CHECK ); props.setLook( m_storeMappingInStepMetaData ); fd = new FormData(); fd.right = new FormAttachment( 100, 0 ); fd.left = new FormAttachment( middle, 0 ); fd.top = new FormAttachment( m_mappingNamesCombo, margin ); m_storeMappingInStepMetaData.setLayoutData( fd ); // keystart Label keyStartLab = new Label( wConfigComp, SWT.RIGHT ); keyStartLab.setText( Messages.getString( "HBaseInputDialog.KeyStart.Label" ) ); keyStartLab.setToolTipText( Messages.getString( "HBaseInputDialog.KeyStart.TipText" ) ); props.setLook( keyStartLab ); fd = new FormData(); fd.left = new FormAttachment( 0, 0 ); fd.top = new FormAttachment( m_storeMappingInStepMetaData, margin ); fd.right = new FormAttachment( middle, -margin ); keyStartLab.setLayoutData( fd ); m_keyStartText = new TextVar( transMeta, wConfigComp, SWT.SINGLE | SWT.LEFT | SWT.BORDER ); m_keyStartText.setToolTipText( Messages.getString( "HBaseInputDialog.KeyStart.TipText" ) ); m_keyStartText.addModifyListener( lsMod ); props.setLook( m_keyStartText ); fd = new FormData(); fd.right = new FormAttachment( 100, 0 ); fd.left = new FormAttachment( middle, 0 ); fd.top = new FormAttachment( m_storeMappingInStepMetaData, margin ); m_keyStartText.setLayoutData( fd ); // keystop Label keyStopLab = new Label( wConfigComp, SWT.RIGHT ); keyStopLab.setText( Messages.getString( "HBaseInputDialog.KeyStop.Label" ) ); keyStopLab.setToolTipText( Messages.getString( "HBaseInputDialog.KeyStop.TipText" ) ); props.setLook( keyStopLab ); fd = new FormData(); fd.left = new FormAttachment( 0, 0 ); fd.top = new FormAttachment( m_keyStartText, margin ); fd.right = new FormAttachment( middle, -margin ); keyStopLab.setLayoutData( fd ); m_keyStopText = new TextVar( transMeta, wConfigComp, SWT.SINGLE | SWT.LEFT | SWT.BORDER ); m_keyStopText.setToolTipText( Messages.getString( "HBaseInputDialog.KeyStop.TipText" ) ); m_keyStopText.addModifyListener( lsMod ); props.setLook( m_keyStopText ); fd = new FormData(); fd.right = new FormAttachment( 100, 0 ); fd.left = new FormAttachment( middle, 0 ); fd.top = new FormAttachment( m_keyStartText, margin ); m_keyStopText.setLayoutData( fd ); // Scanner caching Label scannerCacheLab = new Label( wConfigComp, SWT.RIGHT ); scannerCacheLab.setText( Messages.getString( "HBaseInputDialog.ScannerCache.Label" ) ); scannerCacheLab.setToolTipText( Messages.getString( "HBaseInputDialog.ScannerCache.TipText" ) ); props.setLook( scannerCacheLab ); fd = new FormData(); fd.left = new FormAttachment( 0, 0 ); fd.top = new FormAttachment( m_keyStopText, margin ); fd.right = new FormAttachment( middle, -margin ); scannerCacheLab.setLayoutData( fd ); m_scanCacheText = new TextVar( transMeta, wConfigComp, SWT.SINGLE | SWT.LEFT | SWT.BORDER ); m_scanCacheText.setToolTipText( Messages.getString( "HBaseInputDialog.ScannerCache.TipText" ) ); props.setLook( m_scanCacheText ); fd = new FormData(); fd.right = new FormAttachment( 100, 0 ); fd.left = new FormAttachment( middle, 0 ); fd.top = new FormAttachment( m_keyStopText, margin ); m_scanCacheText.setLayoutData( fd ); m_getKeyInfoBut = new Button( wConfigComp, SWT.PUSH ); m_getKeyInfoBut.setText( "Get Key/Fields Info" ); props.setLook( m_getKeyInfoBut ); fd = new FormData(); fd.right = new FormAttachment( 100, 0 ); fd.bottom = new FormAttachment( 100, -margin * 2 ); m_getKeyInfoBut.setLayoutData( fd ); m_getKeyInfoBut.addSelectionListener( new SelectionAdapter() { @Override public void widgetSelected( SelectionEvent e ) { checkKeyInformation( false, true ); } } ); Group keyGroup = new Group( 
wConfigComp, SWT.SHADOW_ETCHED_IN ); FormLayout keyLayout = new FormLayout(); keyGroup.setLayout( keyLayout ); props.setLook( keyGroup ); m_keyInfo = new Label( keyGroup, SWT.RIGHT ); m_keyInfo.setText( "-- Key details --" ); props.setLook( m_keyInfo ); fd = new FormData(); fd.top = new FormAttachment( 0, margin ); fd.left = new FormAttachment( 0, 0 ); fd.right = new FormAttachment( 100, -margin ); m_keyInfo.setLayoutData( fd ); fd = new FormData(); fd.right = new FormAttachment( m_getKeyInfoBut, -margin ); fd.left = new FormAttachment( middle, 0 ); fd.bottom = new FormAttachment( 100, -margin * 2 ); keyGroup.setLayoutData( fd ); // fields stuff ColumnInfo[] colinf = new ColumnInfo[] { new ColumnInfo( Messages.getString( "HBaseInputDialog.Fields.FIELD_ALIAS" ), ColumnInfo.COLUMN_TYPE_TEXT, false ), new ColumnInfo( Messages.getString( "HBaseInputDialog.Fields.FIELD_KEY" ), ColumnInfo.COLUMN_TYPE_TEXT, false ), new ColumnInfo( Messages.getString( "HBaseInputDialog.Fields.FIELD_FAMILY" ), ColumnInfo.COLUMN_TYPE_TEXT, false ), new ColumnInfo( Messages.getString( "HBaseInputDialog.Fields.FIELD_NAME" ), ColumnInfo.COLUMN_TYPE_TEXT, false ), new ColumnInfo( Messages.getString( "HBaseInputDialog.Fields.FIELD_TYPE" ), ColumnInfo.COLUMN_TYPE_TEXT, false ), new ColumnInfo( Messages.getString( "HBaseInputDialog.Fields.FIELD_FORMAT" ), ColumnInfo.COLUMN_TYPE_FORMAT, 3 ), new ColumnInfo( Messages.getString( "HBaseInputDialog.Fields.FIELD_INDEXED" ), ColumnInfo.COLUMN_TYPE_TEXT, false ), }; colinf[ 0 ].setReadOnly( true ); colinf[ 1 ].setReadOnly( true ); colinf[ 2 ].setReadOnly( true ); colinf[ 3 ].setReadOnly( true ); colinf[ 4 ].setReadOnly( true ); colinf[ 5 ].setReadOnly( true ); colinf[ 5 ].setComboValuesSelectionListener( new ComboValuesSelectionListener() { public String[] getComboValues( TableItem tableItem, int rowNr, int colNr ) { String[] comboValues = new String[] {}; int type = ValueMeta.getType( tableItem.getText( colNr - 1 ) ); switch ( type ) { case ValueMetaInterface.TYPE_DATE: comboValues = Const.getDateFormats(); break; case ValueMetaInterface.TYPE_INTEGER: case ValueMetaInterface.TYPE_BIGNUMBER: case ValueMetaInterface.TYPE_NUMBER: comboValues = Const.getNumberFormats(); break; default: break; } return comboValues; } } ); m_fieldsView = new TableView( transMeta, wConfigComp, SWT.FULL_SELECTION | SWT.MULTI, colinf, 1, lsMod, props ); fd = new FormData(); fd.top = new FormAttachment( m_scanCacheText, margin * 2 ); fd.bottom = new FormAttachment( m_getKeyInfoBut, -margin * 2 ); fd.left = new FormAttachment( 0, 0 ); fd.right = new FormAttachment( 100, 0 ); m_fieldsView.setLayoutData( fd ); fd = new FormData(); fd.left = new FormAttachment( 0, 0 ); fd.top = new FormAttachment( 0, 0 ); fd.right = new FormAttachment( 100, 0 ); fd.bottom = new FormAttachment( 100, 0 ); wConfigComp.setLayoutData( fd ); wConfigComp.layout(); m_wConfigTab.setControl( wConfigComp ); // --- mapping editor tab m_editorTab = new CTabItem( m_wTabFolder, SWT.NONE ); m_editorTab.setText( Messages.getString( "HBaseInputDialog.MappingEditorTab.TabTitle" ) ); m_mappingEditor = new MappingEditor( shell, m_wTabFolder, this, null, SWT.FULL_SELECTION | SWT.MULTI, false, props, transMeta, namedClusterService, runtimeTestActionService, runtimeTester, namedClusterServiceLocator ); fd = new FormData(); fd.top = new FormAttachment( 0, 0 ); fd.left = new FormAttachment( 0, 0 ); m_mappingEditor.setLayoutData( fd ); fd = new FormData(); fd.left = new FormAttachment( 0, 0 ); fd.top = new FormAttachment( 0, 0 ); fd.bottom = new 
FormAttachment( 100, -margin * 2 ); fd.right = new FormAttachment( 100, 0 ); m_mappingEditor.setLayoutData( fd ); m_mappingEditor.layout(); m_editorTab.setControl( m_mappingEditor ); // ----- Start of the filter tab -------- m_wFilterTab = new CTabItem( m_wTabFolder, SWT.NONE ); m_wFilterTab.setText( Messages.getString( "HBaseInputDialog.FilterTab.TabTitle" ) ); Composite wFilterComp = new Composite( m_wTabFolder, SWT.NONE ); props.setLook( wFilterComp ); FormLayout filterLayout = new FormLayout(); filterLayout.marginWidth = 3; filterLayout.marginHeight = 3; wFilterComp.setLayout( filterLayout ); m_matchAllBut = new Button( wFilterComp, SWT.RADIO ); m_matchAllBut.setText( Messages.getString( "HBaseInputDialog.Filters.RADIO_ALL" ) ); props.setLook( m_matchAllBut ); fd = new FormData(); fd.top = new FormAttachment( 0, 0 ); fd.left = new FormAttachment( 0, 0 ); m_matchAllBut.setLayoutData( fd ); m_matchAllBut.addSelectionListener( new SelectionAdapter() { @Override public void widgetSelected( SelectionEvent e ) { m_currentMeta.setChanged(); } } ); m_matchAnyBut = new Button( wFilterComp, SWT.RADIO ); m_matchAnyBut.setText( Messages.getString( "HBaseInputDialog.Filters.RADIO_ANY" ) ); props.setLook( m_matchAnyBut ); fd = new FormData(); fd.top = new FormAttachment( 0, 0 ); fd.left = new FormAttachment( m_matchAllBut, 0 ); fd.right = new FormAttachment( 100, -margin ); m_matchAnyBut.setLayoutData( fd ); m_matchAnyBut.addSelectionListener( new SelectionAdapter() { @Override public void widgetSelected( SelectionEvent e ) { m_currentMeta.setChanged(); } } ); m_matchAllBut.setSelection( true ); final ColumnInfo[] colinf2 = new ColumnInfo[] { new ColumnInfo( Messages.getString( "HBaseInputDialog.Filters.FIELD_ALIAS" ) + " ", ColumnInfo.COLUMN_TYPE_CCOMBO, false ), new ColumnInfo( Messages.getString( "HBaseInputDialog.Filters.FIELD_TYPE" ), ColumnInfo.COLUMN_TYPE_TEXT, false ), new ColumnInfo( Messages.getString( "HBaseInputDialog.Filters.FIELD_OPERATOR" ), ColumnInfo.COLUMN_TYPE_CCOMBO, false ), new ColumnInfo( Messages.getString( "HBaseInputDialog.Filters.FIELD_COMPARISON" ), ColumnInfo.COLUMN_TYPE_TEXT, false ), new ColumnInfo( Messages.getString( "HBaseInputDialog.Filters.FIELD_FORMAT" ), ColumnInfo.COLUMN_TYPE_CCOMBO, false ), new ColumnInfo( Messages.getString( "HBaseInputDialog.Filters.FIELD_SIGNED" ), ColumnInfo.COLUMN_TYPE_CCOMBO, false ), }; colinf2[ 0 ].setReadOnly( false ); colinf2[ 1 ].setReadOnly( false ); colinf2[ 2 ].setReadOnly( true ); colinf2[ 3 ].setReadOnly( false ); colinf2[ 4 ].setReadOnly( false ); colinf2[ 5 ].setReadOnly( true ); m_filterAliasCI = colinf2[ 0 ]; m_filterAliasCI.setComboValues( new String[] { "" } ); colinf2[ 2 ].setComboValues( ColumnFilter.ComparisonType.getAllOperators() ); colinf2[ 5 ].setComboValues( new String[] { "Y", "N" } ); colinf2[ 2 ].setComboValuesSelectionListener( new ComboValuesSelectionListener() { public String[] getComboValues( TableItem tableItem, int rowNr, int colNr ) { String[] comboValues = colinf2[ 2 ].getComboValues(); // try to fill in the type String alias = tableItem.getText( 1 ); HBaseValueMetaInterface vm = null; if ( !Const.isEmpty( alias ) ) { vm = setFilterTableTypeColumn( tableItem ); } if ( vm != null ) { if ( vm.isNumeric() || vm.isDate() || vm.isBoolean() ) { comboValues = ColumnFilter.ComparisonType.getNumericOperators(); } else if ( vm.isString() ) { comboValues = ColumnFilter.ComparisonType.getStringOperators(); } else { comboValues = new String[ 1 ]; comboValues[ 0 ] = ""; } } else { // if we've not got a connection, 
or there is no user-specified // columns saved in the meta class, then just get all the // operators comboValues = ColumnFilter.ComparisonType.getAllOperators(); } return comboValues; } } ); colinf2[ 4 ].setComboValuesSelectionListener( new ComboValuesSelectionListener() { public String[] getComboValues( TableItem tableItem, int rowNr, int colNr ) { String[] comboValues = new String[] {}; // try to fill in the type String alias = tableItem.getText( 1 ); if ( !Const.isEmpty( alias ) ) { setFilterTableTypeColumn( tableItem ); } int type = ValueMeta.getType( tableItem.getText( 2 ) ); switch ( type ) { case ValueMetaInterface.TYPE_DATE: comboValues = Const.getDateFormats(); break; case ValueMetaInterface.TYPE_INTEGER: case ValueMetaInterface.TYPE_BIGNUMBER: case ValueMetaInterface.TYPE_NUMBER: comboValues = Const.getNumberFormats(); break; default: break; // if there is not type information available (no connection and no // user-specified // columns in the meta class) then the user will just have to type // in their own // formatting string (if necessary) } return comboValues; } } ); m_filtersView = new TableView( transMeta, wFilterComp, SWT.FULL_SELECTION | SWT.MULTI, colinf2, 1, lsMod, props ); fd = new FormData(); fd.top = new FormAttachment( m_matchAllBut, margin * 2 ); fd.bottom = new FormAttachment( 100, -margin * 2 ); fd.left = new FormAttachment( 0, 0 ); fd.right = new FormAttachment( 100, 0 ); m_filtersView.setLayoutData( fd ); fd = new FormData(); fd.left = new FormAttachment( 0, 0 ); fd.top = new FormAttachment( 0, 0 ); fd.right = new FormAttachment( 100, 0 ); fd.bottom = new FormAttachment( 100, 0 ); wFilterComp.setLayoutData( fd ); wFilterComp.layout(); m_wFilterTab.setControl( wFilterComp ); fd = new FormData(); fd.left = new FormAttachment( 0, 0 ); fd.top = new FormAttachment( m_stepnameText, margin ); fd.right = new FormAttachment( 100, 0 ); fd.bottom = new FormAttachment( 100, -50 ); m_wTabFolder.setLayoutData( fd ); // Buttons inherited from BaseStepDialog wOK = new Button( shell, SWT.PUSH ); wOK.setText( Messages.getString( "System.Button.OK" ) ); wCancel = new Button( shell, SWT.PUSH ); wCancel.setText( Messages.getString( "System.Button.Cancel" ) ); setButtonPositions( new Button[] { wOK, wCancel }, margin, m_wTabFolder ); // Add listeners lsCancel = new Listener() { public void handleEvent( Event e ) { cancel(); } }; lsOK = new Listener() { public void handleEvent( Event e ) { ok(); } }; wCancel.addListener( SWT.Selection, lsCancel ); wOK.addListener( SWT.Selection, lsOK ); lsDef = new SelectionAdapter() { @Override public void widgetDefaultSelected( SelectionEvent e ) { ok(); } }; m_stepnameText.addSelectionListener( lsDef ); // Detect X or ALT-F4 or something that kills this window... 
shell.addShellListener( new ShellAdapter() { @Override public void shellClosed( ShellEvent e ) { cancel(); } } ); m_wTabFolder.setSelection( 0 ); setSize(); getData(); ServiceStatus serviceStatus = m_currentMeta.getServiceStatus(); if ( !serviceStatus.isOk() ) { new ErrorDialog( shell, Messages.getString( "Dialog.Error" ), Messages.getString( "HBaseInput.Error.ServiceStatus" ), serviceStatus.getException() ); } shell.open(); while ( !shell.isDisposed() ) { if ( !display.readAndDispatch() ) { display.sleep(); } } return stepname; } protected HBaseValueMetaInterface setFilterTableTypeColumn( TableItem tableItem ) { // try to fill in the type String alias = tableItem.getText( 1 ).trim(); if ( !Const.isEmpty( alias ) ) { // try using the mapping information first since it is complete if ( transMeta.environmentSubstitute( alias ).equals( m_keyName ) ) { tableItem.setText( 2, m_keyType.toString() ); HBaseValueMetaInterface vm = m_mappedColumns.get( transMeta.environmentSubstitute( alias ) ); if ( vm != null ) { vm.setType( HBaseInput.getKettleTypeByKeyType( m_keyType ) ); String type = ValueMetaBase.getTypeDesc( vm.getType() ); tableItem.setText( 2, type ); return vm; } } else if ( m_mappedColumns != null ) { HBaseValueMetaInterface vm = m_mappedColumns.get( transMeta.environmentSubstitute( alias ) ); if ( vm != null ) { String type = ValueMetaBase.getTypeDesc( vm.getType() ); if ( vm.getType() == ValueMetaInterface.TYPE_INTEGER ) { if ( vm.getIsLongOrDouble() ) { type = "Long"; } else { type = "Integer"; } } if ( vm.getType() == ValueMetaInterface.TYPE_NUMBER ) { if ( vm.getIsLongOrDouble() ) { type = "Double"; } else { type = "Float"; } } tableItem.setText( 2, type ); return vm; } } else if ( m_currentMeta.getOutputFields() != null && m_currentMeta.getOutputFields().size() > 0 ) { // use the user-selected fields information for ( HBaseValueMetaInterface vm : m_currentMeta.getOutputFields() ) { String aliasF = vm.getAlias(); if ( alias.equals( aliasF ) ) { String type = ValueMetaBase.getTypeDesc( vm.getType() ); tableItem.setText( 2, type ); return vm; } } } } return null; } protected void updateMetaConnectionDetails( HBaseInputMeta meta ) { NamedCluster nc = namedClusterWidget.getSelectedNamedCluster(); if ( nc != null ) { meta.setNamedCluster( nc ); } meta.setCoreConfigURL( m_coreConfigText.getText() ); meta.setDefaulConfigURL( m_defaultConfigText.getText() ); meta.setSourceTableName( m_mappedTableNamesCombo.getText() ); meta.setSourceMappingName( m_mappingNamesCombo.getText() ); } protected void ok() { if ( Const.isEmpty( m_stepnameText.getText() ) ) { MessageBox mb = new MessageBox( shell, SWT.OK | SWT.ICON_ERROR ); mb.setText( Messages.getString( "System.StepJobEntryNameMissing.Title" ) ); mb.setMessage( Messages.getString( "System.JobEntryNameMissing.Msg" ) ); mb.open(); return; } NamedCluster selectedNamedCluster = namedClusterWidget.getSelectedNamedCluster(); if ( selectedNamedCluster == null ) { MessageBox mb = new MessageBox( shell, SWT.OK | SWT.ICON_ERROR ); mb.setText( Messages.getString( "Dialog.Error" ) ); mb.setMessage( Messages.getString( "HBaseInputDialog.NamedClusterNotSelected.Msg" ) ); mb.open(); return; } else { if ( StringUtils.isEmpty( selectedNamedCluster.getZooKeeperHost() ) && !selectedNamedCluster.isUseGateway() ) { MessageBox mb = new MessageBox( shell, SWT.OK | SWT.ICON_ERROR ); mb.setText( Messages.getString( "Dialog.Error" ) ); mb.setMessage( Messages.getString( "HBaseInputDialog.NamedClusterMissingValues.Msg" ) ); mb.open(); return; } } HBaseService hBaseService 
= null; try { hBaseService = getHBaseService(); } catch ( ClusterInitializationException e ) { throw new RuntimeException( e ); } HBaseValueMetaInterfaceFactory hBaseValueMetaInterfaceFactory = hBaseService.getHBaseValueMetaInterfaceFactory(); stepname = m_stepnameText.getText(); updateMetaConnectionDetails( m_currentMeta ); m_currentMeta.setKeyStartValue( m_keyStartText.getText() ); m_currentMeta.setKeyStopValue( m_keyStopText.getText() ); m_currentMeta.setScannerCacheSize( m_scanCacheText.getText() ); m_currentMeta.setMatchAnyFilter( m_matchAnyBut.getSelection() ); int numNonEmpty = m_fieldsView.nrNonEmpty(); if ( numNonEmpty > 0 ) { ByteConversionUtil byteConversionUtil = hBaseService.getByteConversionUtil(); List outputFields = new ArrayList<>(); for ( int i = 0; i < numNonEmpty; i++ ) { TableItem item = m_fieldsView.getNonEmpty( i ); String alias = item.getText( 1 ).trim(); String isKey = item.getText( 2 ).trim(); String family = item.getText( 3 ).trim(); String column = item.getText( 4 ).trim(); String type = item.getText( 5 ).trim(); String format = item.getText( 6 ).trim(); HBaseValueMetaInterface vm = hBaseValueMetaInterfaceFactory .createHBaseValueMetaInterface( family, column, alias, ValueMeta.getType( type ), -1, -1 ); vm.setTableName( m_mappedTableNamesCombo.getText() ); vm.setMappingName( m_mappingNamesCombo.getText() ); vm.setKey( isKey.equalsIgnoreCase( "Y" ) ); String indexItems = m_indexedLookup.get( alias ); if ( indexItems != null ) { Object[] values = byteConversionUtil.stringIndexListToObjects( indexItems ); vm.setIndex( values ); vm.setStorageType( ValueMetaInterface.STORAGE_TYPE_INDEXED ); } vm.setConversionMask( format ); outputFields.add( vm ); } m_currentMeta.setOutputFields( outputFields ); } else { m_currentMeta.setOutputFields( null ); // output everything } numNonEmpty = m_filtersView.nrNonEmpty(); if ( numNonEmpty > 0 ) { ColumnFilterFactory columnFilterFactory = hBaseService.getColumnFilterFactory(); List filters = new ArrayList<>(); for ( int i = 0; i < m_filtersView.nrNonEmpty(); i++ ) { TableItem item = m_filtersView.getNonEmpty( i ); String alias = item.getText( 1 ).trim(); String type = item.getText( 2 ).trim(); String operator = item.getText( 3 ).trim(); String comparison = item.getText( 4 ).trim(); String signed = item.getText( 6 ).trim(); String format = item.getText( 5 ).trim(); ColumnFilter f = columnFilterFactory.createFilter( alias ); f.setFieldType( type ); f.setComparisonOperator( ColumnFilter.ComparisonType.stringToOpp( operator ) ); f.setConstant( comparison ); f.setSignedComparison( signed.equalsIgnoreCase( "Y" ) ); f.setFormat( format ); filters.add( f ); } m_currentMeta.setColumnFilters( filters ); } else { m_currentMeta.setColumnFilters( null ); } if ( m_storeMappingInStepMetaData.getSelection() ) { if ( Const.isEmpty( m_mappingNamesCombo.getText() ) ) { List problems = new ArrayList(); Mapping toSet = m_mappingEditor.getMapping( false, problems, false ); if ( problems.size() > 0 ) { StringBuffer p = new StringBuffer(); for ( String s : problems ) { p.append( s ).append( "\n" ); } MessageDialog md = new MessageDialog( shell, BaseMessages.getString( HBaseInputMeta.PKG, "HBaseInputDialog.Error.IssuesWithMapping.Title" ), null, BaseMessages.getString( HBaseInputMeta.PKG, "HBaseInputDialog.Error.IssuesWithMapping" ) + ":\n\n" + p.toString(), MessageDialog.WARNING, new String[] { BaseMessages.getString( HBaseInputMeta.PKG, "HBaseInputDialog.Error.IssuesWithMapping.ButtonOK" ), BaseMessages.getString( HBaseInputMeta.PKG, 
"HBaseInputDialog.Error.IssuesWithMapping.ButtonCancel" ) }, 0 ); MessageDialog.setDefaultImage( GUIResource.getInstance().getImageSpoon() ); int idx = md.open() & 0xFF; if ( idx == 1 || idx == 255 /* 255 = escape pressed */ ) { return; // Cancel } } m_currentMeta.setMapping( toSet ); } else { HBaseConnection connection = null; try { connection = getHBaseConnection(); MappingAdmin admin = new MappingAdmin( connection ); Mapping current = admin.getMapping( transMeta.environmentSubstitute( m_mappedTableNamesCombo.getText() ), transMeta .environmentSubstitute( m_mappingNamesCombo.getText() ) ); m_currentMeta.setMapping( current ); m_currentMeta.setSourceMappingName( "" ); } catch ( Exception e ) { logError( Messages.getString( "HBaseInputDialog.ErrorMessage.UnableToGetMapping" ) + " \"" + transMeta.environmentSubstitute( m_mappedTableNamesCombo.getText() + "," + transMeta.environmentSubstitute( m_mappingNamesCombo.getText() ) + "\"" ), e ); new ErrorDialog( shell, Messages.getString( "HBaseInputDialog.ErrorMessage.UnableToGetMapping" ), Messages .getString( "HBaseInputDialog.ErrorMessage.UnableToGetMapping" ) + " \"" + transMeta.environmentSubstitute( m_mappedTableNamesCombo.getText() + "," + transMeta.environmentSubstitute( m_mappingNamesCombo.getText() ) + "\"" ), e ); } finally { if ( connection != null ) { try { connection.close(); } catch ( Exception e ) { String msg = Messages.getString( "HBaseInputDialog.ErrorMessage.FailedClosingHBaseConnection" ); logError( msg, e ); new ErrorDialog( shell, msg, msg, e ); } } } } } else { // we're going to use a mapping stored in HBase - null out any stored // mapping m_currentMeta.setMapping( null ); } if ( !m_originalMeta.equals( m_currentMeta ) ) { m_currentMeta.setChanged(); changed = m_currentMeta.hasChanged(); } dispose(); } protected void cancel() { stepname = null; m_currentMeta.setChanged( changed ); dispose(); } private void getData() { namedClusterWidget.setSelectedNamedCluster( m_currentMeta.getNamedCluster().getName() ); if ( !Const.isEmpty( m_currentMeta.getCoreConfigURL() ) ) { m_coreConfigText.setText( m_currentMeta.getCoreConfigURL() ); } if ( !Const.isEmpty( m_currentMeta.getDefaultConfigURL() ) ) { m_defaultConfigText.setText( m_currentMeta.getDefaultConfigURL() ); } if ( !Const.isEmpty( m_currentMeta.getSourceTableName() ) ) { m_mappedTableNamesCombo.setText( m_currentMeta.getSourceTableName() ); } if ( !Const.isEmpty( m_currentMeta.getSourceMappingName() ) ) { m_mappingNamesCombo.setText( m_currentMeta.getSourceMappingName() ); } if ( !Const.isEmpty( m_currentMeta.getKeyStartValue() ) ) { m_keyStartText.setText( m_currentMeta.getKeyStartValue() ); } if ( !Const.isEmpty( m_currentMeta.getKeyStopValue() ) ) { m_keyStopText.setText( m_currentMeta.getKeyStopValue() ); } if ( !Const.isEmpty( m_currentMeta.getScannerCacheSize() ) ) { m_scanCacheText.setText( m_currentMeta.getScannerCacheSize() ); } m_matchAnyBut.setSelection( m_currentMeta.getMatchAnyFilter() ); m_matchAllBut.setSelection( !m_currentMeta.getMatchAnyFilter() ); // filters if ( m_currentMeta.getColumnFilters() != null && m_currentMeta.getColumnFilters().size() > 0 ) { for ( ColumnFilter f : m_currentMeta.getColumnFilters() ) { TableItem item = new TableItem( m_filtersView.table, SWT.NONE ); if ( !Const.isEmpty( f.getFieldAlias() ) ) { item.setText( 1, f.getFieldAlias() ); } if ( !Const.isEmpty( f.getFieldType() ) ) { item.setText( 2, f.getFieldType() ); } if ( f.getComparisonOperator() != null ) { item.setText( 3, f.getComparisonOperator().toString() ); } if ( 
!Const.isEmpty( f.getConstant() ) ) { item.setText( 4, f.getConstant() ); } item.setText( 6, ( f.getSignedComparison() ) ? "Y" : "N" ); if ( !Const.isEmpty( f.getFormat() ) ) { item.setText( 5, f.getFormat() ); } } m_filtersView.removeEmptyRows(); m_filtersView.setRowNums(); m_filtersView.optWidth( true ); } if ( Const.isEmpty( m_currentMeta.getSourceMappingName() ) && m_currentMeta.getMapping() != null ) { m_mappingEditor.setMapping( m_currentMeta.getMapping() ); m_storeMappingInStepMetaData.setSelection( true ); } // do the key and columns checkKeyInformation( true, false ); } @Override public HBaseService getHBaseService() throws ClusterInitializationException { NamedCluster nc = namedClusterWidget.getSelectedNamedCluster(); return namedClusterServiceLocator.getService( nc, HBaseService.class ); } public HBaseConnection getHBaseConnection() throws IOException, ClusterInitializationException { HBaseConnection conf = null; String coreConf = ""; String defaultConf = ""; String zookeeperHosts = ""; NamedCluster nc = namedClusterWidget.getSelectedNamedCluster(); HBaseService hBaseService = getHBaseService(); if ( nc != null && !nc.isUseGateway()) { zookeeperHosts = transMeta.environmentSubstitute( nc.getZooKeeperHost() ); } if ( !Const.isEmpty( m_coreConfigText.getText() ) ) { coreConf = transMeta.environmentSubstitute( m_coreConfigText.getText() ); } if ( !Const.isEmpty( m_defaultConfigText.getText() ) ) { defaultConf = transMeta.environmentSubstitute( m_defaultConfigText.getText() ); } if ( Const.isEmpty( zookeeperHosts ) && Const.isEmpty( coreConf ) && Const.isEmpty( defaultConf ) && ( nc == null || !nc.isUseGateway() ) ) { throw new IOException( BaseMessages.getString( HBaseInputMeta.PKG, "MappingDialog.Error.Message.CantConnectNoConnectionDetailsProvided" ) ); } return hBaseService.getHBaseConnection( transMeta, coreConf, defaultConf, null ); } private void checkKeyInformation( boolean quiet, boolean readFieldsFromMapping ) { String zookeeperQuorumText = null; NamedCluster nc = namedClusterWidget.getSelectedNamedCluster(); if ( nc != null ) { zookeeperQuorumText = nc.getZooKeeperHost(); } boolean displayFieldsEmbeddedMapping = ( ( m_mappingEditor.getMapping( false, null, false ) != null && Const.isEmpty( m_mappingNamesCombo.getText() ) ) ); boolean displayFieldsMappingFromHBase = ( !Const.isEmpty( m_coreConfigText.getText() ) || !Const.isEmpty( zookeeperQuorumText ) || ( nc != null && nc.isUseGateway() ) ) && !Const.isEmpty( m_mappedTableNamesCombo.getText() ) && !Const.isEmpty( m_mappingNamesCombo.getText() ); if ( displayFieldsEmbeddedMapping || displayFieldsMappingFromHBase ) { try { m_indexedLookup = new HashMap(); MappingAdmin admin = null; Mapping current = null; Map mappedColumns = null; boolean filterAliasesDone = false; HBaseConnection connection = null; try { if ( displayFieldsMappingFromHBase && readFieldsFromMapping ) { connection = getHBaseConnection(); if ( displayFieldsMappingFromHBase ) { admin = new MappingAdmin( connection ); } current = admin.getMapping( transMeta.environmentSubstitute( m_mappedTableNamesCombo.getText() ), transMeta .environmentSubstitute( m_mappingNamesCombo.getText() ) ); } else { current = m_mappingEditor.getMapping( false, null, true ); } if ( current != null ) { // Key information m_keyName = current.getKeyName(); m_keyType = current.getKeyType(); m_keyInfo.setText( "HBase Key: " + m_keyName + " (" + m_keyType.toString() + ")" ); mappedColumns = current.getMappedColumns(); m_mappedColumns = mappedColumns; // cached copy // Set up the alias combo 
box in the filters tab List filterAliasNames = new ArrayList(); filterAliasNames.add( m_keyName ); for ( String alias : mappedColumns.keySet() ) { HBaseValueMetaInterface column = mappedColumns.get( alias ); String aliasS = column.getAlias(); if ( column.isNumeric() || column.isDate() || column.isString() || column.isBoolean() ) { filterAliasNames.add( aliasS ); } } String[] filterAliasNamesA = filterAliasNames.toArray( new String[ 1 ] ); m_filterAliasCI.setComboValues( filterAliasNamesA ); filterAliasesDone = true; } else { m_keyInfo.setText( "" ); } } catch ( Exception ex ) { if ( !quiet ) { logError( Messages.getString( "HBaseInputDialog.ErrorMessage.UnableToGetMapping" ) + " \"" + transMeta.environmentSubstitute( m_mappedTableNamesCombo.getText() + "," + transMeta.environmentSubstitute( m_mappingNamesCombo.getText() ) + "\"" ), ex ); new ErrorDialog( shell, Messages.getString( "HBaseInputDialog.ErrorMessage.UnableToGetMapping" ), Messages .getString( "HBaseInputDialog.ErrorMessage.UnableToGetMapping" ) + " \"" + transMeta.environmentSubstitute( m_mappedTableNamesCombo.getText() + "," + transMeta.environmentSubstitute( m_mappingNamesCombo.getText() ) + "\"" ), ex ); } m_keyInfo.setText( Messages.getString( "HBaseInputDialog.ErrorMessage.UnableToGetMapping" ) ); } finally { if ( connection != null ) { connection.close(); } } // Fields information m_fieldsView.clearAll( false ); ByteConversionUtil byteConversionUtil = getHBaseService().getByteConversionUtil(); if ( current != null && readFieldsFromMapping ) { TableItem item = new TableItem( m_fieldsView.table, SWT.NONE ); item.setText( 1, m_keyName ); item.setText( 2, "Y" ); item.setText( 7, "N" ); if ( current.getKeyType() == Mapping.KeyType.DATE || current.getKeyType() == Mapping.KeyType.UNSIGNED_DATE ) { item.setText( 5, ValueMeta.getTypeDesc( ValueMetaInterface.TYPE_DATE ) ); } else if ( current.getKeyType() == Mapping.KeyType.STRING ) { item.setText( 5, ValueMeta.getTypeDesc( ValueMetaInterface.TYPE_STRING ) ); } else if ( current.getKeyType() == Mapping.KeyType.INTEGER || current.getKeyType() == Mapping.KeyType.UNSIGNED_INTEGER || current.getKeyType() == Mapping.KeyType.UNSIGNED_LONG || current.getKeyType() == Mapping.KeyType.LONG ) { item.setText( 5, ValueMeta.getTypeDesc( ValueMetaInterface.TYPE_INTEGER ) ); } else { item.setText( 5, ValueMeta.getTypeDesc( ValueMetaInterface.TYPE_BINARY ) ); } // get all the fields from the mapping for ( String alias : mappedColumns.keySet() ) { if ( alias.equalsIgnoreCase( m_keyName ) ) { continue; } HBaseValueMetaInterface column = mappedColumns.get( alias ); String aliasS = column.getAlias(); String family = column.getColumnFamily(); String name = column.getColumnName(); String type = column.getTypeDesc(); String format = column.getConversionMask(); item = new TableItem( m_fieldsView.table, SWT.NONE ); if ( column.getStorageType() == ValueMetaInterface.STORAGE_TYPE_INDEXED ) { String valuesString = byteConversionUtil.objectIndexValuesToString( column.getIndex() ); m_indexedLookup.put( aliasS, valuesString ); item.setText( 7, "Y" ); } else { item.setText( 7, "N" ); } item.setText( 1, aliasS ); item.setText( 2, "N" ); item.setText( 3, family ); item.setText( 4, name ); item.setText( 5, type ); if ( !Const.isEmpty( format ) ) { item.setText( 6, format ); } } } if ( !readFieldsFromMapping && m_currentMeta.getOutputFields() != null && m_currentMeta.getOutputFields().size() > 0 ) { // user has selected some fields from the mapping to output List filterAliasNames = new ArrayList(); for ( 
HBaseValueMetaInterface column : m_currentMeta.getOutputFields() ) { TableItem item = new TableItem( m_fieldsView.table, SWT.NONE ); String aliasS = column.getAlias(); String type = column.getTypeDesc(); item.setText( 1, aliasS ); item.setText( 5, type ); if ( column.isKey() ) { item.setText( 2, "Y" ); item.setText( 7, "N" ); if ( !Const.isEmpty( column.getConversionMask() ) ) { item.setText( 6, column.getConversionMask() ); } if ( !filterAliasesDone ) { //todo check for key type may be do not work in some cases filterAliasNames.add( aliasS ); } continue; // skip the rest } item.setText( 2, "N" ); if ( column.isNumeric() || column.isDate() || column.isString() ) { if ( !filterAliasesDone ) { filterAliasNames.add( aliasS ); } } String family = column.getColumnFamily(); String name = column.getColumnName(); String format = column.getConversionMask(); if ( column.getStorageType() == ValueMetaInterface.STORAGE_TYPE_INDEXED ) { String valuesString = byteConversionUtil.objectIndexValuesToString( column.getIndex() ); m_indexedLookup.put( aliasS, valuesString ); item.setText( 7, "Y" ); } else { item.setText( 7, "N" ); } item.setText( 3, family ); item.setText( 4, name ); if ( !Const.isEmpty( format ) ) { item.setText( 6, format ); } } // set the allowable combo values for the selectable columns in the // filter tab if ( !filterAliasesDone ) { String[] filterAliasNamesA = filterAliasNames.toArray( new String[ 1 ] ); m_filterAliasCI.setComboValues( filterAliasNamesA ); filterAliasesDone = true; } } m_fieldsView.removeEmptyRows(); m_fieldsView.setRowNums(); m_fieldsView.optWidth( true ); } catch ( Exception ex ) { if ( !quiet ) { logError( Messages.getString( "HBaseInputDialog.ErrorMessage.UnableToGetMapping" ) + " \"" + transMeta.environmentSubstitute( m_mappedTableNamesCombo.getText() + "," + transMeta.environmentSubstitute( m_mappingNamesCombo.getText() ) + "\"" ), ex ); new ErrorDialog( shell, Messages.getString( "HBaseInputDialog.ErrorMessage.UnableToGetMapping" ), Messages .getString( "HBaseInputDialog.ErrorMessage.UnableToGetMapping" ) + " \"" + transMeta.environmentSubstitute( m_mappedTableNamesCombo.getText() + "," + transMeta.environmentSubstitute( m_mappingNamesCombo.getText() ) + "\"" ), ex ); } m_keyInfo.setText( Messages.getString( "HBaseInputDialog.ErrorMessage.UnableToGetMapping" ) ); } } else { m_keyInfo.setText( "" ); } } private void setupMappedTableNames() { HBaseConnection connection = null; Cursor busy = new Cursor( shell.getDisplay(), SWT.CURSOR_WAIT ); try { shell.setCursor( busy ); connection = getHBaseConnection(); MappingAdmin admin = new MappingAdmin( connection ); Set tableNames = admin.getMappedTables( parseNamespaceFromTableName( null ) ); m_mappedTableNamesCombo.removeAll(); for ( String s : tableNames ) { m_mappedTableNamesCombo.add( s ); } } catch ( Exception e ) { logError( Messages.getString( "HBaseInputDialog.ErrorMessage.UnableToConnect" ), e ); new ErrorDialog( shell, Messages.getString( "HBaseInputDialog.ErrorMessage." 
+ "UnableToConnect" ), Messages .getString( "HBaseInputDialog.ErrorMessage.UnableToConnect" ), e ); } finally { shell.setCursor( null ); busy.dispose(); if ( connection != null ) { try { connection.close(); } catch ( Exception e ) { String msg = Messages.getString( "HBaseInputDialog.ErrorMessage.FailedClosingHBaseConnection" ); logError( msg, e ); new ErrorDialog( shell, msg, msg, e ); } } } } private void setupMappingNamesForTable( boolean quiet ) { m_mappingNamesCombo.removeAll(); if ( !Const.isEmpty( m_mappedTableNamesCombo.getText() ) ) { HBaseConnection connection = null; try { connection = getHBaseConnection(); MappingAdmin admin = new MappingAdmin( connection ); List mappingNames = admin.getMappingNames( transMeta.environmentSubstitute( m_mappedTableNamesCombo.getText().trim() ) ); for ( String n : mappingNames ) { m_mappingNamesCombo.add( n ); } } catch ( Exception ex ) { if ( !quiet ) { logError( Messages.getString( "HBaseInputDialog.ErrorMessage.UnableToConnect" ), ex ); new ErrorDialog( shell, Messages.getString( "HBaseInputDialog.ErrorMessage." + "UnableToConnect" ), Messages .getString( "HBaseInputDialog.ErrorMessage.UnableToConnect" ), ex ); } } finally { if ( connection != null ) { try { connection.close(); } catch ( Exception e ) { if ( !quiet ) { String msg = Messages.getString( "HBaseInputDialog.ErrorMessage.FailedClosingHBaseConnection" ); logError( msg, e ); new ErrorDialog( shell, msg, msg, e ); } } } } } } private String parseNamespaceFromTableName( String defaultNamespaceIfNoneSpecified ) { return HbaseUtil.parseNamespaceFromTableName( transMeta.environmentSubstitute( m_mappedTableNamesCombo.getText() ), defaultNamespaceIfNoneSpecified ); } public String getCurrentConfiguration() { updateMetaConnectionDetails( m_configurationMeta ); return m_configurationMeta.getXML(); } } ================================================ FILE: kettle-plugins/hbase/core/src/main/java/org/pentaho/big/data/kettle/plugins/hbase/input/HBaseInputMeta.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. 
* * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.hbase.input; import com.google.common.annotations.VisibleForTesting; import org.pentaho.big.data.api.services.BigDataServicesHelper; import org.pentaho.big.data.impl.cluster.NamedClusterManager; import org.pentaho.big.data.kettle.plugins.hbase.FilterDefinition; import org.pentaho.big.data.kettle.plugins.hbase.HbaseUtil; import org.pentaho.big.data.kettle.plugins.hbase.MappingDefinition; import org.pentaho.big.data.kettle.plugins.hbase.NamedClusterLoadSaveUtil; import org.pentaho.big.data.kettle.plugins.hbase.ServiceStatus; import org.pentaho.big.data.kettle.plugins.hbase.mapping.MappingAdmin; import org.pentaho.big.data.kettle.plugins.hbase.mapping.MappingUtils; import org.pentaho.big.data.kettle.plugins.hbase.meta.AELHBaseMappingImpl; import org.pentaho.big.data.kettle.plugins.hbase.meta.AELHBaseValueMetaImpl; import org.pentaho.di.core.CheckResult; import org.pentaho.di.core.CheckResultInterface; import org.pentaho.di.core.Const; import org.pentaho.di.core.annotations.Step; import org.pentaho.di.core.bowl.Bowl; import org.pentaho.di.core.database.DatabaseMeta; import org.pentaho.di.core.exception.KettleException; import org.pentaho.di.core.exception.KettleStepException; import org.pentaho.di.core.exception.KettleXMLException; import org.pentaho.di.core.injection.Injection; import org.pentaho.di.core.injection.InjectionDeep; import org.pentaho.di.core.injection.InjectionSupported; import org.pentaho.di.core.row.RowMeta; import org.pentaho.di.core.row.RowMetaInterface; import org.pentaho.di.core.row.ValueMeta; import org.pentaho.di.core.row.ValueMetaInterface; import org.pentaho.di.core.service.PluginServiceLoader; import org.pentaho.di.core.variables.VariableSpace; import org.pentaho.di.core.variables.Variables; import org.pentaho.di.core.xml.XMLHandler; import org.pentaho.di.metastore.MetaStoreConst; import org.pentaho.di.repository.ObjectId; import org.pentaho.di.repository.Repository; import org.pentaho.di.trans.Trans; import org.pentaho.di.trans.TransMeta; import org.pentaho.di.trans.step.BaseStepMeta; import org.pentaho.di.trans.step.StepDataInterface; import org.pentaho.di.trans.step.StepInterface; import org.pentaho.di.trans.step.StepMeta; import org.pentaho.di.trans.step.StepMetaInterface; import org.pentaho.hadoop.shim.api.cluster.ClusterInitializationException; import org.pentaho.hadoop.shim.api.cluster.NamedCluster; import org.pentaho.hadoop.shim.api.cluster.NamedClusterService; import org.pentaho.hadoop.shim.api.cluster.NamedClusterServiceLocator; import org.pentaho.hadoop.shim.api.hbase.ByteConversionUtil; import org.pentaho.hadoop.shim.api.hbase.HBaseConnection; import org.pentaho.hadoop.shim.api.hbase.HBaseService; import org.pentaho.hadoop.shim.api.hbase.mapping.ColumnFilter; import org.pentaho.hadoop.shim.api.hbase.mapping.ColumnFilterFactory; import org.pentaho.hadoop.shim.api.hbase.mapping.Mapping; import org.pentaho.hadoop.shim.api.hbase.mapping.MappingFactory; import org.pentaho.hadoop.shim.api.hbase.meta.HBaseValueMetaInterface; import org.pentaho.hadoop.shim.api.hbase.meta.HBaseValueMetaInterfaceFactory; import org.pentaho.metastore.api.IMetaStore; import org.pentaho.metastore.locator.api.MetastoreLocator; import org.pentaho.runtime.test.RuntimeTester; import org.pentaho.runtime.test.action.RuntimeTestActionService; import org.pentaho.runtime.test.action.impl.RuntimeTestActionServiceImpl; import 
org.pentaho.runtime.test.impl.RuntimeTesterImpl; import org.w3c.dom.Node; import java.util.ArrayList; import java.util.Collection; import java.util.List; import java.util.Map; import java.util.Set; /** * Class providing an input step for reading data from an HBase table according to meta data mapping info stored in a * separate HBase table called "pentaho_mappings". See org.pentaho.hbase.mapping.Mapping for details on the meta data * format. */ @Step( id = "HBaseInput", image = "HB.svg", name = "HBaseInput.Name", description = "HBaseInput.Description", categoryDescription = "i18n:org.pentaho.di.trans.step:BaseStep.Category.BigData", documentationUrl = "pdi-transformation-steps-reference-overview/hbase-input-cp-main-page", i18nPackageName = "org.pentaho.di.trans.steps.hbaseinput" ) @InjectionSupported( localizationPrefix = "HBaseInput.Injection.", groups = {"OUTPUT_FIELDS", "MAPPING", "FILTER"} ) public class HBaseInputMeta extends BaseStepMeta implements StepMetaInterface { protected static Class PKG = HBaseInputMeta.class; private final NamedClusterLoadSaveUtil namedClusterLoadSaveUtil; private final NamedClusterService namedClusterService; private final NamedClusterServiceLocator namedClusterServiceLocator; private final RuntimeTestActionService runtimeTestActionService; private MetastoreLocator metaStoreService; private final RuntimeTester runtimeTester; protected NamedCluster namedCluster; /** * path/url to hbase-site.xml */ @Injection( name = "HBASE_SITE_XML_URL" ) protected String m_coreConfigURL; /** * path/url to hbase-default.xml */ @Injection( name = "HBASE_DEFAULT_XML_URL" ) protected String m_defaultConfigURL; /** * the name of the HBase table to read from */ @Injection( name = "SOURCE_TABLE_NAME" ) protected String m_sourceTableName; /** * the name of the mapping for columns/types for the source table */ @Injection( name = "SOURCE_MAPPING_NAME" ) protected String m_sourceMappingName; /** * Start key value for range scans */ @Injection( name = "START_KEY_VALUE" ) protected String m_keyStart; /** * Stop key value for range scans */ @Injection( name = "STOP_KEY_VALUE" ) protected String m_keyStop; /** * Scanner caching */ @Injection( name = "SCANNER_ROW_CACHE_SIZE" ) protected String m_scannerCacheSize; protected transient Mapping m_cachedMapping; /** * The selected fields to output. If null, then all fields from the mapping are output */ protected List m_outputFields; @InjectionDeep protected List outputFieldsDefinition; /** * The configured column filters. 
If null, then no filters are applied to the result set */ protected List m_filters; @InjectionDeep protected List filtersDefinition; /** * If true, then any matching filter will cause the row to be output, otherwise all filters have to return true before * the row is output */ @Injection( name = "MATCH_ANY_FILTER" ) protected boolean m_matchAnyFilter; /** * The mapping to use if we are not loading one dynamically at runtime from HBase itself */ protected Mapping m_mapping; @InjectionDeep protected MappingDefinition mappingDefinition; private ServiceStatus serviceStatus = ServiceStatus.OK; public HBaseInputMeta() { this( NamedClusterManager.getInstance(), BigDataServicesHelper.getNamedClusterServiceLocator(), RuntimeTestActionServiceImpl.getInstance(), RuntimeTesterImpl.getInstance() ); } public HBaseInputMeta( NamedClusterService namedClusterService, NamedClusterServiceLocator namedClusterServiceLocator, RuntimeTestActionService runtimeTestActionService, RuntimeTester runtimeTester ) { this.namedClusterService = namedClusterService; this.namedClusterServiceLocator = namedClusterServiceLocator; this.runtimeTestActionService = runtimeTestActionService; this.runtimeTester = runtimeTester; namedClusterLoadSaveUtil = new NamedClusterLoadSaveUtil(); } public synchronized MetastoreLocator getMetastoreLocator() { if ( this.metaStoreService == null ) { try { Collection metastoreLocators = PluginServiceLoader.loadServices( MetastoreLocator.class ); this.metaStoreService = metastoreLocators.stream().findFirst().get(); } catch ( Exception e ) { getLog().logError( "Error getting MetastoreLocator", e ); } } return this.metaStoreService; } public HBaseInputMeta(NamedClusterService namedClusterService, NamedClusterServiceLocator namedClusterServiceLocator, RuntimeTestActionService runtimeTestActionService, RuntimeTester runtimeTester, MetastoreLocator metastoreLocator) { this.namedClusterService = namedClusterService; this.namedClusterServiceLocator = namedClusterServiceLocator; this.runtimeTestActionService = runtimeTestActionService; this.runtimeTester = runtimeTester; namedClusterLoadSaveUtil = new NamedClusterLoadSaveUtil(); this.metaStoreService = metastoreLocator; } /** * Set the mapping to use for decoding the row * * @param m the mapping to use */ public void setMapping( Mapping m ) { m_mapping = m; } /** * Get the mapping to use for decoding the row * * @return the mapping to use */ public Mapping getMapping() { return m_mapping; } /** * Set the URL to the hbase-site.xml. Either this OR the zookeeper host list can be used to establish a connection. * * @param coreConfig */ public void setCoreConfigURL( String coreConfig ) { m_coreConfigURL = coreConfig; m_cachedMapping = null; } /** * Get the URL to the hbase-site.xml file. * * @return the URL to the hbase-site.xml file or null if not set. */ public String getCoreConfigURL() { return m_coreConfigURL; } /** * Set the URL to the hbase-default.xml file. This can be optionally supplied in conjuction with hbase-site.xml. If * not supplied, then the default hbase-default.xml included in the main hbase jar file is used. * * @param defaultConfig URL to the hbase-default.xml file. */ public void setDefaulConfigURL( String defaultConfig ) { m_defaultConfigURL = defaultConfig; m_cachedMapping = null; } /** * Get the URL to hbase-default.xml * * @return the URL to hbase-default.xml or null if not set. 
*/ public String getDefaultConfigURL() { return m_defaultConfigURL; } public void setSourceTableName( String sourceTable ) { m_sourceTableName = sourceTable; m_cachedMapping = null; } /** * Get the name of the HBase table to read from. * * @return the name of the source HBase table. */ public String getSourceTableName() { return m_sourceTableName; } /** * Set the name of the mapping to use that defines column names and types for the source table. * * @param sourceMapping the name of the mapping to use. */ public void setSourceMappingName( String sourceMapping ) { m_sourceMappingName = sourceMapping; m_cachedMapping = null; } /** * Get the name of the mapping to use for reading and decoding column values for the source table. * * @return the name of the mapping to use. */ public String getSourceMappingName() { return m_sourceMappingName; } /** * Set whether a given row needs to match at least one of the user specified column filters. * * @param a true if at least one filter needs to match before a given row is returned. If false then *all* filters * must match. */ public void setMatchAnyFilter( boolean a ) { m_matchAnyFilter = a; } /** * Get whether a given row needs to match at least one of the user-specified column filters. * * @return true if a given row needs to match at least one of the user specified column filters. Returns false if * *all* column filters need to match */ public boolean getMatchAnyFilter() { return m_matchAnyFilter; } /** * Set the starting value (inclusive) of the key for range scans * * @param start the starting value of the key to use in range scans. */ public void setKeyStartValue( String start ) { m_keyStart = start; } /** * Get the starting value of the key to use in range scans * * @return the starting value of the key */ public String getKeyStartValue() { return m_keyStart; } /** * Set the stop value (exclusive) of the key to use in range scans. May be null to indicate scan to the end of the * table * * @param stop the stop value of the key to use in range scans */ public void setKeyStopValue( String stop ) { m_keyStop = stop; } /** * Get the stop value of the key to use in range scans * * @return the stop value of the key */ public String getKeyStopValue() { return m_keyStop; } /** * Set the number of rows to cache for scans. Higher values result in improved performance since there will be fewer * requests to HBase but at the expense of increased memory consumption. * * @param s the number of rows to cache for scans. */ public void setScannerCacheSize( String s ) { m_scannerCacheSize = s; } /** * The number of rows to cache for scans. * * @return the number of rows to cache for scans. */ public String getScannerCacheSize() { return m_scannerCacheSize; } /** * Set a list of fields to emit from this steo. If not specified, then all fields defined in the mapping for the * source table will be emitted. * * @param fields a list of fields to emit from this step. */ public void setOutputFields( List fields ) { m_outputFields = fields; } /** * Get the list of fields to emit from this step. May return null, which indicates that *all* fields defined in the * mapping for the source table will be emitted. * * @return the fields that will be output or null (indicating all fields defined in the mapping will be output). 
*/ public List getOutputFields() { return m_outputFields; } /** * Set a list of column filters to use to refine the query * * @param list a list of column filters to refine the query */ public void setColumnFilters( List list ) { m_filters = list; } /** * Get the list of column filters to use for refining the results of a scan. May return null if no filters are in use. * * @return a list of columm filters by which to refine the results of a query scan. */ public List getColumnFilters() { return m_filters; } public void setDefault() { m_coreConfigURL = null; m_defaultConfigURL = null; m_cachedMapping = null; m_sourceTableName = null; m_sourceMappingName = null; m_keyStart = null; m_keyStop = null; namedCluster = namedClusterService.getClusterTemplate(); } private String getIndexValues( HBaseValueMetaInterface vm ) { Object[] labels = vm.getIndex(); StringBuffer vals = new StringBuffer(); vals.append( "{" ); for ( int i = 0; i < labels.length; i++ ) { if ( i != labels.length - 1 ) { vals.append( labels[i].toString().trim() ).append( "," ); } else { vals.append( labels[i].toString().trim() ).append( "}" ); } } return vals.toString(); } void applyInjection( VariableSpace space ) throws KettleException { if ( namedCluster == null ) { throw new KettleException( "Named cluster was not initialized!" ); } if ( namedCluster.getShimIdentifier() == null && getParentStepMeta() != null && getParentStepMeta().getParentTransMeta() != null ) { // If here we have a template for the named cluster, not the real thing. This is likely due to not having // the namedCluster present in the local metastore. Time to load it from the embedded Metastore which is only // present at runtime NamedCluster nc = namedClusterService.getNamedClusterByName( namedCluster.getName(), getMetastoreLocator() .getExplicitMetastore( getParentStepMeta().getParentTransMeta().getEmbeddedMetastoreProviderKey() ) ); if ( nc != null && nc.getShimIdentifier() != null ) { namedCluster = nc; //Overwrite with the real one } } try { HBaseService hBaseService = getService(); Mapping tempMapping = null; if ( mappingDefinition != null ) { tempMapping = getMapping( mappingDefinition, hBaseService ); setMapping( tempMapping ); } if ( outputFieldsDefinition != null && !outputFieldsDefinition.isEmpty() ) { if ( mappingDefinition == null ) { if ( !Const.isEmpty( m_sourceMappingName ) ) { tempMapping = getMappingFromHBase( hBaseService, space, m_sourceTableName, m_sourceMappingName, m_coreConfigURL, m_defaultConfigURL ); } else { tempMapping = m_mapping; } } setOutputFields( createOutputFieldsDefinition( tempMapping, hBaseService ) ); } if ( filtersDefinition != null && !filtersDefinition.isEmpty() ) { ColumnFilterFactory columnFilterFactory = hBaseService.getColumnFilterFactory(); setColumnFilters( createColumnFiltersFromDefinition( columnFilterFactory ) ); } } catch ( Exception e ) { throw new KettleException( e ); } } @VisibleForTesting Mapping getMapping( MappingDefinition mappingDefinition, HBaseService hBaseService ) throws KettleException { return MappingUtils.getMapping( mappingDefinition, hBaseService ); } static Mapping getMappingFromHBase( HBaseService hBaseService, VariableSpace space, String tableName, String mappingName, String coreConfigURL, String defaultConfigURL ) throws KettleException { try { String siteConfig = ""; if ( !Const.isEmpty( coreConfigURL ) ) { siteConfig = space.environmentSubstitute( coreConfigURL ); } String defaultConfig = ""; if ( !Const.isEmpty( ( defaultConfigURL ) ) ) { defaultConfig = space.environmentSubstitute( 
defaultConfigURL ); } MappingAdmin mappingAdmin = MappingUtils.getMappingAdmin( hBaseService, space, siteConfig, defaultConfig ); return mappingAdmin.getMapping( tableName, mappingName ); } catch ( Exception e ) { throw new KettleException( e ); } } @VisibleForTesting List createOutputFieldsDefinition( Mapping mapping, HBaseService hBaseService ) { return createOutputFieldsDefinition( outputFieldsDefinition, mapping, hBaseService ); } static List createOutputFieldsDefinition( List outputFieldsDefinition, Mapping m_mapping, HBaseService hBaseService ) { HBaseValueMetaInterfaceFactory valueMetaInterfaceFactory = hBaseService.getHBaseValueMetaInterfaceFactory(); ByteConversionUtil byteConversionUtil = hBaseService.getByteConversionUtil(); List outputFields = new ArrayList<>(); Map columns = m_mapping.getMappedColumns(); for ( OutputFieldDefinition fieldDefinition : outputFieldsDefinition ) { HBaseValueMetaInterface valueMeta = valueMetaInterfaceFactory.createHBaseValueMetaInterface( fieldDefinition.getFamily(), fieldDefinition .getColumnName(), fieldDefinition.getAlias(), ValueMeta.getType( fieldDefinition.getHbaseType() ), -1, -1 ); valueMeta.setKey( fieldDefinition.isKey() ); valueMeta.setConversionMask( fieldDefinition.getFormat() ); HBaseValueMetaInterface mappedColumn = columns.get( fieldDefinition.getAlias() ); if ( mappedColumn != null && mappedColumn.getIndex() != null ) { Object[] indexVal = mappedColumn.getIndex(); String indexStrign = byteConversionUtil.objectIndexValuesToString( indexVal ); Object[] vals = byteConversionUtil.stringIndexListToObjects( indexStrign ); valueMeta.setIndex( vals ); valueMeta.setStorageType( ValueMetaInterface.STORAGE_TYPE_INDEXED ); } outputFields.add( valueMeta ); } return outputFields; } @VisibleForTesting List createColumnFiltersFromDefinition( ColumnFilterFactory c ) { return createColumnFiltersFromDefinition( filtersDefinition, c ); } static List createColumnFiltersFromDefinition( List filtersDefinition, ColumnFilterFactory columnFilterFactory ) { List filters = new ArrayList<>(); for ( FilterDefinition filterDefinition : filtersDefinition ) { ColumnFilter columnFilter = columnFilterFactory.createFilter( filterDefinition.getAlias() ); columnFilter.setFieldType( filterDefinition.getFieldType() ); columnFilter.setComparisonOperator( filterDefinition.getComparisonType() ); columnFilter.setConstant( filterDefinition.getConstant() ); columnFilter.setSignedComparison( filterDefinition.isSignedComparison() ); columnFilter.setFormat( filterDefinition.getFormat() ); filters.add( columnFilter ); } return filters; } @Override public String getXML() { try { applyInjection( new Variables() ); } catch ( KettleException e ) { logError( "Error occurred while injecting metadata. 
Transformation meta could be incorrect!", e ); } StringBuilder retval = new StringBuilder(); namedClusterLoadSaveUtil .getXml( retval, namedClusterService, namedCluster, MetaStoreConst.getDefaultMetastore(), getLog() ); if ( parentStepMeta != null && parentStepMeta.getParentTransMeta() != null ) { parentStepMeta.getParentTransMeta().getNamedClusterEmbedManager().addClusterToMeta( namedCluster.getName() ); } if ( !Const.isEmpty( m_coreConfigURL ) ) { retval.append( "\n " ).append( XMLHandler.addTagValue( "core_config_url", m_coreConfigURL ) ); } if ( !Const.isEmpty( m_defaultConfigURL ) ) { retval.append( "\n " ).append( XMLHandler.addTagValue( "default_config_url", m_defaultConfigURL ) ); } if ( !Const.isEmpty( m_sourceTableName ) ) { retval.append( "\n " ).append( XMLHandler.addTagValue( "source_table_name", m_sourceTableName ) ); } if ( !Const.isEmpty( m_sourceMappingName ) ) { retval.append( "\n " ).append( XMLHandler.addTagValue( "source_mapping_name", m_sourceMappingName ) ); } if ( !Const.isEmpty( m_keyStart ) ) { retval.append( "\n " ).append( XMLHandler.addTagValue( "key_start", m_keyStart ) ); } if ( !Const.isEmpty( m_keyStop ) ) { retval.append( "\n " ).append( XMLHandler.addTagValue( "key_stop", m_keyStop ) ); } if ( !Const.isEmpty( m_scannerCacheSize ) ) { retval.append( "\n " ).append( XMLHandler.addTagValue( "scanner_cache_size", m_scannerCacheSize ) ); } if ( m_outputFields != null && m_outputFields.size() > 0 ) { retval.append( "\n " ).append( XMLHandler.openTag( "output_fields" ) ); for ( HBaseValueMetaInterface vm : m_outputFields ) { vm.getXml( retval ); } retval.append( "\n " ).append( XMLHandler.closeTag( "output_fields" ) ); } if ( m_filters != null && m_filters.size() > 0 ) { retval.append( "\n " ).append( XMLHandler.openTag( "column_filters" ) ); for ( ColumnFilter f : m_filters ) { f.appendXML( retval ); } retval.append( "\n " ).append( XMLHandler.closeTag( "column_filters" ) ); } retval.append( "\n " ).append( XMLHandler.addTagValue( "match_any_filter", m_matchAnyFilter ) ); if ( m_mapping != null ) { retval.append( m_mapping.getXML() ); } return retval.toString(); } @Override public void loadXML( Node stepnode, List databases, IMetaStore metaStore ) throws KettleXMLException { System.out.println( "loading data" ); if ( metaStore == null ) { metaStore = getMetastoreLocator().getMetastore(); } this.namedCluster = namedClusterLoadSaveUtil.loadClusterConfig( namedClusterService, null, repository, metaStore, stepnode, getLog() ); HBaseService hBaseService = null; try { hBaseService = getService(); } catch ( Exception e ) { getLog().logError( e.getMessage() ); } m_coreConfigURL = XMLHandler.getTagValue( stepnode, "core_config_url" ); m_defaultConfigURL = XMLHandler.getTagValue( stepnode, "default_config_url" ); m_sourceTableName = HbaseUtil.expandLegacyTableNameOnLoad( XMLHandler.getTagValue( stepnode, "source_table_name" ) ); m_sourceMappingName = XMLHandler.getTagValue( stepnode, "source_mapping_name" ); m_keyStart = XMLHandler.getTagValue( stepnode, "key_start" ); m_keyStop = XMLHandler.getTagValue( stepnode, "key_stop" ); m_scannerCacheSize = XMLHandler.getTagValue( stepnode, "scanner_cache_size" ); String m = XMLHandler.getTagValue( stepnode, "match_any_filter" ); if ( !Const.isEmpty( m ) ) { m_matchAnyFilter = m.equalsIgnoreCase( "Y" ); } if ( hBaseService != null ) { HBaseValueMetaInterfaceFactory valueMetaInterfaceFactory = hBaseService.getHBaseValueMetaInterfaceFactory(); m_outputFields = valueMetaInterfaceFactory.createListFromNode( stepnode ); 
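// Rebuild the column filters and any embedded mapping from the step XML using the shim-provided ColumnFilterFactory and MappingFactory (descriptive comment added for readability; behavior unchanged).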
ColumnFilterFactory columnFilterFactory = hBaseService.getColumnFilterFactory(); MappingFactory mappingFactory = hBaseService.getMappingFactory(); Node filters = XMLHandler.getSubNode( stepnode, "column_filters" ); if ( filters != null && XMLHandler.countNodes( filters, "filter" ) > 0 ) { int nrFilters = XMLHandler.countNodes( filters, "filter" ); m_filters = new ArrayList(); for ( int i = 0; i < nrFilters; i++ ) { Node filterNode = XMLHandler.getSubNodeByNr( filters, "filter", i ); m_filters.add( columnFilterFactory.createFilter( filterNode ) ); } } Mapping tempMapping = mappingFactory.createMapping(); if ( tempMapping.loadXML( stepnode ) ) { m_mapping = tempMapping; } else { m_mapping = null; } } else { Mapping tempMapping = new AELHBaseMappingImpl(); if ( tempMapping.loadXML( stepnode ) ) { m_mapping = tempMapping; } else { getLog().logError( "There is no meta data to inflate meta object" ); } Node fields = XMLHandler.getSubNode( stepnode, "output_fields" ); if ( fields != null ) { int nrfields = XMLHandler.countNodes( fields, "field" ); List m_outputFields = new ArrayList<>( nrfields ); for ( int i = 0; i < nrfields; i++ ) { m_outputFields.add( createFromNode( XMLHandler.getSubNodeByNr( fields, "field", i ) ) ); } } } } private HBaseValueMetaInterface createFromNode( Node fieldNode ) { String isKey = XMLHandler.getTagValue( fieldNode, "key" ).trim(); String alias = XMLHandler.getTagValue( fieldNode, "alias" ).trim(); String columnFamily = ""; String columnName = alias; if ( !isKey.equalsIgnoreCase( "Y" ) ) { if ( XMLHandler.getTagValue( fieldNode, "family" ) != null ) { columnFamily = XMLHandler.getTagValue( fieldNode, "family" ).trim(); } if ( XMLHandler.getTagValue( fieldNode, "column" ) != null ) { columnName = XMLHandler.getTagValue( fieldNode, "column" ).trim(); } } String typeS = XMLHandler.getTagValue( fieldNode, "type" ).trim(); String tableName = XMLHandler.getTagValue( fieldNode, "table_name" ); String mappingName = XMLHandler.getTagValue( fieldNode, "mapping_name" ); AELHBaseValueMetaImpl vm = new AELHBaseValueMetaImpl( isKey.equalsIgnoreCase( "Y" ), alias, columnName, columnFamily, tableName, mappingName ); vm.setHBaseTypeFromString( typeS ); return vm; } @Override public void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_transformation, ObjectId id_step ) throws KettleException { if ( metaStore == null ) { metaStore = getMetastoreLocator().getMetastore(); } namedClusterLoadSaveUtil.saveRep( rep, metaStore, id_transformation, id_step, namedClusterService, namedCluster, getLog() ); if ( !Const.isEmpty( m_coreConfigURL ) ) { rep.saveStepAttribute( id_transformation, id_step, 0, "core_config_url", m_coreConfigURL ); } if ( !Const.isEmpty( m_defaultConfigURL ) ) { rep.saveStepAttribute( id_transformation, id_step, 0, "default_config_url", m_defaultConfigURL ); } if ( !Const.isEmpty( m_sourceTableName ) ) { rep.saveStepAttribute( id_transformation, id_step, 0, "source_table_name", m_sourceTableName ); } if ( !Const.isEmpty( m_sourceMappingName ) ) { rep.saveStepAttribute( id_transformation, id_step, 0, "source_mapping_name", m_sourceMappingName ); } if ( !Const.isEmpty( m_keyStart ) ) { rep.saveStepAttribute( id_transformation, id_step, 0, "key_start", m_keyStart ); } if ( !Const.isEmpty( m_keyStop ) ) { rep.saveStepAttribute( id_transformation, id_step, 0, "key_stop", m_keyStop ); } if ( !Const.isEmpty( m_scannerCacheSize ) ) { rep.saveStepAttribute( id_transformation, id_step, 0, "scanner_cache_size", m_scannerCacheSize ); } if ( m_outputFields != null && 
m_outputFields.size() > 0 ) { for ( int i = 0; i < m_outputFields.size(); i++ ) { m_outputFields.get( i ).saveRep( rep, id_transformation, id_step, i ); } } if ( m_filters != null && m_filters.size() > 0 ) { for ( int i = 0; i < m_filters.size(); i++ ) { ColumnFilter f = m_filters.get( i ); f.saveRep( rep, id_transformation, id_step, i ); } } rep.saveStepAttribute( id_transformation, id_step, 0, "match_any_filter", m_matchAnyFilter ); if ( m_mapping != null ) { m_mapping.saveRep( rep, id_transformation, id_step ); } } @Override public void readRep( Repository rep, IMetaStore metaStore, ObjectId id_step, List databases ) throws KettleException { if ( metaStore == null ) { metaStore = getMetastoreLocator().getMetastore(); } this.namedCluster = namedClusterLoadSaveUtil.loadClusterConfig( namedClusterService, id_step, rep, metaStore, null, getLog() ); HBaseService hBaseService = null; try { hBaseService = getService(); } catch ( Exception e ) { getLog().logError( e.getMessage() ); } m_coreConfigURL = rep.getStepAttributeString( id_step, 0, "core_config_url" ); m_defaultConfigURL = rep.getStepAttributeString( id_step, 0, "default_config_url" ); m_sourceTableName = HbaseUtil.expandLegacyTableNameOnLoad( rep.getStepAttributeString( id_step, 0, "source_table_name" ) ); m_sourceMappingName = rep.getStepAttributeString( id_step, 0, "source_mapping_name" ); m_keyStart = rep.getStepAttributeString( id_step, 0, "key_start" ); m_keyStop = rep.getStepAttributeString( id_step, 0, "key_stop" ); m_matchAnyFilter = rep.getStepAttributeBoolean( id_step, 0, "match_any_filter" ); m_scannerCacheSize = rep.getStepAttributeString( id_step, 0, "scanner_cache_size" ); if ( hBaseService != null ) { HBaseValueMetaInterfaceFactory valueMetaInterfaceFactory = hBaseService.getHBaseValueMetaInterfaceFactory(); ColumnFilterFactory columnFilterFactory = hBaseService.getColumnFilterFactory(); MappingFactory mappingFactory = hBaseService.getMappingFactory(); m_outputFields = valueMetaInterfaceFactory.createListFromRepository( rep, id_step ); int nrFilters = rep.countNrStepAttributes( id_step, "cf_comparison_opp" ); if ( nrFilters > 0 ) { m_filters = new ArrayList<>(); for ( int i = 0; i < nrFilters; i++ ) { m_filters.add( columnFilterFactory.createFilter( rep, i, id_step ) ); } } Mapping tempMapping = mappingFactory.createMapping(); if ( tempMapping.readRep( rep, id_step ) ) { m_mapping = tempMapping; } else { m_mapping = null; } } } @Override public void check( List remarks, TransMeta transMeta, StepMeta stepMeta, RowMetaInterface prev, String[] input, String[] output, RowMetaInterface info, VariableSpace variableSpace, Repository repository, IMetaStore metaStore ) { if ( metaStore == null ) { metaStore = getMetastoreLocator().getMetastore(); } RowMeta r = new RowMeta(); try { getFields( transMeta.getBowl(), r, "testName", null, null, null, repository, metaStore ); CheckResult cr = new CheckResult( CheckResult.TYPE_RESULT_OK, "Step can connect to HBase. 
Named mapping exists", stepMeta ); remarks.add( cr ); } catch ( Exception ex ) { CheckResult cr = new CheckResult( CheckResult.TYPE_RESULT_ERROR, ex.getMessage(), stepMeta ); remarks.add( cr ); } } public StepInterface getStep( StepMeta stepMeta, StepDataInterface stepDataInterface, int copyNr, TransMeta transMeta, Trans trans ) { return new HBaseInput( stepMeta, stepDataInterface, copyNr, transMeta, trans, namedClusterServiceLocator ); } public StepDataInterface getStepData() { return new HBaseInputData(); } private void setupCachedMapping( VariableSpace space ) throws KettleStepException { HBaseService hBaseService = null; try { hBaseService = getService(); } catch ( ClusterInitializationException e ) { throw new KettleStepException( e ); } if ( Const.isEmpty( m_coreConfigURL ) && Const.isEmpty( namedCluster.getZooKeeperHost() ) ) { throw new KettleStepException( "No output fields available (missing " + "connection details)!" ); } if ( m_mapping == null && ( Const.isEmpty( m_sourceTableName ) || Const.isEmpty( m_sourceMappingName ) ) ) { throw new KettleStepException( "No output fields available (missing table " + "mapping details)!" ); } if ( m_cachedMapping == null ) { // cache the mapping information if ( m_mapping != null ) { m_cachedMapping = m_mapping; } else { String coreConf = null; String defaultConf = null; try { if ( !Const.isEmpty( m_coreConfigURL ) ) { coreConf = space.environmentSubstitute( m_coreConfigURL ); } if ( !Const.isEmpty( ( m_defaultConfigURL ) ) ) { defaultConf = space.environmentSubstitute( m_defaultConfigURL ); } } catch ( Exception ex ) { throw new KettleStepException( ex.getMessage(), ex ); } List forLogging = new ArrayList(); try ( HBaseConnection conf = hBaseService.getHBaseConnection( space, coreConf, defaultConf, getLog() ) ) { MappingAdmin mappingAdmin = null; for ( String m : forLogging ) { logBasic( m ); } mappingAdmin = new MappingAdmin( conf ); m_cachedMapping = mappingAdmin.getMapping( space.environmentSubstitute( m_sourceTableName ), space.environmentSubstitute( m_sourceMappingName ) ); } catch ( Exception ex ) { throw new KettleStepException( ex.getMessage(), ex ); } } } } @Override public void getFields( Bowl bowl, RowMetaInterface rowMeta, String origin, RowMetaInterface[] info, StepMeta nextStep, VariableSpace space, Repository repository, IMetaStore metaStore ) throws KettleStepException { rowMeta.clear(); // start afresh - eats the input if ( m_outputFields != null && m_outputFields.size() > 0 ) { // we have some stored field information - use this for ( HBaseValueMetaInterface vm : m_outputFields ) { vm.setOrigin( origin ); rowMeta.addValueMeta( vm ); } } else { // want all fields from the mapping - connect and get the details setupCachedMapping( space ); int kettleType; if ( m_cachedMapping.getKeyType() == Mapping.KeyType.DATE || m_cachedMapping.getKeyType() == Mapping.KeyType.UNSIGNED_DATE ) { kettleType = ValueMetaInterface.TYPE_DATE; } else if ( m_cachedMapping.getKeyType() == Mapping.KeyType.STRING ) { kettleType = ValueMetaInterface.TYPE_STRING; } else if ( m_cachedMapping.getKeyType() == Mapping.KeyType.BINARY ) { kettleType = ValueMetaInterface.TYPE_BINARY; } else { kettleType = ValueMetaInterface.TYPE_INTEGER; } ValueMetaInterface keyMeta = new ValueMeta( m_cachedMapping.getKeyName(), kettleType ); keyMeta.setOrigin( origin ); rowMeta.addValueMeta( keyMeta ); // } // Add the rest of the fields in the mapping Map mappedColumnsByAlias = m_cachedMapping.getMappedColumns(); Set aliasSet = mappedColumnsByAlias.keySet(); for ( String 
alias : aliasSet ) { HBaseValueMetaInterface columnMeta = mappedColumnsByAlias.get( alias ); columnMeta.setOrigin( origin ); rowMeta.addValueMeta( columnMeta ); } } } public NamedCluster getNamedCluster() { return namedCluster; } public void setNamedCluster( NamedCluster namedCluster ) { this.namedCluster = namedCluster; } public NamedClusterService getNamedClusterService() { return namedClusterService; } public NamedClusterServiceLocator getNamedClusterServiceLocator() { return namedClusterServiceLocator; } public RuntimeTestActionService getRuntimeTestActionService() { return runtimeTestActionService; } public RuntimeTester getRuntimeTester() { return runtimeTester; } public List getOutputFieldsDefinition() { return outputFieldsDefinition; } public void setOutputFieldsDefinition( List outputFieldsDefinition ) { this.outputFieldsDefinition = outputFieldsDefinition; } public List getFiltersDefinition() { return filtersDefinition; } public void setFiltersDefinition( List filtersDefinition ) { this.filtersDefinition = filtersDefinition; } public MappingDefinition getMappingDefinition() { return mappingDefinition; } public void setMappingDefinition( MappingDefinition mappingDefinition ) { this.mappingDefinition = mappingDefinition; } protected HBaseService getService() throws ClusterInitializationException { HBaseService service = null; try { String embeddedMetastoreProviderKey = parentStepMeta == null || parentStepMeta.getParentTransMeta() == null ? null : parentStepMeta.getParentTransMeta().getEmbeddedMetastoreProviderKey(); service = namedClusterServiceLocator.getService( this.namedCluster, HBaseService.class, embeddedMetastoreProviderKey ); this.serviceStatus = ServiceStatus.OK; } catch ( Exception e ) { this.serviceStatus = ServiceStatus.notOk( e ); logError( Messages.getString( "HBaseInput.Error.ServiceStatus" ) ); throw e; } return service; } public ServiceStatus getServiceStatus() { if ( this.serviceStatus == null ) { this.serviceStatus = ServiceStatus.OK; } return this.serviceStatus; } } ================================================ FILE: kettle-plugins/hbase/core/src/main/java/org/pentaho/big/data/kettle/plugins/hbase/input/Messages.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. 
 *
 * Change Date: 2029-07-20
 ******************************************************************************/

package org.pentaho.big.data.kettle.plugins.hbase.input;

import org.pentaho.di.i18n.BaseMessages;

public class Messages {
  public static final Class PKG = Messages.class;

  public static String getString( String key ) {
    return BaseMessages.getString( PKG, key );
  }

  public static String getString( String key, String param1 ) {
    return BaseMessages.getString( PKG, key, param1 );
  }

  public static String getString( String key, String param1, String param2 ) {
    return BaseMessages.getString( PKG, key, param1, param2 );
  }

  public static String getString( String key, String param1, String param2, String param3 ) {
    return BaseMessages.getString( PKG, key, param1, param2, param3 );
  }

  public static String getString( String key, String param1, String param2, String param3, String param4 ) {
    return BaseMessages.getString( PKG, key, param1, param2, param3, param4 );
  }

  public static String getString( String key, String param1, String param2, String param3, String param4,
                                  String param5 ) {
    return BaseMessages.getString( PKG, key, param1, param2, param3, param4, param5 );
  }

  public static String getString( String key, String param1, String param2, String param3, String param4,
                                  String param5, String param6 ) {
    return BaseMessages.getString( PKG, key, param1, param2, param3, param4, param5, param6 );
  }
}
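// Typical usage from the step classes in this package (for example, HBaseInputMeta.getService()
// above logs a localized error this way):
//
//   logError( Messages.getString( "HBaseInput.Error.ServiceStatus" ) );
//
// The key is resolved against this package's messages bundle via BaseMessages.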
================================================
FILE: kettle-plugins/hbase/core/src/main/java/org/pentaho/big/data/kettle/plugins/hbase/input/OutputFieldDefinition.java
================================================
/*! ******************************************************************************
 *
 * Pentaho
 *
 * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com
 *
 * Use of this software is governed by the Business Source License included
 * in the LICENSE.TXT file.
 *
 * Change Date: 2029-07-20
 ******************************************************************************/

package org.pentaho.big.data.kettle.plugins.hbase.input;

import org.pentaho.di.core.injection.Injection;

public class OutputFieldDefinition {
  @Injection( name = "OUTPUT_FIELD_ALIAS", group = "OUTPUT_FIELDS" )
  private String alias;

  @Injection( name = "OUTPUT_FIELD_KEY", group = "OUTPUT_FIELDS" )
  private boolean key;

  @Injection( name = "OUTPUT_FIELD_COLUMN_NAME", group = "OUTPUT_FIELDS" )
  private String columnName;

  @Injection( name = "OUTPUT_FIELD_FAMILY", group = "OUTPUT_FIELDS" )
  private String family;

  @Injection( name = "OUTPUT_FIELD_TYPE", group = "OUTPUT_FIELDS" )
  private String hbaseType;

  @Injection( name = "OUTPUT_FIELD_FORMAT", group = "OUTPUT_FIELDS" )
  private String format;

  public boolean isKey() {
    return key;
  }

  public void setKey( boolean key ) {
    this.key = key;
  }

  public String getAlias() {
    return alias;
  }

  public void setAlias( String alias ) {
    this.alias = alias;
  }

  public String getColumnName() {
    return columnName;
  }

  public void setColumnName( String columnName ) {
    this.columnName = columnName;
  }

  public String getFamily() {
    return family;
  }

  public void setFamily( String family ) {
    this.family = family;
  }

  public String getHbaseType() {
    return hbaseType;
  }

  public void setHbaseType( String hbaseType ) {
    this.hbaseType = hbaseType;
  }

  public String getFormat() {
    return format;
  }

  public void setFormat( String format ) {
    this.format = format;
  }
}

================================================
FILE: kettle-plugins/hbase/core/src/main/java/org/pentaho/big/data/kettle/plugins/hbase/mapping/ConfigurationProducer.java
================================================
/*! ******************************************************************************
 *
 * Pentaho
 *
 * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com
 *
 * Use of this software is governed by the Business Source License included
 * in the LICENSE.TXT file.
 *
 * Change Date: 2029-07-20
 ******************************************************************************/

package org.pentaho.big.data.kettle.plugins.hbase.mapping;

import org.pentaho.hadoop.shim.api.cluster.ClusterInitializationException;
import org.pentaho.hadoop.shim.api.hbase.HBaseConnection;
import org.pentaho.hadoop.shim.api.hbase.HBaseService;

import java.io.IOException;

/**
 * Interface to something that can produce a connection to HBase.
 *
 * @author Mark Hall (mhall{[at]}pentaho{[dot]}com)
 * @version $Revision$
 */
public interface ConfigurationProducer {

  HBaseService getHBaseService() throws ClusterInitializationException;

  /**
   * Get a configuration object encapsulating connection information for HBase.
   *
   * @return an HBaseConnection object for interacting with the currently configured connection to HBase
   * @throws Exception
   *           if the connection can't be supplied for some reason
   */
  HBaseConnection getHBaseConnection() throws ClusterInitializationException, IOException;

  String getCurrentConfiguration();
}

================================================
FILE: kettle-plugins/hbase/core/src/main/java/org/pentaho/big/data/kettle/plugins/hbase/mapping/FieldProducer.java
================================================
/*! ******************************************************************************
 *
 * Pentaho
 *
 * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com
 *
 * Use of this software is governed by the Business Source License included
 * in the LICENSE.TXT file.
* * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.hbase.mapping; import org.pentaho.di.core.row.RowMetaInterface; /** * Interface to something that can provide meta data on the fields that it is receiving * * @author Mark Hall (mhall{[at]}pentaho{[dot]}com) * @version $Revision$ * */ public interface FieldProducer { /** * Get the incoming fields * * @return the incoming fields */ RowMetaInterface getIncomingFields(); } ================================================ FILE: kettle-plugins/hbase/core/src/main/java/org/pentaho/big/data/kettle/plugins/hbase/mapping/HBaseRowToKettleTuple.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.hbase.mapping; import org.pentaho.hadoop.shim.api.hbase.ByteConversionUtil; import org.pentaho.hadoop.shim.api.hbase.meta.HBaseValueMetaInterface; import org.pentaho.hadoop.shim.api.hbase.mapping.Mapping; import org.pentaho.hadoop.shim.api.hbase.meta.HBaseValueMetaInterfaceFactory; import org.pentaho.di.core.Const; import org.pentaho.di.core.exception.KettleException; import org.pentaho.di.core.row.RowDataUtil; import org.pentaho.di.core.row.RowMetaInterface; import org.pentaho.di.core.row.ValueMetaInterface; import java.util.ArrayList; import java.util.List; import java.util.Map; import java.util.NavigableMap; /** * Class for decoding HBase rows to a Kettle row format. * * @author Mark Hall (mhall{[at]}pentaho{[dot]}com) * @version $Revision$ */ public class HBaseRowToKettleTuple { /** * Holds a set of tuples (Kettle rows) - one for each column from an HBase row */ protected List mDecodedTuples; /** * Index in the Kettle row format of the key column */ protected int mKeyIndex = -1; /** * Index in the Kettle row format of the family column */ protected int mFamilyIndex = -1; /** * Index in the Kettle row format of the column name column */ protected int mColNameIndex = -1; /** * Index in the Kettle row format of the column value column */ protected int mValueIndex = -1; /** * Index in the Kettle row format of the time stamp column */ protected int mTimestampIndex = -1; /** * List of (optional) byte array encoded user-specified column families to extract column values for */ protected List mUserSpecifiedFamilies; /** * List of (optional) human-readable user-specified column families to extract column values for */ protected List mUserSpecifiedFamiliesHumanReadable; protected List mTupleColsFromAliasMap; protected ByteConversionUtil mBytesUtil; public HBaseRowToKettleTuple( ByteConversionUtil bytesUtil ) { if ( bytesUtil == null ) { throw new NullPointerException(); } mBytesUtil = bytesUtil; } public void reset() { mDecodedTuples = null; mKeyIndex = -1; mFamilyIndex = -1; mColNameIndex = -1; mValueIndex = -1; mTimestampIndex = -1; mUserSpecifiedFamilies = null; mUserSpecifiedFamiliesHumanReadable = null; mTupleColsFromAliasMap = null; } /** * Convert an HBase row to (potentially) multiple Kettle rows in tuple format. 
* * @param mapping the mapping information to use (must be a "tuple" mapping) * @param tupleColsMappedByAlias the meta data for each of the tuple columns the user has opted to have output * @param outputRowMeta the outgoing Kettle row format * @return a list of Kettle rows in tuple format * @throws KettleException if a problem occurs */ public List hbaseRowToKettleTupleMode( HBaseValueMetaInterfaceFactory hBaseValueMetaInterfaceFactory, Object result, Mapping mapping, Map tupleColsMappedByAlias, RowMetaInterface outputRowMeta ) throws KettleException { if ( mDecodedTuples == null ) { mTupleColsFromAliasMap = new ArrayList<>(); // add the key first - type (or name for that matter) // is not important as this is just a dummy placeholder // here so that indexes into m_tupleColsFromAliasMap align with the output // row meta // format HBaseValueMetaInterface keyMeta = hBaseValueMetaInterfaceFactory .createHBaseValueMetaInterface( null, mapping.getKeyName(), "dummy", ValueMetaInterface.TYPE_INTEGER, 0, 0 ); mTupleColsFromAliasMap.add( keyMeta ); for ( Map.Entry entry : tupleColsMappedByAlias.entrySet() ) { mTupleColsFromAliasMap.add( tupleColsMappedByAlias.get( entry.getValue() ) ); } } return hbaseRowToKettleTupleMode( result, mapping, mTupleColsFromAliasMap, outputRowMeta ); } /** * Convert an HBase row to (potentially) multiple Kettle rows in tuple format. * * @param mapping the mapping information to use (must be a "tuple" mapping) * @param tupleCols the meta data for each of the tuple columns the user has opted to have output * @param outputRowMeta the outgoing Kettle row format * @return a list of Kettle rows in tuple format * @throws KettleException if a problem occurs */ public List hbaseRowToKettleTupleMode( Object result, Mapping mapping, List tupleCols, RowMetaInterface outputRowMeta ) throws KettleException { if ( mDecodedTuples == null ) { mDecodedTuples = new ArrayList<>(); mKeyIndex = outputRowMeta.indexOfValue( mapping.getKeyName() ); mFamilyIndex = outputRowMeta.indexOfValue( Mapping.TupleMapping.FAMILY.toString() ); mColNameIndex = outputRowMeta.indexOfValue( Mapping.TupleMapping.COLUMN.toString() ); mValueIndex = outputRowMeta.indexOfValue( Mapping.TupleMapping.VALUE.toString() ); mTimestampIndex = outputRowMeta.indexOfValue( Mapping.TupleMapping.TIMESTAMP.toString() ); if ( !Const.isEmpty( mapping.getTupleFamilies() ) ) { String[] familiesS = mapping.getTupleFamiliesSplit(); mUserSpecifiedFamilies = new ArrayList<>(); mUserSpecifiedFamiliesHumanReadable = new ArrayList<>(); for ( String family : familiesS ) { mUserSpecifiedFamiliesHumanReadable.add( family ); mUserSpecifiedFamilies.add( mBytesUtil.toBytes( family.trim() ) ); } } } else { mDecodedTuples.clear(); } byte[] rawKey = null; try { rawKey = (byte[]) result.getClass().getMethod( "getRow" ).invoke( result ); } catch ( Exception ex ) { throw new KettleException( ex ); } Object decodedKey = mapping.decodeKeyValue( rawKey ); NavigableMap>> rowData = null; try { rowData = (NavigableMap>>) result.getClass().getMethod( "getMap" ) .invoke( result ); } catch ( Exception ex ) { throw new KettleException( ex ); } if ( !Const.isEmpty( mapping.getTupleFamilies() ) ) { int i = 0; for ( byte[] family : mUserSpecifiedFamilies ) { NavigableMap> colMap = rowData.get( family ); for ( Map.Entry> colMapEntry : colMap.entrySet() ) { NavigableMap valuesByTimestamp = colMapEntry.getValue(); Object[] newTuple = RowDataUtil.allocateRowData( outputRowMeta.size() ); // row key if ( mKeyIndex != -1 ) { newTuple[ mKeyIndex ] = decodedKey; } // get 
value of most recent column value Map.Entry mostRecentColVal = valuesByTimestamp.lastEntry(); // store the timestamp if ( mTimestampIndex != -1 ) { newTuple[ mTimestampIndex ] = mostRecentColVal.getKey(); } // column name if ( mColNameIndex != -1 ) { HBaseValueMetaInterface colNameMeta = tupleCols.get( mColNameIndex ); Object decodedColName = colNameMeta.decodeColumnValue( colMapEntry.getKey() ); newTuple[ mColNameIndex ] = decodedColName; } // column value if ( mValueIndex != -1 ) { HBaseValueMetaInterface colValueMeta = tupleCols.get( mValueIndex ); Object decodedValue = colValueMeta.decodeColumnValue( mostRecentColVal.getValue() ); newTuple[ mValueIndex ] = decodedValue; } // column family if ( mFamilyIndex != -1 ) { newTuple[ mFamilyIndex ] = mUserSpecifiedFamiliesHumanReadable.get( i ); } mDecodedTuples.add( newTuple ); } i++; } } else { // process all column families for ( Map.Entry>> rowDataEntry : rowData.entrySet() ) { // column family Object decodedFamily = null; if ( mFamilyIndex != -1 ) { HBaseValueMetaInterface colFamMeta = tupleCols.get( mFamilyIndex ); decodedFamily = colFamMeta.decodeColumnValue( rowDataEntry.getKey() ); } NavigableMap> colMap = rowDataEntry.getValue(); for ( Map.Entry> colMapEntry : colMap.entrySet() ) { NavigableMap valuesByTimestamp = colMapEntry.getValue(); Object[] newTuple = RowDataUtil.allocateRowData( outputRowMeta.size() ); // row key if ( mKeyIndex != -1 ) { newTuple[ mKeyIndex ] = decodedKey; } // get value of most recent column value Map.Entry mostRecentColVal = valuesByTimestamp.lastEntry(); // store the timestamp if ( mTimestampIndex != -1 ) { newTuple[ mTimestampIndex ] = mostRecentColVal.getKey(); } // column name if ( mColNameIndex != -1 ) { HBaseValueMetaInterface colNameMeta = tupleCols.get( mColNameIndex ); Object decodedColName = colNameMeta.decodeColumnValue( colMapEntry.getKey() ); newTuple[ mColNameIndex ] = decodedColName; } // column value if ( mValueIndex != -1 ) { HBaseValueMetaInterface colValueMeta = tupleCols.get( mValueIndex ); Object decodedValue = colValueMeta.decodeColumnValue( mostRecentColVal.getValue() ); newTuple[ mValueIndex ] = decodedValue; } // column family if ( mFamilyIndex != -1 ) { newTuple[ mFamilyIndex ] = decodedFamily; } mDecodedTuples.add( newTuple ); } } } return mDecodedTuples; } } ================================================ FILE: kettle-plugins/hbase/core/src/main/java/org/pentaho/big/data/kettle/plugins/hbase/mapping/MappingAdmin.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. 
* * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.hbase.mapping; import org.pentaho.big.data.kettle.plugins.hbase.HbaseUtil; import org.pentaho.hadoop.shim.api.hbase.ByteConversionUtil; import org.pentaho.hadoop.shim.api.hbase.HBaseConnection; import org.pentaho.hadoop.shim.api.hbase.mapping.Mapping; import org.pentaho.hadoop.shim.api.hbase.mapping.MappingFactory; import org.pentaho.hadoop.shim.api.hbase.meta.HBaseValueMetaInterface; import org.pentaho.hadoop.shim.api.hbase.meta.HBaseValueMetaInterfaceFactory; import org.pentaho.hadoop.shim.api.hbase.table.HBasePut; import org.pentaho.hadoop.shim.api.hbase.table.HBaseTable; import org.pentaho.hadoop.shim.api.hbase.table.HBaseTableWriteOperationManager; import org.pentaho.hadoop.shim.api.hbase.Result; import org.pentaho.hadoop.shim.api.hbase.table.ResultScanner; import org.pentaho.hadoop.shim.api.hbase.table.ResultScannerBuilder; import org.pentaho.di.core.Const; import org.pentaho.di.core.row.ValueMetaInterface; import org.pentaho.di.trans.TransMeta; import org.pentaho.di.trans.step.BaseStepMeta; import java.io.Closeable; import java.io.IOException; import java.util.ArrayList; import java.util.Calendar; import java.util.Collections; import java.util.Date; import java.util.GregorianCalendar; import java.util.List; import java.util.Map; import java.util.NavigableMap; import java.util.Random; import java.util.Set; import java.util.TreeMap; import java.util.TreeSet; /** * Class for managing a mapping table in HBase. Has routines for creating the mapping table, writing and reading * mappings to/from the table and creating a test table for debugging purposes. Also has a rough and ready command line * interface. For more information on the structure of a table mapping see org.pentaho.hbase.mapping.Mapping. * * @author Mark Hall (mhall[{at]}pentaho{[dot]}com) */ public class MappingAdmin implements Closeable { /** * Configuration object for the connection protected Configuration m_connection; */ private final HBaseConnection hBaseConnection; /** Name of the mapping table (might make this configurable at some stage) */ protected String m_mappingTableName = "pentaho_mappings"; /** family name to hold the mapped column meta data in a mapping */ public static final String COLUMNS_FAMILY_NAME = "columns"; /** * family name to hold the key meta data in a mapping. This meta data will be the same for any mapping defined on the * same table */ public static final String KEY_FAMILY_NAME = "key"; /** * Constructor. No conneciton information configured. */ // public MappingAdmin() { // try { // HadoopConfiguration active = // HadoopConfigurationBootstrap.getHadoopConfigurationProvider().getActiveConfiguration(); // HBaseShim hbaseShim = active.getHBaseShim(); // m_bytesUtil = hbaseShim.getHBaseConnection().getBytesUtil(); // } catch ( Exception ex ) { // // catastrophic failure if we can't obtain a concrete implementation // throw new RuntimeException( ex ); // } // } public MappingAdmin( HBaseConnection hBaseConnection ) { this.hBaseConnection = hBaseConnection; } /** * Set the name of the mapping table. * * @param tableName * the name to use for the mapping table. 
*/ public void setMappingTableName( String tableName ) { m_mappingTableName = tableName; } /** * Get the name of the mapping table * * @return the name of the mapping table */ public String getMappingTableName() { return m_mappingTableName; } /** * Creates a test mapping (in standard format) called "MarksTestMapping" for a test table called "MarksTestTable" * * @throws Exception * if a problem occurs */ public void createTestMapping() throws Exception { String keyName = "MyKey"; String tableName = "MarksTestTable"; String mappingName = "MarksTestMapping"; MappingFactory mappingFactory = hBaseConnection.getMappingFactory(); HBaseValueMetaInterfaceFactory valueMetaInterfaceFactory = hBaseConnection.getHBaseValueMetaInterfaceFactory(); Mapping.KeyType keyType = Mapping.KeyType.LONG; Mapping testMapping = mappingFactory.createMapping( tableName, mappingName, keyName, keyType ); String family1 = "Family1"; String colA = "first_string_column"; HBaseValueMetaInterface vm = valueMetaInterfaceFactory.createHBaseValueMetaInterface( family1, colA, colA, ValueMetaInterface.TYPE_STRING, -1, -1 ); vm.setTableName( tableName ); vm.setMappingName( mappingName ); testMapping.addMappedColumn( vm, false ); String colB = "first_unsigned_int_column"; vm = valueMetaInterfaceFactory.createHBaseValueMetaInterface( family1, colB, colB, ValueMetaInterface.TYPE_INTEGER, -1, -1 ); vm.setIsLongOrDouble( false ); vm.setTableName( tableName ); vm.setMappingName( mappingName ); testMapping.addMappedColumn( vm, false ); String family2 = "Family2"; String colC = "first_indexed_column"; vm = valueMetaInterfaceFactory.createHBaseValueMetaInterface( family2, colC, colC, ValueMetaInterface.TYPE_STRING, -1, -1 ); vm.setTableName( tableName ); vm.setMappingName( mappingName ); vm.setStorageType( ValueMetaInterface.STORAGE_TYPE_INDEXED ); Object[] vals = { "nomVal1", "nomVal2", "nomVal3" }; vm.setIndex( vals ); testMapping.addMappedColumn( vm, false ); String colD = "first_binary_column"; vm = valueMetaInterfaceFactory.createHBaseValueMetaInterface( family1, colD, colD, ValueMetaInterface.TYPE_BINARY, -1, -1 ); vm.setTableName( tableName ); vm.setMappingName( mappingName ); testMapping.addMappedColumn( vm, false ); String colE = "first_boolean_column"; vm = valueMetaInterfaceFactory.createHBaseValueMetaInterface( family1, colE, colE, ValueMetaInterface.TYPE_BOOLEAN, -1, -1 ); vm.setTableName( tableName ); vm.setMappingName( mappingName ); testMapping.addMappedColumn( vm, false ); String colF = "first_signed_date_column"; vm = valueMetaInterfaceFactory.createHBaseValueMetaInterface( family1, colF, colF, ValueMetaInterface.TYPE_DATE, -1, -1 ); vm.setTableName( tableName ); vm.setMappingName( mappingName ); testMapping.addMappedColumn( vm, false ); String colG = "first_signed_double_column"; vm = valueMetaInterfaceFactory.createHBaseValueMetaInterface( family2, colG, colG, ValueMetaInterface.TYPE_NUMBER, -1, -1 ); vm.setTableName( tableName ); vm.setMappingName( mappingName ); testMapping.addMappedColumn( vm, false ); String colH = "first_signed_float_column"; vm = valueMetaInterfaceFactory.createHBaseValueMetaInterface( family2, colH, colH, ValueMetaInterface.TYPE_NUMBER, -1, -1 ); vm.setIsLongOrDouble( false ); vm.setTableName( tableName ); vm.setMappingName( mappingName ); testMapping.addMappedColumn( vm, false ); String colI = "first_signed_int_column"; vm = valueMetaInterfaceFactory.createHBaseValueMetaInterface( family2, colI, colI, ValueMetaInterface.TYPE_INTEGER, -1, -1 ); vm.setIsLongOrDouble( false ); vm.setTableName( 
tableName ); vm.setMappingName( mappingName ); testMapping.addMappedColumn( vm, false ); String colJ = "first_signed_long_column"; vm = valueMetaInterfaceFactory.createHBaseValueMetaInterface( family2, colJ, colJ, ValueMetaInterface.TYPE_INTEGER, -1, -1 ); vm.setTableName( tableName ); vm.setMappingName( mappingName ); testMapping.addMappedColumn( vm, false ); String colK = "first_unsigned_date_column"; vm = valueMetaInterfaceFactory.createHBaseValueMetaInterface( family2, colK, colK, ValueMetaInterface.TYPE_DATE, -1, -1 ); vm.setTableName( tableName ); vm.setMappingName( mappingName ); testMapping.addMappedColumn( vm, false ); String colL = "first_unsigned_double_column"; vm = valueMetaInterfaceFactory.createHBaseValueMetaInterface( family2, colL, colL, ValueMetaInterface.TYPE_NUMBER, -1, -1 ); vm.setTableName( tableName ); vm.setMappingName( mappingName ); testMapping.addMappedColumn( vm, false ); String colM = "first_unsigned_float_column"; vm = valueMetaInterfaceFactory.createHBaseValueMetaInterface( family2, colM, colM, ValueMetaInterface.TYPE_NUMBER, -1, -1 ); vm.setIsLongOrDouble( false ); vm.setTableName( tableName ); vm.setMappingName( mappingName ); testMapping.addMappedColumn( vm, false ); String colN = "first_unsigned_long_column"; vm = valueMetaInterfaceFactory.createHBaseValueMetaInterface( family2, colN, colN, ValueMetaInterface.TYPE_INTEGER, -1, -1 ); vm.setTableName( tableName ); vm.setMappingName( mappingName ); testMapping.addMappedColumn( vm, false ); putMapping( testMapping, false ); } /** * Creates a test mapping (in tuple format) called "MarksTestTupleMapping" for a test table called * "MarksTestTupleTable" * * @throws Exception * if a problem occurs */ public void createTestTupleMapping() throws Exception { String keyName = "KEY"; String tableName = "MarksTestTupleTable"; String mappingName = "MarksTestTupleMapping"; MappingFactory mappingFactory = hBaseConnection.getMappingFactory(); HBaseValueMetaInterfaceFactory valueMetaInterfaceFactory = hBaseConnection.getHBaseValueMetaInterfaceFactory(); Mapping.KeyType keyType = Mapping.KeyType.UNSIGNED_LONG; Mapping testMapping = mappingFactory.createMapping( tableName, mappingName, keyName, keyType ); testMapping.setTupleMapping( true ); String family = ""; String colName = ""; HBaseValueMetaInterface vm = valueMetaInterfaceFactory.createHBaseValueMetaInterface( family, colName, "Family", ValueMetaInterface.TYPE_STRING, -1, -1 ); testMapping.addMappedColumn( vm, true ); vm = valueMetaInterfaceFactory.createHBaseValueMetaInterface( family, colName, "Column", ValueMetaInterface.TYPE_STRING, -1, -1 ); testMapping.addMappedColumn( vm, true ); vm = valueMetaInterfaceFactory.createHBaseValueMetaInterface( family, colName, "Value", ValueMetaInterface.TYPE_STRING, -1, -1 ); testMapping.addMappedColumn( vm, true ); vm = valueMetaInterfaceFactory.createHBaseValueMetaInterface( family, colName, "Timestamp", ValueMetaInterface.TYPE_INTEGER, -1, -1 ); vm.setIsLongOrDouble( true ); testMapping.addMappedColumn( vm, true ); putMapping( testMapping, false ); } /** * Creates a test table called "MarksTestTupleTable" * * @throws Exception * if a problem occurs */ public void createTupleTestTable() throws Exception { // create a test table in the same format as the test tuple mapping ByteConversionUtil byteConversionUtil = hBaseConnection.getByteConversionUtil(); if ( hBaseConnection == null ) { throw new IOException( "No connection exists yet!" 
); } HBaseTable marksTestTupleTable = hBaseConnection.getTable( "MarksTestTupleTable" ); if ( marksTestTupleTable.exists() ) { // drop/delete the table and re-create marksTestTupleTable.disable(); marksTestTupleTable.delete(); } List colFamilies = new ArrayList(); colFamilies.add( "Family1" ); colFamilies.add( "Family2" ); marksTestTupleTable.create( colFamilies, null ); HBaseTableWriteOperationManager writeOperationManager = marksTestTupleTable.createWriteOperationManager( (long) 1024 * 1024 * 12 ); for ( long key = 1; key < 500; key++ ) { HBasePut hBasePut = writeOperationManager.createPut( byteConversionUtil.encodeKeyValue( key, Mapping.KeyType.UNSIGNED_LONG ) ); hBasePut.setWriteToWAL( false ); // 20 columns every second row (all columns are string) for ( int i = 0; i < 10 * ( ( key % 2 ) + 1 ); i++ ) { if ( i < 10 ) { hBasePut.addColumn( "Family1", "string_col" + i, false, byteConversionUtil.toBytes( "StringValue_" + key ) ); } else { hBasePut.addColumn( "Family2", "string_col" + i, false, byteConversionUtil.toBytes( "StringValue_" + key ) ); } hBasePut.execute(); } } writeOperationManager.flushCommits(); writeOperationManager.close(); } /** * Creates a test table called "MarksTestTable" * * @throws Exception * if a problem occurs */ public void createTestTable() throws Exception { // create a test table in the same format as the test mapping ByteConversionUtil byteConversionUtil = hBaseConnection.getByteConversionUtil(); HBaseTable marksTestTable = hBaseConnection.getTable( "MarksTestTable" ); if ( marksTestTable != null ) { // drop/delete the table and re-create marksTestTable.disable(); marksTestTable.delete(); } List colFamilies = new ArrayList(); colFamilies.add( "Family1" ); colFamilies.add( "Family2" ); marksTestTable.create( colFamilies, null ); HBaseTableWriteOperationManager writeOperationManager = marksTestTable.createWriteOperationManager( (long) 1024 * 1024 * 12 ); // insert 200 test rows of random stuff Random r = new Random(); String[] nomVals = { "nomVal1", "nomVal2", "nomVal3" }; Date date = new Date(); Calendar c = new GregorianCalendar(); c.setTime( date ); Calendar c2 = new GregorianCalendar(); c2.set( 1970, 2, 1 ); for ( long key = -500; key < 20000; key++ ) { HBasePut hBasePut = writeOperationManager.createPut( byteConversionUtil.encodeKeyValue( key, Mapping.KeyType.LONG ) ); hBasePut.setWriteToWAL( false ); // unsigned (positive) integer column hBasePut.addColumn( "Family1", "first_unsigned_int_column", false, byteConversionUtil.toBytes( ( key < 0 ? 
(int) -key : key ) / 10 ) ); // String column hBasePut .addColumn( "Family1", "first_string_column", false, byteConversionUtil.toBytes( "StringValue_" + key ) ); // have some null values - every 10th row has no value for the indexed // column if ( key % 10L > 0 ) { int index = r.nextInt( 3 ); String nomVal = nomVals[ index ]; hBasePut.addColumn( "Family2", "first_indexed_column", false, byteConversionUtil.toBytes( nomVal ) ); } // signed integer column double d = r.nextDouble(); int signedInt = r.nextInt( 100 ); if ( d < 0.5 ) { signedInt = -signedInt; } hBasePut.addColumn( "Family2", "first_signed_int_column", false, byteConversionUtil.toBytes( signedInt ) ); // unsigned (positive) float column float f = r.nextFloat() * 1000.0f; hBasePut.addColumn( "Family2", "first_unsigned_float_column", false, byteConversionUtil.toBytes( f ) ); // signed float column if ( d > 0.5 ) { f = -f; } hBasePut.addColumn( "Family2", "first_signed_float_column", false, byteConversionUtil.toBytes( f ) ); // unsigned double column double dd = d * 10000 * r.nextDouble(); hBasePut.addColumn( "Family2", "first_unsigned_double_column", false, byteConversionUtil.toBytes( dd ) ); // signed double if ( d > 0.5 ) { dd = -dd; } hBasePut.addColumn( "Family2", "first_signed_double_column", false, byteConversionUtil.toBytes( dd ) ); // unsigned long long l = r.nextInt( 300 ); hBasePut.addColumn( "Family2", "first_unsigned_long_column", false, byteConversionUtil.toBytes( l ) ); if ( d < 0.5 ) { l = -l; } hBasePut.addColumn( "Family2", "first_signed_long_column", false, byteConversionUtil.toBytes( l ) ); // unsigned date (vals >= 1st Jan 1970) c.add( Calendar.DAY_OF_YEAR, 1 ); long longd = c.getTimeInMillis(); hBasePut.addColumn( "Family1", "first_unsigned_date_column", false, byteConversionUtil.toBytes( longd ) ); // signed date (vals < 1st Jan 1970) c2.add( Calendar.DAY_OF_YEAR, -1 ); longd = c2.getTimeInMillis(); hBasePut.addColumn( "Family1", "first_signed_date_column", false, byteConversionUtil.toBytes( longd ) ); // boolean column String bVal = ""; if ( d < 0.5 ) { bVal = "N"; } else { bVal = "Y"; } hBasePut.addColumn( "Family1", "first_boolean_column", false, byteConversionUtil.toBytes( bVal ) ); // serialized objects byte[] serialized = byteConversionUtil.encodeObject( new Double( d ) ); hBasePut.addColumn( "Family1", "first_serialized_column", false, serialized ); // binary (raw bytes) byte[] rawStuff = byteConversionUtil.toBytes( 5034555 ); hBasePut.addColumn( "Family1", "first_binary_column", false, rawStuff ); hBasePut.execute(); } writeOperationManager.flushCommits(); writeOperationManager.close(); } /** * Create the mapping table * @param tableName The fuly qualified tablename with namespace to make the mapping file for * @throws Exception * if there is no connection specified or the mapping table already exists. */ public void createMappingTable( String tableName ) throws Exception { HBaseTable hBaseTable = hBaseConnection.getTable( getMappingTableName( tableName ) ); if ( hBaseTable.exists() ) { throw new IOException( "Mapping table already exists!" 
); } List colFamNames = new ArrayList(); colFamNames.add( COLUMNS_FAMILY_NAME ); colFamNames.add( KEY_FAMILY_NAME ); hBaseTable.create( colFamNames, null ); } /** * Check to see if the specified mapping name exists for the specified table * * @param tableName * the name of the table * @param mappingName * the name of the mapping * @return true if the specified mapping exists for the specified table * @throws IOException * if a problem occurs */ public boolean mappingExists( String tableName, String mappingName ) throws Exception { try ( HBaseTable hBaseTable = hBaseConnection.getTable( getMappingTableName( tableName ) ) ) { if ( hBaseTable.exists() ) { return hBaseTable.keyExists( hBaseConnection.getByteConversionUtil() .compoundKey( HbaseUtil.parseQualifierFromTableName( tableName ), mappingName ) ); } return false; } } /** * Get a list of fully qualifieed tableNames that have mappings in the given namesSpace. If the namespace is null then * cycle through all namespaces. List will be empty if there are no mappings defined yet. * * @return a list of tables that have mappings. * @throws IOException * if something goes wrong */ public Set getMappedTables( String nameSpace ) throws Exception { ByteConversionUtil byteConversionUtil = hBaseConnection.getByteConversionUtil(); TreeSet tableNames = new TreeSet<>(); if ( nameSpace != null ) { addMappedTables( tableNames, nameSpace ); } else { List namespaces = hBaseConnection.listNamespaces(); for ( String nextNamespace: namespaces ) { addMappedTables( tableNames, nextNamespace ); } } return tableNames; } private void addMappedTables( Set tableNames, String nameSpace ) throws Exception { ByteConversionUtil byteConversionUtil = hBaseConnection.getByteConversionUtil(); try ( HBaseTable hBaseTable = hBaseConnection.getTable( nameSpace + ":" + m_mappingTableName ) ) { if ( hBaseTable.exists() ) { ResultScannerBuilder scannerBuilder = hBaseTable.createScannerBuilder( null, null ); scannerBuilder.setCaching( 10 ); try ( ResultScanner resultScanner = scannerBuilder.build() ) { Result next; while ( ( next = resultScanner.next() ) != null ) { byte[] rawKey = next.getRow(); // extract the table name String tableName = nameSpace + ":" + HbaseUtil.parseQualifierFromTableName( byteConversionUtil.splitKey( rawKey )[ 0 ] ); tableNames.add( tableName ); } } } } } /** * Get a list of mappings for the supplied table name. List will be empty if there are no mappings defined for the * table. * * @param tableName * the table name * @return a list of mappings * @throws Exception * if something goes wrong. 
*/ public List getMappingNames( String tableName ) throws Exception { tableName = HbaseUtil.expandTableName( tableName ); ByteConversionUtil byteConversionUtil = hBaseConnection.getByteConversionUtil(); List mappingsForTable = new ArrayList(); try ( HBaseTable hBaseTable = hBaseConnection.getTable( getMappingTableName( tableName ) ) ) { if ( hBaseTable.exists() ) { ResultScannerBuilder scannerBuilder = hBaseTable.createScannerBuilder( null, null ); scannerBuilder.setCaching( 10 ); try ( ResultScanner resultScanner = scannerBuilder.build() ) { Result next; String qualifier = HbaseUtil.parseQualifierFromTableName( tableName ); while ( ( next = resultScanner.next() ) != null ) { byte[] rowKey = next.getRow(); String[] splitKey = byteConversionUtil.splitKey( rowKey ); String tableN = splitKey[ 0 ]; if ( qualifier.equals( tableN ) ) { // extract out the mapping name mappingsForTable.add( splitKey[ 1 ] ); } } } } return mappingsForTable; } } /** * Delete a mapping from the mapping table * * @param tableName * name of the table in question * @param mappingName * name of the mapping in question * @return true if the named mapping for the named table was deleted successfully; false if the mapping table does not * exist or the named mapping for the named table does not exist in the mapping table * @throws Exception * if a problem occurs during deletion */ public boolean deleteMapping( String tableName, String mappingName ) throws Exception { ByteConversionUtil byteConversionUtil = hBaseConnection.getByteConversionUtil(); try ( HBaseTable hBaseTable = hBaseConnection.getTable( getMappingTableName( tableName ) ) ) { try ( HBaseTableWriteOperationManager hBaseTableWriteOperationManager = hBaseTable .createWriteOperationManager( null ) ) { if ( !hBaseTable.exists() ) { // create the mapping table createMappingTable( tableName ); return false; // no mapping table so nothing to delete! 
} if ( hBaseTable.disabled() ) { hBaseTable.enable(); } boolean mappingExists = mappingExists( tableName, mappingName ); if ( !mappingExists ) { return false; // mapping doesn't seem to exist } hBaseTableWriteOperationManager.createDelete( byteConversionUtil.compoundKey( HbaseUtil.parseQualifierFromTableName( tableName ), mappingName ) ) .execute(); return true; } } } /** * Delete a mapping from the mapping table * * @param theMapping * the mapping to delete * @return true if the mapping was deleted successfully; false if the mapping table does not exist or the suppied * mapping does not exist in the mapping table * @throws Exception * if a problem occurs during deletion */ public boolean deleteMapping( Mapping theMapping ) throws Exception { String tableName = theMapping.getTableName(); String mappingName = theMapping.getMappingName(); return deleteMapping( tableName, mappingName ); } public void putMapping( Mapping theMapping, boolean overwrite ) throws Exception { String tableName = theMapping.getTableName(); String mappingName = theMapping.getMappingName(); Map mapping = theMapping.getMappedColumns(); String keyName = theMapping.getKeyName(); Mapping.KeyType keyType = theMapping.getKeyType(); boolean isTupleMapping = theMapping.isTupleMapping(); String tupleFamilies = theMapping.getTupleFamilies(); ByteConversionUtil byteConversionUtil = hBaseConnection.getByteConversionUtil(); try ( HBaseTable hBaseTable = hBaseConnection.getTable( getMappingTableName( tableName ) ) ) { if ( !hBaseTable.exists() ) { // create the mapping table createMappingTable( tableName ); } if ( hBaseTable.disabled() ) { hBaseTable.enable(); } boolean mappingExists = mappingExists( tableName, mappingName ); if ( mappingExists && !overwrite ) { throw new IOException( "The mapping \"" + mappingName + "\" already exists " + "for table \"" + tableName + "\"" ); } if ( mappingExists ) { deleteMapping( tableName, mappingName ); } HBaseTableWriteOperationManager writeOperationManager = hBaseTable.createWriteOperationManager( null ); HBasePut hBasePut = writeOperationManager .createPut( byteConversionUtil.compoundKey( HbaseUtil.parseQualifierFromTableName( tableName ), mappingName ) ); hBasePut.setWriteToWAL( true ); String family = COLUMNS_FAMILY_NAME; Set aliases = mapping.keySet(); for ( String alias : aliases ) { HBaseValueMetaInterface vm = mapping.get( alias ); String valueType = ValueMetaInterface.typeCodes[ vm.getType() ]; // make sure that we save the correct type name so that unsigned filtering // works correctly! 
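    // For example: a Kettle Integer backed by a Java long is written as "Long", a Number is written
    // as "Double" (long/double backing) or "Float" (float backing), and an indexed/nominal String
    // column is written as its label set, e.g. "{nomVal1,nomVal2,nomVal3}". getMapping() below
    // depends on these exact type names when it rebuilds the HBaseValueMetaInterface objects.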
if ( vm.isInteger() && vm.getIsLongOrDouble() ) { valueType = "Long"; } if ( vm.isNumber() ) { if ( vm.getIsLongOrDouble() ) { valueType = "Double"; } else { valueType = "Float"; } } // check for nominal/indexed if ( vm.getStorageType() == ValueMetaInterface.STORAGE_TYPE_INDEXED && vm.isString() ) { Object[] labels = vm.getIndex(); StringBuffer vals = new StringBuffer(); vals.append( "{" ); for ( int i = 0; i < labels.length; i++ ) { if ( i != labels.length - 1 ) { vals.append( labels[ i ].toString().trim() ).append( "," ); } else { vals.append( labels[ i ].toString().trim() ).append( "}" ); } } valueType = vals.toString(); } // add this mapped column in hBasePut .addColumn( family, hBasePut.createColumnName( vm.getColumnFamily(), vm.getColumnName(), alias ), false, byteConversionUtil.toBytes( valueType ) ); } // now do the key family = KEY_FAMILY_NAME; List qualifier = new ArrayList<>( Collections.singletonList( keyName ) ); // indicate that this is a tuple mapping by appending SEPARATOR to the name // of the key + any specified column families to extract from if ( isTupleMapping ) { if ( Const.isEmpty( tupleFamilies ) ) { qualifier.add( "" ); } else { qualifier.add( tupleFamilies ); } } String valueType = keyType.toString(); hBasePut .addColumn( family, hBasePut.createColumnName( qualifier.toArray( new String[ qualifier.size() ] ) ), false, byteConversionUtil.toBytes( valueType ) ); // add the row hBasePut.execute(); writeOperationManager.flushCommits(); } } /** * Returns a textual description of a mapping * * @param tableName * the table name * @param mappingName * the mapping name * @return a string describing the specified mapping on the specified table * @throws IOException * if a problem occurs */ public String describeMapping( String tableName, String mappingName ) throws Exception { return describeMapping( getMapping( tableName, mappingName ) ); } /** * Returns a textual description of a mapping * * @param aMapping * the mapping * @return a textual description of the supplied mapping object * @throws IOException * if a problem occurs */ public String describeMapping( Mapping aMapping ) throws IOException { return aMapping.toString(); } /** * Get a mapping for the specified table under the specified mapping name * * @param tableName * the name of the table * @param mappingName * the name of the mapping to get for the table * @return a mapping for the supplied table * @throws Exception * if a mapping by the given name does not exist for the given table */ public Mapping getMapping( String tableName, String mappingName ) throws Exception { ByteConversionUtil byteConversionUtil = hBaseConnection.getByteConversionUtil(); MappingFactory mappingFactory = hBaseConnection.getMappingFactory(); HBaseValueMetaInterfaceFactory valueMetaInterfaceFactory = hBaseConnection.getHBaseValueMetaInterfaceFactory(); try ( HBaseTable hBaseTable = hBaseConnection.getTable( getMappingTableName( tableName ) ) ) { if ( !hBaseTable.exists() ) { // create the mapping table createMappingTable( tableName ); throw new IOException( "Mapping \"" + tableName + "," + mappingName + "\" does not exist!" 
); } byte[] compoundKey = byteConversionUtil.compoundKey( HbaseUtil.parseQualifierFromTableName( tableName ), mappingName ); ResultScannerBuilder scannerBuilder = hBaseTable.createScannerBuilder( compoundKey, compoundKey ); scannerBuilder.setCaching( 10 ); ResultScanner resultScanner = scannerBuilder.build(); Result result = resultScanner.next(); if ( result == null ) { throw new IOException( "Mapping \"" + tableName + "," + mappingName + "\" does not exist!" ); } NavigableMap colsInKeyFamily = result.getFamilyMap( KEY_FAMILY_NAME ); Set keyCols = colsInKeyFamily.keySet(); // should only be one key defined!! if ( keyCols.size() != 1 ) { throw new IOException( "Mapping \"" + tableName + "," + mappingName + "\" has more than one key defined!" ); } byte[] keyNameB = keyCols.iterator().next(); String decodedKeyName = byteConversionUtil.toString( keyNameB ); byte[] keyTypeB = colsInKeyFamily.get( keyNameB ); String decodedKeyType = byteConversionUtil.toString( keyTypeB ); Mapping.KeyType keyType = null; for ( Mapping.KeyType t : Mapping.KeyType.values() ) { if ( decodedKeyType.equalsIgnoreCase( t.toString() ) ) { keyType = t; break; } } if ( keyType == null ) { throw new IOException( "Unrecognized type for the key column in \"" + compoundKey + "\"" ); } String tupleFamilies = ""; boolean isTupleMapping = false; if ( decodedKeyName.indexOf( ',' ) > 0 ) { isTupleMapping = true; if ( decodedKeyName.indexOf( ',' ) != decodedKeyName.length() - 1 ) { tupleFamilies = decodedKeyName.substring( decodedKeyName.indexOf( ',' ) + 1, decodedKeyName.length() ); } decodedKeyName = decodedKeyName.substring( 0, decodedKeyName.indexOf( ',' ) ); } Mapping resultMapping = mappingFactory.createMapping( tableName, mappingName, decodedKeyName, keyType ); resultMapping.setTupleMapping( isTupleMapping ); if ( !Const.isEmpty( tupleFamilies ) ) { resultMapping.setTupleFamilies( tupleFamilies ); } Map resultCols = new TreeMap(); // now process the mapping NavigableMap colsInMapping = result.getFamilyMap( COLUMNS_FAMILY_NAME ); Set colNames = colsInMapping.keySet(); for ( byte[] b : colNames ) { String decodedName = byteConversionUtil.toString( b ); byte[] c = colsInMapping.get( b ); if ( c == null ) { throw new IOException( "No type declaration for column \"" + decodedName + "\"" ); } String decodedType = byteConversionUtil.toString( c ); HBaseValueMetaInterface newMeta = null; if ( decodedType.equalsIgnoreCase( "Float" ) ) { newMeta = valueMetaInterfaceFactory .createHBaseValueMetaInterface( decodedName, ValueMetaInterface.TYPE_NUMBER, -1, -1 ); // While passing through Kettle this will be represented // as a double newMeta.setIsLongOrDouble( false ); } else if ( decodedType.equalsIgnoreCase( "Double" ) ) { newMeta = valueMetaInterfaceFactory .createHBaseValueMetaInterface( decodedName, ValueMetaInterface.TYPE_NUMBER, -1, -1 ); } else if ( decodedType.equalsIgnoreCase( "String" ) ) { newMeta = valueMetaInterfaceFactory .createHBaseValueMetaInterface( decodedName, ValueMetaInterface.TYPE_STRING, -1, -1 ); } else if ( decodedType.toLowerCase().startsWith( "date" ) ) { newMeta = valueMetaInterfaceFactory .createHBaseValueMetaInterface( decodedName, ValueMetaInterface.TYPE_DATE, -1, -1 ); } else if ( decodedType.equalsIgnoreCase( "Boolean" ) ) { newMeta = valueMetaInterfaceFactory .createHBaseValueMetaInterface( decodedName, ValueMetaInterface.TYPE_BOOLEAN, -1, -1 ); } else if ( decodedType.equalsIgnoreCase( "Integer" ) ) { newMeta = valueMetaInterfaceFactory .createHBaseValueMetaInterface( decodedName, 
ValueMetaInterface.TYPE_INTEGER, -1, -1 ); // Integer in the mapping is really an integer (not a long // as Kettle uses internally) newMeta.setIsLongOrDouble( false ); } else if ( decodedType.equalsIgnoreCase( "Long" ) ) { newMeta = valueMetaInterfaceFactory .createHBaseValueMetaInterface( decodedName, ValueMetaInterface.TYPE_INTEGER, -1, -1 ); } else if ( decodedType.equalsIgnoreCase( "BigNumber" ) ) { newMeta = valueMetaInterfaceFactory .createHBaseValueMetaInterface( decodedName, ValueMetaInterface.TYPE_BIGNUMBER, -1, -1 ); } else if ( decodedType.equalsIgnoreCase( "Serializable" ) ) { newMeta = valueMetaInterfaceFactory .createHBaseValueMetaInterface( decodedName, ValueMetaInterface.TYPE_SERIALIZABLE, -1, -1 ); } else if ( decodedType.equalsIgnoreCase( "Binary" ) ) { newMeta = valueMetaInterfaceFactory .createHBaseValueMetaInterface( decodedName, ValueMetaInterface.TYPE_BINARY, -1, -1 ); } else if ( decodedType.startsWith( "{" ) && decodedType.endsWith( "}" ) ) { newMeta = valueMetaInterfaceFactory .createHBaseValueMetaInterface( decodedName, ValueMetaInterface.TYPE_STRING, -1, -1 ); Object[] labels = null; try { labels = byteConversionUtil.stringIndexListToObjects( decodedType ); } catch ( IllegalArgumentException ex ) { throw new IOException( "Indexed/nominal type must have at least one " + "label declared" ); } newMeta.setIndex( labels ); newMeta.setStorageType( ValueMetaInterface.STORAGE_TYPE_INDEXED ); } else { throw new IOException( "Unknown column type : \"" + decodedType + "\"" ); } newMeta.setTableName( tableName ); newMeta.setMappingName( mappingName ); // check that this one doesn't have the same name as the key! String alias = newMeta.getAlias(); if ( !Mapping.TupleMapping.KEY.toString().equalsIgnoreCase( alias ) ) { if ( resultMapping.getKeyName().equals( alias ) ) { throw new IOException( "Error in mapping. Column \"" + newMeta.getAlias() + "\" has the same name as the table key (" + resultMapping.getKeyName() + ")" ); } else { resultCols.put( newMeta.getAlias(), newMeta ); } } } resultMapping.setMappedColumns( resultCols ); return resultMapping; } } @Override public void close() throws IOException { hBaseConnection.close(); } public HBaseConnection getConnection() { return hBaseConnection; } public static String getTableNameFromVariable( BaseStepMeta stepMeta, String mappedTableName ) { TransMeta parentTransMeta = stepMeta.getParentStepMeta().getParentTransMeta(); return parentTransMeta.environmentSubstitute( mappedTableName ); } private String getMappingTableName( String tableName ) { return HbaseUtil.expandTableName( HbaseUtil.parseNamespaceFromTableName( tableName ), m_mappingTableName ); } } ================================================ FILE: kettle-plugins/hbase/core/src/main/java/org/pentaho/big/data/kettle/plugins/hbase/mapping/MappingEditor.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. 
* * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.hbase.mapping; import java.io.IOException; import java.util.ArrayList; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Properties; import java.util.Set; import java.util.TreeSet; import org.eclipse.jface.dialogs.MessageDialog; import org.eclipse.swt.SWT; import org.eclipse.swt.custom.CCombo; import org.eclipse.swt.events.DisposeEvent; import org.eclipse.swt.events.DisposeListener; import org.eclipse.swt.events.FocusEvent; import org.eclipse.swt.events.FocusListener; import org.eclipse.swt.events.KeyAdapter; import org.eclipse.swt.events.KeyEvent; import org.eclipse.swt.events.SelectionAdapter; import org.eclipse.swt.events.SelectionEvent; import org.eclipse.swt.graphics.Cursor; import org.eclipse.swt.layout.FormAttachment; import org.eclipse.swt.layout.FormData; import org.eclipse.swt.layout.FormLayout; import org.eclipse.swt.widgets.Button; import org.eclipse.swt.widgets.Composite; import org.eclipse.swt.widgets.Label; import org.eclipse.swt.widgets.Shell; import org.eclipse.swt.widgets.Table; import org.eclipse.swt.widgets.TableItem; import org.pentaho.big.data.kettle.plugins.hbase.HbaseUtil; import org.pentaho.hadoop.shim.api.cluster.NamedCluster; import org.pentaho.hadoop.shim.api.cluster.NamedClusterService; import org.pentaho.hadoop.shim.api.cluster.NamedClusterServiceLocator; import org.pentaho.hadoop.shim.api.cluster.ClusterInitializationException; import org.pentaho.big.data.kettle.plugins.hbase.HBaseConnectionException; import org.pentaho.big.data.kettle.plugins.hbase.input.HBaseInput; import org.pentaho.big.data.kettle.plugins.hbase.input.Messages; import org.pentaho.big.data.plugins.common.ui.NamedClusterWidgetImpl; import org.pentaho.hadoop.shim.api.hbase.ByteConversionUtil; import org.pentaho.hadoop.shim.api.hbase.HBaseConnection; import org.pentaho.hadoop.shim.api.hbase.HBaseService; import org.pentaho.hadoop.shim.api.hbase.mapping.Mapping; import org.pentaho.hadoop.shim.api.hbase.meta.HBaseValueMetaInterface; import org.pentaho.hadoop.shim.api.hbase.meta.HBaseValueMetaInterfaceFactory; import org.pentaho.hadoop.shim.api.hbase.table.HBaseTable; import org.pentaho.di.core.Const; import org.pentaho.di.core.row.RowMetaInterface; import org.pentaho.di.core.row.ValueMetaInterface; import org.pentaho.di.core.util.Utils; import org.pentaho.di.i18n.BaseMessages; import org.pentaho.di.trans.TransMeta; import org.pentaho.di.ui.core.PropsUI; import org.pentaho.di.ui.core.dialog.ErrorDialog; import org.pentaho.di.ui.core.gui.GUIResource; import org.pentaho.di.ui.core.widget.ColumnInfo; import org.pentaho.di.ui.core.widget.ComboValuesSelectionListener; import org.pentaho.di.ui.core.widget.TableView; import org.pentaho.runtime.test.RuntimeTester; import org.pentaho.runtime.test.action.RuntimeTestActionService; /** * A re-usable composite for creating and editing table mappings for HBase. Also has the (optional) ability to create * the table if the table for which the mapping is being created does not exist. When creating a new table, the name * supplied may be optionally suffixed with some parameters for compression and bloom filter type. If no parameters are * supplied then the HBase defaults of no compression and no bloom filter(s) are used. 
The table name may be suffixed * with * * @author Mark Hall (mhall{[at]}pentaho{[dot]}com) * @[NONE | GZ | LZO][@[NONE | ROW | ROWCOL]] for compression and bloom filter type respectively. Note that LZO * compression requires LZO libraries to be installed on the HBase nodes. */ public class MappingEditor extends Composite implements ConfigurationProducer { private static final Class PKG = MappingEditor.class; private final NamedClusterServiceLocator namedClusterServiceLocator; protected Shell m_shell; protected Composite m_parent; protected boolean m_allowTableCreate; protected NamedClusterWidgetImpl namedClusterWidget; // table name line protected CCombo m_existingTableNamesCombo; protected Button m_getTableNames; protected boolean m_familiesInvalidated; // mapping name line protected CCombo m_existingMappingNamesCombo; // fields view protected TableView m_fieldsView; protected ColumnInfo m_keyCI; protected ColumnInfo m_familyCI; protected ColumnInfo m_typeCI; protected Button m_saveBut; protected Button m_deleteBut; protected Button m_getFieldsBut; protected Button m_keyValueTupleBut; protected MappingAdmin m_admin; protected ConfigurationProducer m_configProducer; protected FieldProducer m_incomingFieldsProducer; /** * default family name to use when creating a new table using incoming fields */ protected static final String DEFAULT_FAMILY = "Family1"; protected String m_currentConfiguration = ""; protected boolean m_connectionProblem; protected TransMeta m_transMeta; public MappingEditor( Shell shell, Composite parent, ConfigurationProducer configProducer, FieldProducer fieldProducer, int tableViewStyle, boolean allowTableCreate, PropsUI props, TransMeta transMeta, NamedClusterService namedClusterService, RuntimeTestActionService runtimeTestActionService, RuntimeTester runtimeTester, NamedClusterServiceLocator namedClusterServiceLocator ) { super( parent, SWT.NONE ); this.namedClusterServiceLocator = namedClusterServiceLocator; m_shell = shell; m_parent = parent; m_transMeta = transMeta; boolean showConnectWidgets = false; m_configProducer = configProducer; if ( m_configProducer != null ) { m_currentConfiguration = m_configProducer.getCurrentConfiguration(); } else { showConnectWidgets = true; m_configProducer = this; } m_incomingFieldsProducer = fieldProducer; m_allowTableCreate = allowTableCreate; int middle = props.getMiddlePct(); int margin = Const.MARGIN; FormLayout controlLayout = new FormLayout(); controlLayout.marginWidth = 3; controlLayout.marginHeight = 3; setLayout( controlLayout ); props.setLook( this ); if ( showConnectWidgets ) { Label namedClusterLabel = new Label( this, SWT.RIGHT ); namedClusterLabel.setText( Messages.getString( "MappingDialog.NamedCluster.Label" ) ); props.setLook( namedClusterLabel ); FormData fd = new FormData(); fd.left = new FormAttachment( 0, 0 ); fd.top = new FormAttachment( 0, 10 ); fd.right = new FormAttachment( middle, -margin ); namedClusterLabel.setLayoutData( fd ); namedClusterWidget = new NamedClusterWidgetImpl( this, false, namedClusterService, runtimeTestActionService, runtimeTester, false ); namedClusterWidget.initiate(); props.setLook( namedClusterWidget ); fd = new FormData(); fd.left = new FormAttachment( middle, 0 ); fd.top = new FormAttachment( 0, margin ); fd.right = new FormAttachment( 100, 0 ); namedClusterWidget.setLayoutData( fd ); m_currentConfiguration = m_configProducer.getCurrentConfiguration(); } parent.addDisposeListener( new DisposeListener() { @Override public void widgetDisposed( DisposeEvent de ) { try { 
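// release the cached MappingAdmin (and its underlying HBaseConnection) when the parent
// composite goes away so HBase/ZooKeeper resources are not leaked; failures are
// deliberately swallowed in the catch below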
resetConnection(); } catch ( Exception e ) { // we have to swallow it. } } } ); // table names Label tableNameLab = new Label( this, SWT.RIGHT ); tableNameLab.setText( Messages.getString( "MappingDialog.TableName.Label" ) ); props.setLook( tableNameLab ); FormData fd = new FormData(); fd.left = new FormAttachment( 0, 0 ); if ( showConnectWidgets ) { fd.top = new FormAttachment( namedClusterWidget, margin ); } else { fd.top = new FormAttachment( 0, margin ); } fd.right = new FormAttachment( middle, -margin ); tableNameLab.setLayoutData( fd ); m_getTableNames = new Button( this, SWT.PUSH | SWT.CENTER ); props.setLook( m_getTableNames ); m_getTableNames.setText( Messages.getString( "MappingDialog.TableName.GetTableNames" ) ); fd = new FormData(); fd.right = new FormAttachment( 100, 0 ); if ( showConnectWidgets ) { fd.top = new FormAttachment( namedClusterWidget, 0 ); } else { fd.top = new FormAttachment( 0, 0 ); } m_getTableNames.setLayoutData( fd ); m_getTableNames.addSelectionListener( new SelectionAdapter() { @Override public void widgetSelected( SelectionEvent e ) { populateTableCombo( true ); if ( m_existingTableNamesCombo.getItemCount() > 0 ) { m_existingTableNamesCombo.setListVisible( true ); } } } ); m_existingTableNamesCombo = new CCombo( this, SWT.BORDER ); props.setLook( m_existingTableNamesCombo ); fd = new FormData(); fd.left = new FormAttachment( middle, 0 ); fd.right = new FormAttachment( m_getTableNames, -margin ); if ( showConnectWidgets ) { fd.top = new FormAttachment( namedClusterWidget, margin ); } else { fd.top = new FormAttachment( 0, margin ); } m_existingTableNamesCombo.setLayoutData( fd ); // Must be editable to change the namespace once populated (see Hbase row decoder). If m_allowTableCreate is false // then saving the map is disabled so it is not important what text exists here m_existingTableNamesCombo.setEditable( true ); // mapping names Label mappingNameLab = new Label( this, SWT.RIGHT ); mappingNameLab.setText( Messages.getString( "MappingDialog.MappingName.Label" ) ); props.setLook( mappingNameLab ); fd = new FormData(); fd.left = new FormAttachment( 0, 0 ); fd.top = new FormAttachment( m_getTableNames, margin ); fd.right = new FormAttachment( middle, -margin ); mappingNameLab.setLayoutData( fd ); m_existingMappingNamesCombo = new CCombo( this, SWT.BORDER ); props.setLook( m_existingMappingNamesCombo ); fd = new FormData(); fd.left = new FormAttachment( middle, 0 ); fd.top = new FormAttachment( m_getTableNames, margin ); fd.right = new FormAttachment( 100, 0 ); m_existingMappingNamesCombo.setLayoutData( fd ); m_existingTableNamesCombo.addSelectionListener( new SelectionAdapter() { @Override public void widgetSelected( SelectionEvent e ) { m_familiesInvalidated = true; populateMappingComboAndFamilyStuff(); } @Override public void widgetDefaultSelected( SelectionEvent e ) { m_familiesInvalidated = true; populateMappingComboAndFamilyStuff(); } } ); m_existingTableNamesCombo.addKeyListener( new KeyAdapter() { @Override public void keyPressed( KeyEvent e ) { m_familiesInvalidated = true; } } ); m_existingTableNamesCombo.addFocusListener( new FocusListener() { public void focusGained( FocusEvent e ) { // populateTableCombo(false); } public void focusLost( FocusEvent e ) { m_familiesInvalidated = true; populateMappingComboAndFamilyStuff(); } } ); m_existingMappingNamesCombo.addSelectionListener( new SelectionAdapter() { @Override public void widgetSelected( SelectionEvent e ) { loadTableViewFromMapping(); } @Override public void widgetDefaultSelected( SelectionEvent 
e ) { loadTableViewFromMapping(); } } ); // fields ColumnInfo[] colinf = new ColumnInfo[] { new ColumnInfo( Messages.getString( "HBaseInputDialog.Fields.FIELD_ALIAS" ), ColumnInfo.COLUMN_TYPE_TEXT, false ), new ColumnInfo( Messages.getString( "HBaseInputDialog.Fields.FIELD_KEY" ), ColumnInfo.COLUMN_TYPE_CCOMBO, false ), new ColumnInfo( Messages.getString( "HBaseInputDialog.Fields.FIELD_FAMILY" ), ColumnInfo.COLUMN_TYPE_CCOMBO, false ), new ColumnInfo( Messages.getString( "HBaseInputDialog.Fields.FIELD_NAME" ), ColumnInfo.COLUMN_TYPE_TEXT, false ), new ColumnInfo( Messages.getString( "HBaseInputDialog.Fields.FIELD_TYPE" ), ColumnInfo.COLUMN_TYPE_CCOMBO, false ), new ColumnInfo( Messages.getString( "HBaseInputDialog.Fields.FIELD_INDEXED" ), ColumnInfo.COLUMN_TYPE_TEXT, false ), }; m_keyCI = colinf[ 1 ]; m_keyCI.setComboValues( new String[] { "N", "Y" } ); m_familyCI = colinf[ 2 ]; m_familyCI.setComboValues( new String[] { "" } ); m_typeCI = colinf[ 4 ]; // default types for non-key fields m_typeCI.setComboValues( new String[] { "String", "Integer", "Long", "Float", "Double", "Date", "BigNumber", "Serializable", "Binary" } ); m_keyCI.setComboValuesSelectionListener( new ComboValuesSelectionListener() { public String[] getComboValues( TableItem tableItem, int rowNr, int colNr ) { tableItem.setText( 5, "" ); return m_keyCI.getComboValues(); } } ); m_typeCI.setComboValuesSelectionListener( new ComboValuesSelectionListener() { public String[] getComboValues( TableItem tableItem, int rowNr, int colNr ) { String[] comboValues = null; String keyOrNot = tableItem.getText( 2 ); if ( Utils.isEmpty( keyOrNot ) || keyOrNot.equalsIgnoreCase( "N" ) ) { comboValues = new String[] { "String", "Integer", "Long", "Float", "Double", "Boolean", "Date", "BigNumber", "Serializable", "Binary" }; } else { comboValues = new String[] { "String", "Integer", "UnsignedInteger", "Long", "UnsignedLong", "Date", "UnsignedDate", "Binary" }; } return comboValues; } } ); m_saveBut = new Button( this, SWT.PUSH | SWT.CENTER ); props.setLook( m_saveBut ); m_saveBut.setText( Messages.getString( "MappingDialog.SaveMapping" ) ); m_saveBut.setToolTipText( Messages.getString( "MappingDialog.SaveMapping.TipText" ) ); fd = new FormData(); fd.left = new FormAttachment( 0, margin ); fd.bottom = new FormAttachment( 100, -margin * 2 ); m_saveBut.setLayoutData( fd ); m_saveBut.addSelectionListener( new SelectionAdapter() { @Override public void widgetSelected( SelectionEvent e ) { saveMapping(); } } ); m_deleteBut = new Button( this, SWT.PUSH | SWT.CENTER ); props.setLook( m_deleteBut ); m_deleteBut.setText( Messages.getString( "MappingDialog.DeleteMapping" ) ); fd = new FormData(); fd.left = new FormAttachment( m_saveBut, margin ); fd.bottom = new FormAttachment( 100, -margin * 2 ); m_deleteBut.setLayoutData( fd ); m_deleteBut.addSelectionListener( new SelectionAdapter() { @Override public void widgetSelected( SelectionEvent e ) { deleteMapping(); } } ); m_keyValueTupleBut = new Button( this, SWT.PUSH | SWT.CENTER ); props.setLook( m_keyValueTupleBut ); m_keyValueTupleBut.setText( Messages.getString( "MappingDialog.KeyValueTemplate" ) ); m_keyValueTupleBut.setToolTipText( Messages.getString( "MappingDialog.KeyValueTemplate.TipText" ) ); fd = new FormData(); fd.right = new FormAttachment( 100, 0 ); fd.bottom = new FormAttachment( 100, -margin * 2 ); m_keyValueTupleBut.setLayoutData( fd ); m_keyValueTupleBut.addSelectionListener( new SelectionAdapter() { @Override public void widgetSelected( SelectionEvent e ) { 
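// replace the grid contents with the fixed key/value tuple template
// (KEY, Family, Column, Value, Timestamp); the user is asked to confirm first
// if the grid already contains rows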
populateTableWithTupleTemplate( m_allowTableCreate ); } } ); if ( m_allowTableCreate ) { m_getFieldsBut = new Button( this, SWT.PUSH | SWT.CENTER ); props.setLook( m_getFieldsBut ); m_getFieldsBut.setText( Messages.getString( "MappingDialog.GetIncomingFields" ) ); fd = new FormData(); fd.right = new FormAttachment( m_keyValueTupleBut, -margin ); fd.bottom = new FormAttachment( 100, -margin * 2 ); m_getFieldsBut.setLayoutData( fd ); m_getFieldsBut.addSelectionListener( new SelectionAdapter() { @Override public void widgetSelected( SelectionEvent e ) { populateTableWithIncomingFields(); } } ); } m_fieldsView = new TableView( transMeta, this, tableViewStyle, colinf, 1, null, props ); fd = new FormData(); fd.top = new FormAttachment( m_existingMappingNamesCombo, margin * 2 ); fd.bottom = new FormAttachment( m_saveBut, -margin * 2 ); fd.left = new FormAttachment( 0, 0 ); fd.right = new FormAttachment( 100, 0 ); m_fieldsView.setLayoutData( fd ); } private void populateTableWithTupleTemplate( boolean fromOutputStep ) { Table table = m_fieldsView.table; Set existingRowAliases = new HashSet(); for ( int i = 0; i < table.getItemCount(); i++ ) { TableItem tableItem = table.getItem( i ); String alias = tableItem.getText( 1 ); if ( !Utils.isEmpty( alias ) ) { existingRowAliases.add( alias ); } } int choice = 0; if ( existingRowAliases.size() > 0 ) { // Ask what we should do with existing mapping data MessageDialog md = new MessageDialog( m_shell, Messages.getString( "MappingDialog.GetFieldsChoice.Title" ), null, Messages .getString( "MappingDialog.GetFieldsChoice.Message", "" + existingRowAliases.size(), "" + ( fromOutputStep ? /* 6 */ 5 : 5 ) ), MessageDialog.WARNING, new String[] { Messages.getString( "MappingOutputDialog.ClearAndAdd" ), Messages.getString( "MappingOutputDialog.Cancel" ), }, 0 ); MessageDialog.setDefaultImage( GUIResource.getInstance().getImageSpoon() ); int idx = md.open(); choice = idx & 0xFF; } if ( choice == 1 || choice == 255 /* 255 = escape pressed */ ) { return; // Cancel } m_fieldsView.clearAll(); TableItem item = new TableItem( table, SWT.NONE ); item.setText( 1, "KEY" ); item.setText( 2, "Y" ); item = new TableItem( table, SWT.NONE ); item.setText( 1, "Family" ); item.setText( 2, "N" ); item.setText( 5, "String" ); item = new TableItem( table, SWT.NONE ); item.setText( 1, "Column" ); item.setText( 2, "N" ); item = new TableItem( table, SWT.NONE ); item.setText( 1, "Value" ); item.setText( 2, "N" ); item = new TableItem( table, SWT.NONE ); item.setText( 1, "Timestamp" ); item.setText( 2, "N" ); item.setText( 5, "Long" ); /* * Disabled from GUI for now, since visibility/ACL processing * requires an additional co-processor on HBase * if ( fromOutputStep ) { item = new TableItem( table, SWT.NONE ); item.setText( 1, "Visibility" ); item.setText( 2, "N" ); item.setText( 5, "String" ); } */ m_fieldsView.removeEmptyRows(); m_fieldsView.setRowNums(); m_fieldsView.optWidth( true ); } private void populateTableWithIncomingFields() { if ( m_incomingFieldsProducer != null ) { RowMetaInterface incomingRowMeta = m_incomingFieldsProducer.getIncomingFields(); Table table = m_fieldsView.table; if ( incomingRowMeta != null ) { Set existingRowAliases = new HashSet(); for ( int i = 0; i < table.getItemCount(); i++ ) { TableItem tableItem = table.getItem( i ); String alias = tableItem.getText( 1 ); if ( !Utils.isEmpty( alias ) ) { existingRowAliases.add( alias ); } } int choice = 0; if ( existingRowAliases.size() > 0 ) { // Ask what we should do with existing mapping data MessageDialog md = new 
MessageDialog( m_shell, Messages.getString( "MappingDialog.GetFieldsChoice.Title" ), null, Messages .getString( "MappingDialog.GetFieldsChoice.Message", "" + existingRowAliases.size(), "" + incomingRowMeta.size() ), MessageDialog.WARNING, new String[] { Messages.getString( "MappingDialog.AddNew" ), Messages.getString( "MappingOutputDialog.Add" ), Messages.getString( "MappingOutputDialog.ClearAndAdd" ), Messages.getString( "MappingOutputDialog.Cancel" ), }, 0 ); MessageDialog.setDefaultImage( GUIResource.getInstance().getImageSpoon() ); int idx = md.open(); choice = idx & 0xFF; } if ( choice == 3 || choice == 255 /* 255 = escape pressed */ ) { return; // Cancel } if ( choice == 2 ) { m_fieldsView.clearAll(); } ByteConversionUtil byteConversionUtil = null; try { byteConversionUtil = m_configProducer.getHBaseService().getByteConversionUtil(); } catch ( Exception e ) { throw new RuntimeException( e ); } for ( int i = 0; i < incomingRowMeta.size(); i++ ) { ValueMetaInterface vm = incomingRowMeta.getValueMeta( i ); boolean addIt = true; if ( choice == 0 ) { // only add if its not already in the table if ( existingRowAliases.contains( vm.getName() ) ) { addIt = false; } } if ( addIt ) { TableItem item = new TableItem( m_fieldsView.table, SWT.NONE ); item.setText( 1, vm.getName() ); item.setText( 2, "N" ); if ( m_familyCI.getComboValues()[ 0 ].length() > 0 ) { // use existing first column family name as the default item.setText( 3, m_familyCI.getComboValues()[ 0 ] ); } else { // default item.setText( 3, DEFAULT_FAMILY ); } item.setText( 4, vm.getName() ); item.setText( 5, vm.getTypeDesc() ); if ( vm.getType() == ValueMetaInterface.TYPE_INTEGER ) { item.setText( 5, "Long" ); } if ( vm.getType() == ValueMetaInterface.TYPE_NUMBER ) { item.setText( 5, "Double" ); } if ( vm.getStorageType() == ValueMetaInterface.STORAGE_TYPE_INDEXED ) { Object[] indexValus = vm.getIndex(); String indexValsS = byteConversionUtil.objectIndexValuesToString( indexValus ); item.setText( 6, indexValsS ); } } } m_fieldsView.removeEmptyRows(); m_fieldsView.setRowNums(); m_fieldsView.optWidth( true ); } } } private void populateTableCombo( boolean force ) { if ( namedClusterWidget != null && namedClusterWidget.getSelectedNamedCluster() == null ) { MessageDialog.openError( m_shell, BaseMessages.getString( PKG, "MappingDialog.Error.Title.NamedClusterNotSelected" ), BaseMessages.getString( PKG, "MappingDialog.Error.Message.NamedClusterNotSelected.Msg" ) ); return; } if ( m_configProducer == null ) { return; } if ( m_connectionProblem ) { if ( !m_currentConfiguration.equals( m_configProducer.getCurrentConfiguration() ) ) { // try again - perhaps the user has corrected connection information m_connectionProblem = false; m_currentConfiguration = m_configProducer.getCurrentConfiguration(); } } if ( ( m_existingTableNamesCombo.getItemCount() == 0 || force ) && !m_connectionProblem ) { String existingName = m_existingTableNamesCombo.getText(); String namespace = HbaseUtil.parseNamespaceFromTableName( existingName, null ); m_existingTableNamesCombo.removeAll(); Cursor busy = new Cursor( this.getDisplay(), SWT.CURSOR_WAIT ); try { this.setCursor( busy ); resetConnection(); m_admin = MappingUtils.getMappingAdmin( m_configProducer ); TreeSet tableNames = new TreeSet<>(); if ( namespace != null ) { addTables( tableNames, namespace ); } else { List namespaces = m_admin.getConnection().listNamespaces(); for ( String nextNamespace: namespaces ) { addTables( tableNames, nextNamespace ); } } for ( String currentTableName : tableNames ) { 
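// tableNames is a TreeSet, so the combo entries appear in sorted order; each name has
// already been expanded to its fully-qualified form via HbaseUtil.expandTableName()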
m_existingTableNamesCombo.add( currentTableName ); } // restore any previous value if ( !Utils.isEmpty( existingName ) ) { m_existingTableNamesCombo.setText( existingName ); } } catch ( Exception e ) { m_connectionProblem = true; showConnectionErrorDialog( e ); } finally { this.setCursor( null ); busy.dispose(); } } } private void addTables( Set tableNames, String namespace ) throws Exception { HBaseConnection hBaseConnection = m_admin.getConnection(); List tables = hBaseConnection.listTableNamesByNamespace( namespace ); for ( String currentTableName : tables ) { tableNames.add( HbaseUtil.expandTableName( currentTableName ) ); } } private void resetConnection() throws IOException { if ( m_admin != null ) { m_admin.close(); } m_admin = null; } private boolean notInitializedMappingAdmin() { return m_admin == null; } private void showConnectionErrorDialog( Exception ex ) { new ErrorDialog( m_shell, Messages.getString( "MappingDialog.Error.Title.UnableToConnect" ), Messages.getString( "MappingDialog.Error.Message.UnableToConnect" ) + "\n\n", ex ); } private void deleteMapping() { if ( namedClusterWidget != null && namedClusterWidget.getSelectedNamedCluster() == null ) { MessageDialog.openError( m_shell, BaseMessages.getString( PKG, "MappingDialog.Error.Title.NamedClusterNotSelected" ), BaseMessages.getString( PKG, "MappingDialog.Error.Message.NamedClusterNotSelected.Msg" ) ); return; } String tableName = ""; if ( !Utils.isEmpty( m_existingTableNamesCombo.getText().trim() ) ) { tableName = m_existingTableNamesCombo.getText().trim(); if ( tableName.indexOf( '@' ) > 0 ) { tableName = tableName.substring( 0, tableName.indexOf( '@' ) ); } } if ( Utils.isEmpty( tableName ) || Utils.isEmpty( m_existingMappingNamesCombo.getText().trim() ) ) { MessageDialog.openError( m_shell, Messages.getString( "MappingDialog.Error.Title.MissingTableMappingName" ), Messages.getString( "MappingDialog.Error.Message.MissingTableMappingName" ) ); return; } try { boolean ok = MessageDialog.openConfirm( m_shell, Messages.getString( "MappingDialog.Info.Title.ConfirmDelete" ), Messages .getString( "MappingDialog.Info.Message.ConfirmDelete", m_existingMappingNamesCombo.getText().trim(), tableName ) ); if ( ok ) { if ( notInitializedMappingAdmin() ) { try { m_admin = MappingUtils.getMappingAdmin( m_configProducer ); } catch ( HBaseConnectionException e ) { showConnectionErrorDialog( e ); return; } } boolean result = m_admin.deleteMapping( m_existingTableNamesCombo.getText().trim(), m_existingMappingNamesCombo.getText() .trim() ); if ( result ) { MessageDialog.openConfirm( m_shell, Messages.getString( "MappingDialog.Info.Title.MappingDeleted" ), Messages .getString( "MappingDialog.Info.Message.MappingDeleted", m_existingMappingNamesCombo.getText().trim(), tableName ) ); // make sure that the list of mappings for the selected table gets // updated. 
populateMappingComboAndFamilyStuff(); } else { MessageDialog.openError( m_shell, Messages.getString( "MappingDialog.Error.Title.DeleteMapping" ), Messages .getString( "MappingDialog.Error.Message.DeleteMapping", m_existingMappingNamesCombo.getText().trim(), tableName ) ); } } return; } catch ( Exception ex ) { MessageDialog.openError( m_shell, Messages.getString( "MappingDialog.Error.Title.DeleteMapping" ), Messages .getString( "MappingDialog.Error.Message.DeleteMappingIO", m_existingMappingNamesCombo.getText().trim(), tableName, ex.getMessage() ) ); } } public Mapping getMapping( boolean performChecksAndShowGUIErrorDialog, List problems ) throws Exception { return getMapping( performChecksAndShowGUIErrorDialog, problems, false ); } /** * Parameter includeKeyToColumns should be true if only we need key to be included in mapColumns and mapAliases */ public Mapping getMapping( boolean performChecksAndShowGUIErrorDialog, List problems, Boolean includeKeyToColumns ) { String tableName = ""; if ( !Utils.isEmpty( m_existingTableNamesCombo.getText().trim() ) ) { tableName = m_existingTableNamesCombo.getText().trim(); if ( tableName.indexOf( '@' ) > 0 ) { tableName = tableName.substring( 0, tableName.indexOf( '@' ) ); } } // empty table name or mapping name does not force an abort if ( performChecksAndShowGUIErrorDialog && ( Utils.isEmpty( m_existingMappingNamesCombo.getText().trim() ) || Utils.isEmpty( tableName ) ) ) { MessageDialog.openError( m_shell, Messages.getString( "MappingDialog.Error.Title.MissingTableMappingName" ), Messages.getString( "MappingDialog.Error.Message.MissingTableMappingName" ) ); if ( problems != null ) { problems.add( Messages.getString( "MappingDialog.Error.Message.MissingTableMappingName" ) ); } return null; } // do we have any non-empty rows in the table? if ( m_fieldsView.nrNonEmpty() == 0 && performChecksAndShowGUIErrorDialog ) { MessageDialog.openError( m_shell, Messages.getString( "MappingDialog.Error.Title.NoFieldsDefined" ), Messages .getString( "MappingDialog.Error.Message.NoFieldsDefined" ) ); if ( problems != null ) { problems.add( Messages.getString( "MappingDialog.Error.Message.NoFieldsDefined" ) ); } return null; } // do we have a key defined in the table? HBaseService hBaseService = null; try { hBaseService = m_configProducer.getHBaseService(); if ( hBaseService == null ) { // Backlog-32244; If we don't have a service don't bother trying to map it. return null; } } catch ( Exception e ) { if ( problems != null ) { problems.add( e.getMessage() ); } return null; } Mapping theMapping = hBaseService.getMappingFactory().createMapping( tableName, m_existingMappingNamesCombo.getText().trim() ); boolean keyDefined = false; boolean moreThanOneKey = false; List missingFamilies = new ArrayList(); List missingColumnNames = new ArrayList(); List missingTypes = new ArrayList(); int nrNonEmpty = m_fieldsView.nrNonEmpty(); // is the mapping a tuple mapping? 
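// it is treated as a tuple mapping when the 5 (or 6) non-empty rows use exactly the
// reserved aliases KEY, Family, Column, Value, Timestamp (and, optionally, Visibility)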
boolean isTupleMapping = false; int tupleIdCount = 0; if ( nrNonEmpty >= 5 && nrNonEmpty <= 6 ) { for ( int i = 0; i < nrNonEmpty; i++ ) { if ( m_fieldsView.getNonEmpty( i ).getText( 1 ).equals( Mapping.TupleMapping.KEY.toString() ) || m_fieldsView .getNonEmpty( i ).getText( 1 ).equals( Mapping.TupleMapping.FAMILY.toString() ) || m_fieldsView.getNonEmpty( i ).getText( 1 ).equals( Mapping.TupleMapping.COLUMN.toString() ) || m_fieldsView.getNonEmpty( i ) .getText( 1 ).equals( Mapping.TupleMapping.VALUE.toString() ) || m_fieldsView.getNonEmpty( i ) .getText( 1 ).equals( Mapping.TupleMapping.TIMESTAMP.toString() ) || m_fieldsView.getNonEmpty( i ).getText( 1 ).equals( MappingUtils.TUPLE_MAPPING_VISIBILITY ) ) { tupleIdCount++; } } } if ( tupleIdCount == 5 || tupleIdCount == 6 ) { isTupleMapping = true; theMapping.setTupleMapping( true ); } for ( int i = 0; i < nrNonEmpty; i++ ) { TableItem item = m_fieldsView.getNonEmpty( i ); boolean isKey = false; String alias = null; if ( !Utils.isEmpty( item.getText( 1 ) ) ) { alias = item.getText( 1 ).trim(); } if ( !Utils.isEmpty( item.getText( 2 ) ) ) { isKey = item.getText( 2 ).trim().equalsIgnoreCase( "Y" ); if ( isKey && keyDefined ) { // more than one key, break here moreThanOneKey = true; break; } if ( isKey ) { keyDefined = true; } } // String family = null; String family = ""; if ( !Utils.isEmpty( item.getText( 3 ) ) ) { family = item.getText( 3 ); } else { if ( !isKey && !isTupleMapping ) { missingFamilies.add( item.getText( 0 ) ); } } // String colName = null; String colName = ""; if ( !Utils.isEmpty( item.getText( 4 ) ) ) { colName = item.getText( 4 ); } else { if ( !isKey && !isTupleMapping ) { missingColumnNames.add( item.getText( 0 ) ); } } String type = null; if ( !Utils.isEmpty( item.getText( 5 ) ) ) { type = item.getText( 5 ); } else { missingTypes.add( item.getText( 0 ) ); } String indexedVals = null; if ( !Utils.isEmpty( item.getText( 6 ) ) ) { indexedVals = item.getText( 6 ); } HBaseValueMetaInterfaceFactory valueMetaInterfaceFactory = hBaseService.getHBaseValueMetaInterfaceFactory(); // only add if we have all data and its all correct if ( isKey && !moreThanOneKey ) { if ( Utils.isEmpty( alias ) ) { // pop up an error dialog - key must have an alias because it does not // belong to a column family or have a column name if ( performChecksAndShowGUIErrorDialog ) { MessageDialog.openError( m_shell, Messages.getString( "MappingDialog.Error.Title.NoAliasForKey" ), Messages .getString( "MappingDialog.Error.Message.NoAliasForKey" ) ); } if ( problems != null ) { problems.add( Messages.getString( "MappingDialog.Error.Message.NoAliasForKey" ) ); } return null; } if ( Utils.isEmpty( type ) ) { // pop up an error dialog - must have a type for the key if ( performChecksAndShowGUIErrorDialog ) { MessageDialog.openError( m_shell, Messages.getString( "MappingDialog.Error.Title.NoTypeForKey" ), Messages .getString( "MappingDialog.Error.Message.NoTypeForKey" ) ); } if ( problems != null ) { problems.add( Messages.getString( "MappingDialog.Error.Message.NoTypeForKey" ) ); } return null; } if ( moreThanOneKey ) { // popup an error and then return if ( performChecksAndShowGUIErrorDialog ) { MessageDialog.openError( m_shell, Messages.getString( "MappingDialog.Error.Title.MoreThanOneKey" ), Messages .getString( "MappingDialog.Error.Message.MoreThanOneKey" ) ); } if ( problems != null ) { problems.add( Messages.getString( "MappingDialog.Error.Message.MoreThanOneKey" ) ); } return null; } if ( isTupleMapping ) { theMapping.setKeyName( alias ); 
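// for a tuple mapping the Family cell of the key row (if filled in) is recorded as the
// mapping's tuple families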
theMapping.setTupleFamilies( family ); } else { theMapping.setKeyName( alias ); } HBaseValueMetaInterface vm = valueMetaInterfaceFactory.createHBaseValueMetaInterface( null, null, alias, 0, -1, -1 ); vm.setKey( true ); try { theMapping.setKeyTypeAsString( type ); vm.setType( HBaseInput.getKettleTypeByKeyType( theMapping.getKeyType() ) ); if ( includeKeyToColumns ) { theMapping.addMappedColumn( vm, isTupleMapping ); } } catch ( Exception ex ) { // Ignore } } else { ByteConversionUtil byteConversionUtil = hBaseService.getByteConversionUtil(); // don't bother adding if there are any errors if ( missingFamilies.size() == 0 && missingColumnNames.size() == 0 && missingTypes.size() == 0 ) { // Set the alias name to the column name if no alias value is detected if ( Utils.isEmpty( alias ) ) { alias = colName; item.setText( 1, colName ); } HBaseValueMetaInterface vm = valueMetaInterfaceFactory.createHBaseValueMetaInterface( family, colName, alias, 0, -1, -1 ); try { vm.setHBaseTypeFromString( type ); } catch ( IllegalArgumentException e ) { // TODO pop up an error dialog for this one return null; } if ( vm.isString() && indexedVals != null && indexedVals.length() > 0 ) { Object[] vals = byteConversionUtil.stringIndexListToObjects( indexedVals ); vm.setIndex( vals ); vm.setStorageType( ValueMetaInterface.STORAGE_TYPE_INDEXED ); } try { theMapping.addMappedColumn( vm, isTupleMapping ); } catch ( Exception ex ) { // popup an error if this family:column is already in the mapping // and // then return. if ( performChecksAndShowGUIErrorDialog ) { MessageDialog.openError( m_shell, Messages.getString( "MappingDialog.Error.Title.DuplicateColumn" ), Messages.getString( "MappingDialog.Error.Message1.DuplicateColumn" ) + family + "," + colName + Messages.getString( "MappingDialog.Error.Message2.DuplicateColumn" ) ); } if ( problems != null ) { problems.add( Messages.getString( "MappingDialog.Error.Message1.DuplicateColumn" ) + family + "," + colName + Messages.getString( "MappingDialog.Error.Message2.DuplicateColumn" ) ); } return null; } } } } // now check for any errors in our Lists if ( !keyDefined ) { if ( performChecksAndShowGUIErrorDialog ) { MessageDialog.openError( m_shell, Messages.getString( "MappingDialog.Error.Title.NoKeyDefined" ), Messages .getString( "MappingDialog.Error.Message.NoKeyDefined" ) ); } if ( problems != null ) { problems.add( Messages.getString( "MappingDialog.Error.Message.NoKeyDefined" ) ); } return null; } if ( missingFamilies.size() > 0 || missingColumnNames.size() > 0 || missingTypes.size() > 0 ) { StringBuffer buff = new StringBuffer(); buff.append( Messages.getString( "MappingDialog.Error.Message.IssuesPreventingSaving" ) + ":\n\n" ); if ( missingFamilies.size() > 0 ) { buff.append( Messages.getString( "MappingDialog.Error.Message.FamilyIssue" ) + ":\n" ); buff.append( missingFamilies.toString() ).append( "\n\n" ); } if ( missingColumnNames.size() > 0 ) { buff.append( Messages.getString( "MappingDialog.Error.Message.ColumnIssue" ) + ":\n" ); buff.append( missingColumnNames.toString() ).append( "\n\n" ); } if ( missingTypes.size() > 0 ) { buff.append( Messages.getString( "MappingDialog.Error.Message.TypeIssue" ) + ":\n" ); buff.append( missingTypes.toString() ).append( "\n\n" ); } if ( performChecksAndShowGUIErrorDialog ) { MessageDialog.openError( m_shell, Messages.getString( "MappingDialog.Error.Title.IssuesPreventingSaving" ), buff .toString() ); } if ( problems != null ) { problems.add( buff.toString() ); } return null; } return theMapping; } private void saveMapping() { 
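// validate the mapping built from the grid and persist it. If table creation is allowed
// and the target table does not exist, the name typed into the table combo may carry the
// "@<compression>[@<bloom filter>]" suffix described in the class javadoc - e.g. (purely
// illustrative values) "weblogs@GZ@ROWCOL" is split on '@' into table "weblogs" with GZ
// compression and a ROWCOL bloom filter; LZO additionally needs the native LZO libraries
// on the HBase nodes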
if ( namedClusterWidget != null && namedClusterWidget.getSelectedNamedCluster() == null ) { MessageDialog.openError( m_shell, BaseMessages.getString( PKG, "MappingDialog.Error.Title.NamedClusterNotSelected" ), BaseMessages.getString( PKG, "MappingDialog.Error.Message.NamedClusterNotSelected.Msg" ) ); return; } Mapping theMapping = getMapping( true, null, false ); if ( theMapping == null ) { // some problem with the mapping (user will have been informed via dialog) return; } if ( notInitializedMappingAdmin() ) { try { m_admin = MappingUtils.getMappingAdmin( m_configProducer ); } catch ( HBaseConnectionException e ) { showConnectionErrorDialog( e ); return; } } String tableName = theMapping.getTableName(); if ( m_allowTableCreate ) { // check for existence of the table. If table doesn't exist // prompt for creation HBaseConnection hbAdmin = m_admin.getConnection(); try { if ( !hbAdmin.getTable( tableName ).exists() ) { boolean result = MessageDialog.openConfirm( m_shell, "Create table", "Table \"" + tableName + "\" does not exist. Create it?" ); if ( !result ) { return; } if ( theMapping.getMappedColumns().size() == 0 ) { MessageDialog.openError( m_shell, "No columns defined", "A HBase table requires at least one column family to be defined." ); return; } // collect up all the column families so that we can create the table Set cols = theMapping.getMappedColumns().keySet(); Set families = new TreeSet(); for ( String col : cols ) { String family = theMapping.getMappedColumns().get( col ).getColumnFamily(); families.add( family ); } // do we have additional parameters supplied in the table name field // String compression = Compression.Algorithm.NONE.getName(); String compression = null; // String bloomFilter = "NONE"; String bloomFilter = null; String[] opts = m_existingTableNamesCombo.getText().trim().split( "@" ); if ( opts.length > 1 ) { compression = opts[ 1 ]; if ( opts.length == 3 ) { bloomFilter = opts[ 2 ]; } } Properties creationProps = new Properties(); if ( compression != null ) { creationProps.setProperty( HBaseConnection.COL_DESCRIPTOR_COMPRESSION_KEY, compression ); } if ( bloomFilter != null ) { creationProps.setProperty( HBaseConnection.COL_DESCRIPTOR_BLOOM_FILTER_KEY, bloomFilter ); } List familyList = new ArrayList(); for ( String fam : families ) { familyList.add( fam ); } // create the table hbAdmin.getTable( tableName ).create( familyList, creationProps ); // refresh the table combo populateTableCombo( true ); } } catch ( Exception ex ) { new ErrorDialog( m_shell, Messages.getString( "MappingDialog.Error.Title.ErrorCreatingTable" ), Messages .getString( "MappingDialog.Error.Message.ErrorCreatingTable" ) + " \"" + m_existingTableNamesCombo.getText() .trim() + "\"", ex ); return; } } try { // now check to see if the mapping exists if ( m_admin.mappingExists( tableName, m_existingMappingNamesCombo.getText().trim() ) ) { // prompt for overwrite boolean result = MessageDialog.openConfirm( m_shell, Messages.getString( "MappingDialog.Info.Title.MappingExists" ), Messages .getString( "MappingDialog.Info.Message1.MappingExists" ) + m_existingMappingNamesCombo.getText().trim() + Messages.getString( "MappingDialog.Info.Message2.MappingExists" ) + tableName + Messages.getString( "MappingDialog.Info.Message3.MappingExists" ) ); if ( !result ) { return; } } // finally add the mapping. 
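// passing true allows an existing mapping of the same name to be overwritten; the user
// has already confirmed the overwrite in the prompt above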
m_admin.putMapping( theMapping, true ); MessageDialog.openConfirm( m_shell, Messages.getString( "MappingDialog.Info.Title.MappingSaved" ), Messages .getString( "MappingDialog.Info.Message1.MappingSaved" ) + m_existingMappingNamesCombo.getText().trim() + Messages.getString( "MappingDialog.Info.Message2.MappingSaved" ) + tableName + Messages.getString( "MappingDialog.Info.Message3.MappingSaved" ) ); } catch ( Exception ex ) { // inform the user via popup new ErrorDialog( m_shell, Messages.getString( "MappingDialog.Error.Title.ErrorSaving" ), Messages.getString( "MappingDialog.Error.Message.ErrorSaving" ), ex ); } } public void setMapping( Mapping mapping ) { if ( mapping == null ) { return; } if ( !Utils.isEmpty( mapping.getTableName() ) ) { m_existingTableNamesCombo.setText( mapping.getTableName() ); } if ( !Utils.isEmpty( mapping.getMappingName() ) ) { m_existingMappingNamesCombo.setText( mapping.getMappingName() ); } m_fieldsView.clearAll(); // do the key first TableItem keyItem = new TableItem( m_fieldsView.table, SWT.NONE ); if ( !Utils.isEmpty( mapping.getKeyName() ) ) { keyItem.setText( 1, mapping.getKeyName() ); } keyItem.setText( 2, "Y" ); if ( mapping.getKeyType() != null && !Utils.isEmpty( mapping.getKeyType().toString() ) ) { keyItem.setText( 5, mapping.getKeyType().toString() ); } if ( mapping.isTupleMapping() && !Utils.isEmpty( mapping.getTupleFamilies() ) ) { keyItem.setText( 3, mapping.getTupleFamilies() ); } // the rest of the fields in the mapping Map mappedFields = mapping.getMappedColumns(); for ( String alias : mappedFields.keySet() ) { HBaseValueMetaInterface vm = mappedFields.get( alias ); TableItem item = new TableItem( m_fieldsView.table, SWT.NONE ); item.setText( 1, alias ); item.setText( 2, "N" ); item.setText( 3, vm.getColumnFamily() ); item.setText( 4, vm.getColumnName() ); if ( vm.isInteger() ) { if ( vm.getIsLongOrDouble() ) { item.setText( 5, "Long" ); } else { item.setText( 5, "Integer" ); } } else if ( vm.isNumber() ) { if ( vm.getIsLongOrDouble() ) { item.setText( 5, "Double" ); } else { item.setText( 5, "Float" ); } } else { item.setText( 5, vm.getTypeDesc() ); } if ( vm.getStorageType() == ValueMetaInterface.STORAGE_TYPE_INDEXED ) { item.setText( 6, m_admin.getConnection().getByteConversionUtil().objectIndexValuesToString( vm .getIndex() ) ); } } m_fieldsView.removeEmptyRows(); m_fieldsView.setRowNums(); m_fieldsView.optWidth( true ); } private void loadTableViewFromMapping() { String tableName = ""; if ( !Utils.isEmpty( m_existingTableNamesCombo.getText().trim() ) ) { tableName = m_existingTableNamesCombo.getText().trim(); if ( tableName.indexOf( '@' ) > 0 ) { tableName = tableName.substring( 0, tableName.indexOf( '@' ) ); } } try { if ( m_admin.mappingExists( tableName, m_existingMappingNamesCombo.getText().trim() ) ) { Mapping mapping = m_admin.getMapping( tableName, m_existingMappingNamesCombo.getText().trim() ); setMapping( mapping ); } } catch ( Exception ex ) { // inform the user via popup new ErrorDialog( m_shell, Messages.getString( "MappingDialog.Error.Title.ErrorLoadingMapping" ), Messages .getString( "MappingDialog.Error.Message.ErrorLoadingMapping" ), ex ); } } private void populateMappingComboAndFamilyStuff() { String tableName = ""; if ( !Utils.isEmpty( m_existingTableNamesCombo.getText().trim() ) ) { tableName = m_existingTableNamesCombo.getText().trim(); if ( tableName.indexOf( '@' ) > 0 ) { tableName = tableName.substring( 0, tableName.indexOf( '@' ) ); } } // defaults if we fail to connect, table doesn't exist etc.. 
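// start from an empty Family drop-down and an empty mapping-name combo, then try to
// repopulate both from the currently selected table below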
m_familyCI.setComboValues( new String[] { "" } ); m_existingMappingNamesCombo.removeAll(); if ( m_admin != null && !Utils.isEmpty( tableName ) ) { try { // first get the existing mapping names (if any) List mappingNames = m_admin.getMappingNames( tableName ); for ( String m : mappingNames ) { m_existingMappingNamesCombo.add( m ); } // now get family information for this table HBaseConnection hbAdmin = m_admin.getConnection(); HBaseTable hBaseTable = hbAdmin.getTable( tableName ); if ( !HbaseUtil.parseQualifierFromTableName( tableName ).isEmpty() && hBaseTable.exists() ) { List colFams = hBaseTable.getColumnFamilies(); String[] familyNames = colFams.toArray( new String[ 1 ] ); m_familyCI.setComboValues( familyNames ); } else { m_familyCI.setComboValues( new String[] { "" } ); } m_familiesInvalidated = false; return; } catch ( Exception e ) { showConnectionErrorDialog( e ); } } } @Override public HBaseService getHBaseService() throws ClusterInitializationException { NamedCluster nc = namedClusterWidget.getSelectedNamedCluster(); return namedClusterServiceLocator.getService( nc, HBaseService.class ); } public HBaseConnection getHBaseConnection() throws IOException, ClusterInitializationException { return getHBaseService().getHBaseConnection( m_transMeta, null, null, null ); } public String getCurrentConfiguration() { String host = ""; String port = ""; NamedCluster nc = namedClusterWidget.getSelectedNamedCluster(); if ( nc != null ) { host = m_transMeta.environmentSubstitute( nc.getZooKeeperHost() ); port = m_transMeta.environmentSubstitute( nc.getZooKeeperPort() ); } return host + ":" + port; } @Override public void dispose() { // TODO Auto-generated method stub super.dispose(); } /** * @param name */ public void setSelectedNamedCluster( String name ) { namedClusterWidget.setSelectedNamedCluster( name ); } /** * @return */ public NamedCluster getSelectedNamedCluster() { return namedClusterWidget.getSelectedNamedCluster(); } } ================================================ FILE: kettle-plugins/hbase/core/src/main/java/org/pentaho/big/data/kettle/plugins/hbase/mapping/MappingUtils.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. 
* * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.hbase.mapping; import java.io.IOException; import java.util.HashSet; import java.util.List; import java.util.Set; import org.pentaho.hadoop.shim.api.cluster.ClusterInitializationException; import org.pentaho.big.data.kettle.plugins.hbase.HBaseConnectionException; import org.pentaho.big.data.kettle.plugins.hbase.MappingDefinition; import org.pentaho.big.data.kettle.plugins.hbase.MappingDefinition.MappingColumn; import org.pentaho.big.data.kettle.plugins.hbase.input.HBaseInput; import org.pentaho.big.data.kettle.plugins.hbase.input.Messages; import org.pentaho.hadoop.shim.api.hbase.ByteConversionUtil; import org.pentaho.hadoop.shim.api.hbase.HBaseConnection; import org.pentaho.hadoop.shim.api.hbase.HBaseService; import org.pentaho.hadoop.shim.api.hbase.mapping.Mapping; import org.pentaho.hadoop.shim.api.hbase.meta.HBaseValueMetaInterface; import org.pentaho.hadoop.shim.api.hbase.meta.HBaseValueMetaInterfaceFactory; import org.pentaho.di.core.Const; import org.pentaho.di.core.exception.KettleException; import org.pentaho.di.core.row.ValueMetaInterface; import org.pentaho.di.core.variables.VariableSpace; public class MappingUtils { public static final int TUPLE_COLUMNS_COUNT = 5; public static final int UNDEFINED_VALUE = -1; private static final Set TUPLE_COLUMNS = new HashSet(); public static final String TUPLE_MAPPING_VISIBILITY = "Visibility"; static { TUPLE_COLUMNS.add( Mapping.TupleMapping.KEY.toString() ); TUPLE_COLUMNS.add( Mapping.TupleMapping.FAMILY.toString() ); TUPLE_COLUMNS.add( Mapping.TupleMapping.COLUMN.toString() ); TUPLE_COLUMNS.add( Mapping.TupleMapping.VALUE.toString() ); TUPLE_COLUMNS.add( Mapping.TupleMapping.TIMESTAMP.toString() ); } public static MappingAdmin getMappingAdmin( ConfigurationProducer cProducer ) throws HBaseConnectionException { HBaseConnection hbConnection = null; try { hbConnection = cProducer.getHBaseConnection(); hbConnection.checkHBaseAvailable(); return new MappingAdmin( hbConnection ); } catch ( ClusterInitializationException | IOException e ) { throw new HBaseConnectionException( Messages.getString( "MappingDialog.Error.Message.UnableToConnect" ), e ); } } public static MappingAdmin getMappingAdmin( HBaseService hBaseService, VariableSpace variableSpace, String siteConfig, String defaultConfig ) throws IOException { HBaseConnection hBaseConnection = hBaseService.getHBaseConnection( variableSpace, siteConfig, defaultConfig, null ); return new MappingAdmin( hBaseConnection ); } public static Mapping getMapping( MappingDefinition mappingDefinition, HBaseService hBaseService ) throws KettleException { final String tableName = mappingDefinition.getTableName(); // empty table name or mapping name does not force an abort if ( Const.isEmpty( tableName ) || Const.isEmpty( mappingDefinition.getMappingName() ) ) { throw new KettleException( Messages.getString( "MappingDialog.Error.Message.MissingTableMappingName" ) ); } // do we have any non-empty mapping definition? if ( mappingDefinition.getMappingColumns() == null || mappingDefinition.getMappingColumns().isEmpty() ) { throw new KettleException( Messages.getString( "MappingDialog.Error.Message.NoFieldsDefined" ) ); } Mapping theMapping = hBaseService.getMappingFactory().createMapping( tableName, mappingDefinition.getMappingName() ); // is the mapping a tuple mapping? 
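// for a tuple mapping the column family and column name are optional for non-key columns
// (see the !isTupleMapping checks below); the type, by contrast, is always required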
final boolean isTupleMapping = isTupleMapping( mappingDefinition ); if ( isTupleMapping ) { theMapping.setTupleMapping( true ); } List mappingColumns = mappingDefinition.getMappingColumns(); // think about more specific identifier then a row number int columnNumber = 0; boolean keyDefined = false; for ( MappingColumn column : mappingColumns ) { columnNumber++; final String alias = column.getAlias(); final boolean isKey = column.isKey(); if ( isKey ) { if ( keyDefined ) { throw new KettleException( Messages.getString( "MappingDialog.Error.Message.MoreThanOneKey" ) ); } keyDefined = true; } String family = null; if ( !Const.isEmpty( column.getColumnFamily() ) ) { family = column.getColumnFamily(); } else if ( !isKey && !isTupleMapping ) { throw new KettleException( Messages.getString( "MappingDialog.Error.Message.FamilyIssue" ) + ": " + columnNumber ); } String colName = null; if ( !Const.isEmpty( column.getColumnName() ) ) { colName = column.getColumnName(); } else if ( !isKey && !isTupleMapping ) { throw new KettleException( Messages.getString( "MappingDialog.Error.Message.ColumnIssue" ) + ": " + columnNumber ); } String type = null; if ( !Const.isEmpty( column.getType() ) ) { type = column.getType(); } else { throw new KettleException( Messages.getString( "MappingDialog.Error.Message.TypeIssue" ) + ": " + columnNumber ); } HBaseValueMetaInterfaceFactory valueMetaInterfaceFactory = hBaseService.getHBaseValueMetaInterfaceFactory(); if ( isKey ) { if ( Const.isEmpty( alias ) ) { throw new KettleException( Messages.getString( "MappingDialog.Error.Message.NoAliasForKey" ) ); } if ( isTupleMapping ) { theMapping.setKeyName( alias ); theMapping.setTupleFamilies( family ); } else { theMapping.setKeyName( alias ); } HBaseValueMetaInterface valueMeta = valueMetaInterfaceFactory.createHBaseValueMetaInterface( null, null, alias, 0, UNDEFINED_VALUE, UNDEFINED_VALUE ); valueMeta.setKey( true ); try { theMapping.setKeyTypeAsString( type ); valueMeta.setType( HBaseInput.getKettleTypeByKeyType( theMapping.getKeyType() ) ); } catch ( Exception ex ) { // Ignore } } else { try { HBaseValueMetaInterface valueMeta = buildNonKeyValueMeta( alias, family, colName, type, column.getIndexedValues(), hBaseService ); theMapping.addMappedColumn( valueMeta, isTupleMapping ); } catch ( Exception ex ) { String message = Messages.getString( "MappingDialog.Error.Message1.DuplicateColumn" ) + family + "," + colName + Messages .getString( "MappingDialog.Error.Message2.DuplicateColumn" ); throw new KettleException( message ); } } } if ( !keyDefined ) { throw new KettleException( Messages.getString( "MappingDialog.Error.Message.NoKeyDefined" ) ); } return theMapping; } public static HBaseValueMetaInterface buildNonKeyValueMeta( String alias, String family, String columnName, String type, String indexedVals, HBaseService hBaseService ) throws KettleException { HBaseValueMetaInterfaceFactory valueMetaInterfaceFactory = hBaseService.getHBaseValueMetaInterfaceFactory(); HBaseValueMetaInterface valueMeta = valueMetaInterfaceFactory.createHBaseValueMetaInterface( family, columnName, alias, 0, UNDEFINED_VALUE, UNDEFINED_VALUE ); try { valueMeta.setHBaseTypeFromString( type ); if ( valueMeta.isString() && !Const.isEmpty( indexedVals ) ) { ByteConversionUtil byteConversionUtil = hBaseService.getByteConversionUtil(); Object[] vals = byteConversionUtil.stringIndexListToObjects( indexedVals ); valueMeta.setIndex( vals ); valueMeta.setStorageType( ValueMetaInterface.STORAGE_TYPE_INDEXED ); } return valueMeta; } catch ( 
IllegalArgumentException e ) { throw new KettleException( e ); } } public static boolean isTupleMapping( MappingDefinition mappingDefinition ) { List mappingColumns = mappingDefinition.getMappingColumns(); int mappingSize = mappingColumns.size(); if ( !( mappingSize == TUPLE_COLUMNS_COUNT || mappingSize == TUPLE_COLUMNS_COUNT + 1 ) ) { return false; } int tupleIdCount = 0; for ( MappingColumn column : mappingColumns ) { if ( isTupleMappingColumn( column.getAlias() ) ) { tupleIdCount++; } } return tupleIdCount == TUPLE_COLUMNS_COUNT || tupleIdCount == TUPLE_COLUMNS_COUNT + 1; } public static boolean isTupleMappingColumn( String columnName ) { return TUPLE_COLUMNS.contains( columnName ) || columnName.equals( TUPLE_MAPPING_VISIBILITY ); } } ================================================ FILE: kettle-plugins/hbase/core/src/main/java/org/pentaho/big/data/kettle/plugins/hbase/output/HBaseOutput.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.hbase.output; import java.io.IOException; import java.util.ArrayList; import java.util.List; import java.util.Map; import org.pentaho.hadoop.shim.api.cluster.NamedClusterServiceLocator; import org.pentaho.big.data.kettle.plugins.hbase.mapping.MappingAdmin; import org.pentaho.big.data.kettle.plugins.hbase.output.KettleRowToHBaseTuple.FieldException; import org.pentaho.hadoop.shim.api.hbase.ByteConversionUtil; import org.pentaho.hadoop.shim.api.hbase.HBaseConnection; import org.pentaho.hadoop.shim.api.hbase.HBaseService; import org.pentaho.hadoop.shim.api.hbase.mapping.Mapping; import org.pentaho.hadoop.shim.api.hbase.meta.HBaseValueMetaInterface; import org.pentaho.hadoop.shim.api.hbase.table.HBaseDelete; import org.pentaho.hadoop.shim.api.hbase.table.HBasePut; import org.pentaho.hadoop.shim.api.hbase.table.HBaseTable; import org.pentaho.hadoop.shim.api.hbase.table.HBaseTableWriteOperationManager; import org.pentaho.di.core.exception.KettleException; import org.pentaho.di.core.row.RowMetaInterface; import org.pentaho.di.core.row.ValueMetaInterface; import org.pentaho.di.core.util.Utils; import org.pentaho.di.i18n.BaseMessages; import org.pentaho.di.trans.Trans; import org.pentaho.di.trans.TransMeta; import org.pentaho.di.trans.step.BaseStep; import org.pentaho.di.trans.step.StepDataInterface; import org.pentaho.di.trans.step.StepInterface; import org.pentaho.di.trans.step.StepMeta; import org.pentaho.di.trans.step.StepMetaInterface; /** * Class providing an output step for writing data to an HBase table according to meta data column/type mapping info * stored in a separate HBase table called "pentaho_mappings". See org.pentaho.hbase.mapping.Mapping for details on the * meta data format. 
* * @author Mark Hall (mhall{[at]}pentaho{[dot]}com) */ public class HBaseOutput extends BaseStep implements StepInterface { protected HBaseOutputMeta m_meta; protected HBaseOutputData m_data; private final NamedClusterServiceLocator namedClusterServiceLocator; private HBaseService hBaseService; private HBaseTableWriteOperationManager targetTableWriteOperationManager; public HBaseOutput( StepMeta stepMeta, StepDataInterface stepDataInterface, int copyNr, TransMeta transMeta, Trans trans, NamedClusterServiceLocator namedClusterServiceLocator ) { super( stepMeta, stepDataInterface, copyNr, transMeta, trans ); this.namedClusterServiceLocator = namedClusterServiceLocator; } /** Configuration object for connecting to HBase */ protected HBaseConnection m_hbAdmin; /** Byte utilities */ protected ByteConversionUtil m_bytesUtil; /** The mapping admin object for interacting with mapping information */ protected MappingAdmin m_mappingAdmin; /** The mapping information to use in order to decode HBase column values */ protected Mapping m_tableMapping; /** Information from the mapping */ protected Map m_columnsMappedByAlias; /** True if the target table has been connected to successfully */ protected HBaseTable targetTable; /** Index of the key in the incoming fields */ protected int m_incomingKeyIndex; /** The ValueMetaInterface of the incoming key field */ protected ValueMetaInterface m_incomingKeyValueMeta; /** Object used when a tuple is supplied as the incoming fields */ protected KettleRowToHBaseTuple tupleRowConverter; @Override public boolean processRow( StepMetaInterface smi, StepDataInterface sdi ) throws KettleException { Object[] r = getRow(); if ( r == null ) { // no more input // clean up/close connections etc. // target table will be null if we haven't seen any input if ( targetTable != null ) { if ( targetTableWriteOperationManager != null ) { try { if ( !targetTableWriteOperationManager.isAutoFlush() ) { logBasic( BaseMessages.getString( HBaseOutputMeta.PKG, "HBaseOutput.FlushingWriteBuffer" ) ); targetTableWriteOperationManager.flushCommits(); } } catch ( Exception ex ) { throw new KettleException( BaseMessages.getString( HBaseOutputMeta.PKG, "HBaseOutput.Error.ProblemFlushingBufferedData", ex.getMessage() ), ex ); } finally { try { targetTableWriteOperationManager.close(); } catch ( IOException e ) { // Ignore } } } try { targetTable.close(); } catch ( IOException e ) { // Ignore } try { logBasic( BaseMessages.getString( HBaseOutputMeta.PKG, "HBaseOutput.ClosingConnectionToTable" ) ); targetTable = null; m_hbAdmin.close(); } catch ( Exception ex ) { throw new KettleException( BaseMessages.getString( HBaseOutputMeta.PKG, "HBaseOutput.Error.ProblemWhenClosingConnection", ex.getMessage() ), ex ); } } setOutputDone(); return false; } if ( first ) { first = false; m_meta = (HBaseOutputMeta) smi; m_data = (HBaseOutputData) sdi; // Get the connection to HBase try { logBasic( BaseMessages.getString( HBaseOutputMeta.PKG, "HBaseOutput.ConnectingToHBase" ) ); List connectionMessages = new ArrayList(); hBaseService = namedClusterServiceLocator.getService( m_meta.getNamedCluster(), HBaseService.class ); m_hbAdmin = hBaseService.getHBaseConnection( this, environmentSubstitute( m_meta.getCoreConfigURL() ), environmentSubstitute( m_meta.getDefaultConfigURL() ), log ); m_bytesUtil = hBaseService.getByteConversionUtil(); if ( connectionMessages.size() > 0 ) { for ( String m : connectionMessages ) { logBasic( m ); } } } catch ( Exception ex ) { throw new KettleException( BaseMessages.getString( 
HBaseOutputMeta.PKG, "HBaseOutput.Error.UnableToObtainConnection", ex.getMessage() ), ex ); } try { m_mappingAdmin = new MappingAdmin( m_hbAdmin ); } catch ( Exception ex ) { throw new KettleException( BaseMessages.getString( HBaseOutputMeta.PKG, "HBaseOutput.Error.UnableToObtainConnection", ex.getMessage() ), ex ); } // check on the existence and readiness of the target table String targetName = environmentSubstitute( m_meta.getTargetTableName() ); if ( Utils.isEmpty( targetName ) ) { throw new KettleException( BaseMessages.getString( HBaseOutputMeta.PKG, "HBaseOutput.Error.NoTargetTableSpecified" ) ); } try { targetTable = m_hbAdmin.getTable( targetName ); if ( !targetTable.exists() ) { throw new KettleException( BaseMessages.getString( HBaseOutputMeta.PKG, "HBaseOutput.Error.TargetTableDoesNotExist", targetName ) ); } if ( targetTable.disabled() || !targetTable.available() ) { throw new KettleException( BaseMessages.getString( HBaseOutputMeta.PKG, "HBaseOutput.Error.TargetTableIsNotAvailable", targetName ) ); } } catch ( Exception ex ) { throw new KettleException( BaseMessages.getString( HBaseOutputMeta.PKG, "HBaseOutput.Error.ProblemWhenCheckingAvailReadiness", targetName, ex.getMessage() ), ex ); } // Get mapping details for the target table if ( m_meta.getMapping() != null && Utils.isEmpty( m_meta.getTargetMappingName() ) ) { m_tableMapping = m_meta.getMapping(); } else { try { logBasic( BaseMessages.getString( HBaseOutputMeta.PKG, "HBaseOutput.RetrievingMappingDetails" ) ); m_tableMapping = m_mappingAdmin.getMapping( environmentSubstitute( m_meta.getTargetTableName() ), environmentSubstitute( m_meta.getTargetMappingName() ) ); } catch ( Exception ex ) { throw new KettleException( BaseMessages.getString( HBaseOutputMeta.PKG, "HBaseOutput.Error.ProblemGettingMappingInfo", ex.getMessage() ), ex ); } } m_columnsMappedByAlias = m_tableMapping.getMappedColumns(); if ( !m_meta.m_deleteRowKey && m_tableMapping.isTupleMapping() ) { /* * We are not executing a delete and the mapping is a tuple mapping * Deletes need to go through the other branch of code to decode the incoming key field index */ try { tupleRowConverter = new KettleRowToHBaseTuple( getInputRowMeta(), m_tableMapping, m_columnsMappedByAlias ); } catch ( Exception e ) { throw new KettleException( e ); } } else { // check that all incoming fields are in the mapping. // fewer fields than the mapping defines is OK as long as we have // the key as an incoming field. Can either use strict type checking // or use an error stream for rows where type-conversion to the mapping // types fail. Probably should use an error stream - e.g. could get rows // with negative numeric key value where mapping specifies an unsigned key boolean incomingKey = false; RowMetaInterface inMeta = getInputRowMeta(); for ( int i = 0; i < inMeta.size(); i++ ) { ValueMetaInterface vm = inMeta.getValueMeta( i ); String inName = vm.getName(); if ( m_tableMapping.getKeyName().equals( inName ) ) { incomingKey = true; m_incomingKeyIndex = i; m_incomingKeyValueMeta = vm; // should we check the type? 
} else { HBaseValueMetaInterface hvm = m_columnsMappedByAlias.get( inName.trim() ); if ( hvm == null && !m_meta.getDeleteRowKey() ) { throw new KettleException( BaseMessages.getString( HBaseOutputMeta.PKG, "HBaseOutput.Error.CantFindIncomingField", inName, m_tableMapping.getMappingName() ) ); } } } if ( !incomingKey ) { throw new KettleException( BaseMessages.getString( HBaseOutputMeta.PKG, "HBaseOutput.Error.TableKeyNotPresentInIncomingFields", m_tableMapping.getKeyName(), m_tableMapping .getMappingName() ) ); } } try { logBasic( BaseMessages.getString( HBaseOutputMeta.PKG, "HBaseOutput.ConnectingToTargetTable" ) ); // set a write buffer size (and disable auto flush) Long writeBufferSize = null; if ( !Utils.isEmpty( m_meta.getWriteBufferSize() ) ) { writeBufferSize = Long.parseLong( environmentSubstitute( m_meta.getWriteBufferSize() ) ); logBasic( BaseMessages.getString( HBaseOutputMeta.PKG, "HBaseOutput.SettingWriteBuffer", writeBufferSize ) ); if ( m_meta.getDisableWriteToWAL() ) { logBasic( BaseMessages.getString( HBaseOutputMeta.PKG, "HBaseOutput.DisablingWriteToWAL" ) ); } } targetTableWriteOperationManager = targetTable.createWriteOperationManager( writeBufferSize ); } catch ( Exception e ) { throw new KettleException( BaseMessages.getString( HBaseOutputMeta.PKG, "HBaseOutput.Error.ProblemConnectingToTargetTable", e.getMessage() ), e ); } // output (downstream) is the same as input m_data.setOutputRowMeta( getInputRowMeta() ); } if ( m_meta.getDeleteRowKey() ) { try { if ( m_incomingKeyValueMeta.isNull( r[m_incomingKeyIndex] ) ) { throw new KettleException( BaseMessages.getString( HBaseOutputMeta.PKG, "HBaseOutput.Error.IncomingRowHasNullKeyValue" ) ); } byte[] encodedKeyBytes = m_bytesUtil.encodeKeyValue( r[m_incomingKeyIndex], m_incomingKeyValueMeta, m_tableMapping.getKeyType() ); HBaseDelete hBaseDelete = targetTableWriteOperationManager.createDelete( encodedKeyBytes ); hBaseDelete.execute(); } catch ( Exception ex ) { if ( getStepMeta().isDoingErrorHandling() ) { String errorDescriptions = ""; if ( !Utils.isEmpty( ex.getMessage() ) ) { errorDescriptions = ex.getMessage(); } else { errorDescriptions = BaseMessages.getString( HBaseOutputMeta.PKG, "HBaseOutput.Error.ErrorCreatingDelete" ); } putError( getInputRowMeta(), r, 1, errorDescriptions, m_tableMapping.getKeyName(), "HBaseOutput004" ); return true; } else { throw new KettleException( ex ); } } } else { // Put the data HBasePut hBasePut; if ( tupleRowConverter != null ) { try { hBasePut = tupleRowConverter.createTuplePut( targetTableWriteOperationManager, m_bytesUtil, r, !m_meta .getDisableWriteToWAL() ); } catch ( Exception ex ) { if ( getStepMeta().isDoingErrorHandling() ) { String errorDescriptions = ""; String errorFields = "Unknown"; if ( ex instanceof FieldException ) { errorFields = ( (FieldException) ex ).getFieldString(); errorDescriptions = BaseMessages.getString( HBaseOutputMeta.PKG, "HBaseOutput.Error.MissingFieldData", errorFields ); } else if ( !Utils.isEmpty( ex.getMessage() ) ) { errorDescriptions = ex.getMessage(); } else { errorDescriptions = BaseMessages.getString( HBaseOutputMeta.PKG, "HBaseOutput.Error.ErrorCreatingPut" ); } putError( getInputRowMeta(), r, 1, errorDescriptions, errorFields, "HBaseOutput003" ); return true; } else { throw new KettleException( ex ); } } } else { try { // key must not be null hBasePut = HBaseOutputData.initializeNewPut( getInputRowMeta(), m_incomingKeyIndex, r, m_tableMapping, m_bytesUtil, targetTableWriteOperationManager, !m_meta.getDisableWriteToWAL() ); if ( hBasePut == 
null ) { String errorDescriptions = BaseMessages.getString( HBaseOutputMeta.PKG, "HBaseOutput.Error.IncomingRowHasNullKeyValue" ); if ( getStepMeta().isDoingErrorHandling() ) { String errorFields = m_tableMapping.getKeyName(); putError( getInputRowMeta(), r, 1, errorDescriptions, errorFields, "HBaseOutput001" ); return true; } else { throw new KettleException( errorDescriptions ); } } } catch ( Exception ex ) { throw new KettleException( BaseMessages.getString( HBaseOutputMeta.PKG, "HBaseOutput.Error.UnableToSetTargetTable" ), ex ); } // now encode the rest of the fields. Nulls do not get inserted of course HBaseOutputData.addColumnsToPut( getInputRowMeta(), r, m_incomingKeyIndex, m_columnsMappedByAlias, hBasePut, m_bytesUtil ); } try { hBasePut.execute(); } catch ( Exception e ) { String errorDescriptions = BaseMessages.getString( HBaseOutputMeta.PKG, "HBaseOutput.Error.ProblemInsertingRowIntoHBase", e .getMessage() ); if ( getStepMeta().isDoingErrorHandling() ) { String errorFields = "Unknown"; putError( getInputRowMeta(), r, 1, errorDescriptions, errorFields, "HBaseOutput002" ); } else { throw new KettleException( errorDescriptions, e ); } } } // pass on the data to any downstream steps putRow( m_data.getOutputRowMeta(), r ); if ( log.isRowLevel() ) { log.logRowlevel( toString(), "Read row #" + getLinesRead() + " : " + r ); } if ( checkFeedback( getLinesRead() ) ) { logBasic( "Linenr " + getLinesRead() ); } return true; } @Override public boolean init( StepMetaInterface smi, StepDataInterface sdi ) { if ( super.init( smi, sdi ) ) { HBaseOutputMeta meta = (HBaseOutputMeta) smi; try { // Set Embedded NamedCluter MetatStore Provider Key so that it can be passed to VFS if ( getTransMeta().getNamedClusterEmbedManager() != null ) { getTransMeta().getNamedClusterEmbedManager().passEmbeddedMetastoreKey( getTransMeta(), getTransMeta().getEmbeddedMetastoreProviderKey() ); } meta.applyInjection( this ); return true; } catch ( KettleException e ) { logError( "Error while injecting properties", e ); } } return false; } @Override public void setStopped( boolean stopped ) { if ( isStopped() && stopped == true ) { return; } super.setStopped( stopped ); if ( stopped ) { if ( targetTable != null ) { try { if ( !targetTableWriteOperationManager.isAutoFlush() ) { logBasic( BaseMessages.getString( HBaseOutputMeta.PKG, "HBaseOutput.FlushingWriteBuffer" ) ); targetTableWriteOperationManager.flushCommits(); } } catch ( Exception ex ) { logError( BaseMessages.getString( HBaseOutputMeta.PKG, "HBaseOutput.Error.ProblemFlushingBufferedData", ex .getMessage() ), ex ); } } if ( m_hbAdmin != null ) { try { logBasic( BaseMessages.getString( HBaseOutputMeta.PKG, "HBaseOutput.ClosingConnectionToTable" ) ); m_hbAdmin.close(); } catch ( Exception ex ) { logError( BaseMessages.getString( HBaseOutputMeta.PKG, "HBaseOutput.Error.ProblemWhenClosingConnection", ex .getMessage() ), ex ); } } } } } ================================================ FILE: kettle-plugins/hbase/core/src/main/java/org/pentaho/big/data/kettle/plugins/hbase/output/HBaseOutputData.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. 
* * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.hbase.output; import org.pentaho.hadoop.shim.api.hbase.ByteConversionUtil; import org.pentaho.hadoop.shim.api.hbase.mapping.Mapping; import org.pentaho.hadoop.shim.api.hbase.meta.HBaseValueMetaInterface; import org.pentaho.hadoop.shim.api.hbase.table.HBasePut; import org.pentaho.hadoop.shim.api.hbase.table.HBaseTableWriteOperationManager; import org.pentaho.di.core.Const; import org.pentaho.di.core.exception.KettleException; import org.pentaho.di.core.row.RowMetaInterface; import org.pentaho.di.core.row.ValueMetaInterface; import org.pentaho.di.i18n.BaseMessages; import org.pentaho.di.trans.step.BaseStepData; import org.pentaho.di.trans.step.StepDataInterface; import java.net.MalformedURLException; import java.net.URL; import java.util.Map; /** * Class providing an output step for writing data to an HBase table according to meta data column/type mapping info * stored in a separate HBase table called "pentaho_mappings". See org.pentaho.hbase.mapping.Mapping for details on the * meta data format. * * @author Mark Hall (mhall{[at]}pentaho{[dot]}com) */ public class HBaseOutputData extends BaseStepData implements StepDataInterface { /** The output data format */ protected RowMetaInterface m_outputRowMeta; public RowMetaInterface getOutputRowMeta() { return m_outputRowMeta; } public void setOutputRowMeta( RowMetaInterface rmi ) { m_outputRowMeta = rmi; } /** * Sets up a new target table put operation using the connection shim * * @param inRowMeta * the incoming kettle row meta data * @param keyIndex * the index of the key in the incoming row structure * @param kettleRow * the current incoming kettle row * @param tableMapping * the HBase table mapping to use * @param bu * the byte util shim to use for conversion to and from byte arrays * @param hbAdmin * the connection shim * @param writeToWAL * true if the write ahead log should be written to * @return false if the key is null (missing) for the current incoming kettle row * @throws Exception * if a problem occurs when initializing the new put operation */ public static HBasePut initializeNewPut( RowMetaInterface inRowMeta, int keyIndex, Object[] kettleRow, Mapping tableMapping, ByteConversionUtil bu, HBaseTableWriteOperationManager hBaseTableWriteOperationManager, boolean writeToWAL ) throws Exception { ValueMetaInterface keyvm = inRowMeta.getValueMeta( keyIndex ); if ( keyvm.isNull( kettleRow[keyIndex] ) ) { return null; } byte[] encodedKey = bu.encodeKeyValue( kettleRow[keyIndex], keyvm, tableMapping.getKeyType() ); HBasePut hBaseTablePut = hBaseTableWriteOperationManager.createPut( encodedKey ); hBaseTablePut.setWriteToWAL( writeToWAL ); return hBaseTablePut; } /** * Adds those incoming kettle field values that are defined in the table mapping for the current row to the target * table put operation * * @param inRowMeta * the incoming kettle row meta data * @param kettleRow * the current incoming kettle row * @param keyIndex * the index of the key in the incoming row structure * @param columnsMappedByAlias * the columns in the table mapping * @param hbAdmin * the connection shim * @param bu * the byte util shim to use for conversion to and from byte arrays * @throws KettleException * if a problem occurs when adding a column to the put operation */ public static void addColumnsToPut( RowMetaInterface inRowMeta, Object[] kettleRow, int keyIndex, Map columnsMappedByAlias, HBasePut hBasePut, 
ByteConversionUtil bu ) throws KettleException { for ( int i = 0; i < inRowMeta.size(); i++ ) { ValueMetaInterface current = inRowMeta.getValueMeta( i ); if ( i != keyIndex && !current.isNull( kettleRow[i] ) ) { HBaseValueMetaInterface hbaseColMeta = columnsMappedByAlias.get( current.getName() ); String columnFamily = hbaseColMeta.getColumnFamily(); String columnName = hbaseColMeta.getColumnName(); boolean binaryColName = false; if ( columnName.startsWith( "@@@binary@@@" ) ) { // assume hex encoded column name columnName = columnName.replace( "@@@binary@@@", "" ); binaryColName = true; } byte[] encoded = hbaseColMeta.encodeColumnValue( kettleRow[i], current ); try { hBasePut.addColumn( columnFamily, columnName, binaryColName, encoded ); } catch ( Exception ex ) { throw new KettleException( BaseMessages.getString( HBaseOutputMeta.PKG, "HBaseOutput.Error.UnableToAddColumnToTargetTablePut" ), ex ); } } } } public static URL stringToURL( String pathOrURL ) throws MalformedURLException { URL result = null; if ( !Const.isEmpty( pathOrURL ) ) { if ( pathOrURL.toLowerCase().startsWith( "http://" ) || pathOrURL.toLowerCase().startsWith( "file://" ) ) { result = new URL( pathOrURL ); } else { String c = "file://" + pathOrURL; result = new URL( c ); } } return result; } } ================================================ FILE: kettle-plugins/hbase/core/src/main/java/org/pentaho/big/data/kettle/plugins/hbase/output/HBaseOutputDialog.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.hbase.output; import org.apache.commons.lang.StringUtils; import org.eclipse.jface.dialogs.MessageDialog; import org.eclipse.swt.SWT; import org.eclipse.swt.custom.CCombo; import org.eclipse.swt.custom.CTabFolder; import org.eclipse.swt.custom.CTabItem; import org.eclipse.swt.events.ModifyEvent; import org.eclipse.swt.events.ModifyListener; import org.eclipse.swt.events.SelectionAdapter; import org.eclipse.swt.events.SelectionEvent; import org.eclipse.swt.events.ShellAdapter; import org.eclipse.swt.events.ShellEvent; import org.eclipse.swt.graphics.Cursor; import org.eclipse.swt.layout.FormAttachment; import org.eclipse.swt.layout.FormData; import org.eclipse.swt.layout.FormLayout; import org.eclipse.swt.widgets.Button; import org.eclipse.swt.widgets.Composite; import org.eclipse.swt.widgets.Display; import org.eclipse.swt.widgets.Event; import org.eclipse.swt.widgets.FileDialog; import org.eclipse.swt.widgets.Label; import org.eclipse.swt.widgets.Listener; import org.eclipse.swt.widgets.MessageBox; import org.eclipse.swt.widgets.Shell; import org.eclipse.swt.widgets.Text; import org.pentaho.di.core.annotations.PluginDialog; import org.pentaho.big.data.kettle.plugins.hbase.HbaseUtil; import org.pentaho.hadoop.shim.api.cluster.NamedCluster; import org.pentaho.hadoop.shim.api.cluster.NamedClusterService; import org.pentaho.hadoop.shim.api.cluster.NamedClusterServiceLocator; import org.pentaho.hadoop.shim.api.cluster.ClusterInitializationException; import org.pentaho.big.data.kettle.plugins.hbase.ServiceStatus; import org.pentaho.big.data.kettle.plugins.hbase.mapping.ConfigurationProducer; import 
org.pentaho.big.data.kettle.plugins.hbase.mapping.FieldProducer; import org.pentaho.big.data.kettle.plugins.hbase.mapping.MappingAdmin; import org.pentaho.big.data.kettle.plugins.hbase.mapping.MappingEditor; import org.pentaho.big.data.plugins.common.ui.NamedClusterWidgetImpl; import org.pentaho.hadoop.shim.api.hbase.HBaseConnection; import org.pentaho.hadoop.shim.api.hbase.HBaseService; import org.pentaho.hadoop.shim.api.hbase.mapping.Mapping; import org.pentaho.di.core.Const; import org.pentaho.di.core.Props; import org.pentaho.di.core.exception.KettleException; import org.pentaho.di.core.row.RowMetaInterface; import org.pentaho.di.core.util.Utils; import org.pentaho.di.i18n.BaseMessages; import org.pentaho.di.trans.TransMeta; import org.pentaho.di.trans.step.BaseStepMeta; import org.pentaho.di.trans.step.StepDialogInterface; import org.pentaho.di.trans.step.StepMeta; import org.pentaho.di.ui.core.dialog.ErrorDialog; import org.pentaho.di.ui.core.gui.GUIResource; import org.pentaho.di.ui.core.widget.TextVar; import org.pentaho.di.ui.trans.step.BaseStepDialog; import org.pentaho.runtime.test.RuntimeTester; import org.pentaho.runtime.test.action.RuntimeTestActionService; import java.io.IOException; import java.util.ArrayList; import java.util.List; import java.util.Set; /** * Dialog class for HBaseOutput * * @author Mark Hall (mhall{[at]}pentaho{[dot]}com) */ @PluginDialog( id = "HBaseOutput", image = "HBO.svg", pluginType = PluginDialog.PluginType.JOBENTRY, documentationUrl = "Products/HBase_Output" ) public class HBaseOutputDialog extends BaseStepDialog implements StepDialogInterface, ConfigurationProducer, FieldProducer { private final HBaseOutputMeta m_currentMeta; private final HBaseOutputMeta m_originalMeta; private final HBaseOutputMeta m_configurationMeta; /** * various UI bits and pieces for the dialog */ private Label m_stepnameLabel; private Text m_stepnameText; // The tabs of the dialog private CTabFolder m_wTabFolder; private CTabItem m_wConfigTab; private CTabItem m_editorTab; NamedClusterWidgetImpl namedClusterWidget; // Core config line private Button m_coreConfigBut; private TextVar m_coreConfigText; // Default config line private Button m_defaultConfigBut; private TextVar m_defaultConfigText; // Table name line private Button m_mappedTableNamesBut; private CCombo m_mappedTableNamesCombo; // Mapping name line private Button m_mappingNamesBut; private CCombo m_mappingNamesCombo; //Delete row key line private Button m_deleteRowKeyBut; /** Store the mapping information in the step's meta data */ private Button m_storeMappingInStepMetaData; // Disable write to WAL check box private Button m_disableWriteToWALBut; // Write buffer size line private TextVar m_writeBufferSizeText; // mapping editor composite private MappingEditor m_mappingEditor; private NamedClusterService namedClusterService; private RuntimeTestActionService runtimeTestActionService; private RuntimeTester runtimeTester; private NamedClusterServiceLocator namedClusterServiceLocator; public HBaseOutputDialog( Shell parent, Object in, TransMeta tr, String name ) { super( parent, (BaseStepMeta) in, tr, name ); m_currentMeta = (HBaseOutputMeta) in; m_originalMeta = (HBaseOutputMeta) m_currentMeta.clone(); m_configurationMeta = (HBaseOutputMeta) m_currentMeta.clone(); namedClusterService = m_currentMeta.getNamedClusterService(); runtimeTestActionService = m_currentMeta.getRuntimeTestActionService(); runtimeTester = m_currentMeta.getRuntimeTester(); namedClusterServiceLocator = 
m_currentMeta.getNamedClusterServiceLocator(); } public String open() { Shell parent = getParent(); Display display = parent.getDisplay(); shell = new Shell( parent, SWT.DIALOG_TRIM | SWT.RESIZE | SWT.MIN | SWT.MAX ); props.setLook( shell ); setShellImage( shell, m_currentMeta ); // used to listen to a text field (m_wStepname) ModifyListener lsMod = new ModifyListener() { public void modifyText( ModifyEvent e ) { m_currentMeta.setChanged(); } }; changed = m_currentMeta.hasChanged(); FormLayout formLayout = new FormLayout(); formLayout.marginWidth = Const.FORM_MARGIN; formLayout.marginHeight = Const.FORM_MARGIN; shell.setLayout( formLayout ); shell.setText( BaseMessages.getString( HBaseOutputMeta.PKG, "HBaseOutputDialog.Shell.Title" ) ); int middle = props.getMiddlePct(); int margin = Const.MARGIN; // Stepname line m_stepnameLabel = new Label( shell, SWT.RIGHT ); m_stepnameLabel.setText( BaseMessages.getString( HBaseOutputMeta.PKG, "HBaseOutputDialog.StepName.Label" ) ); props.setLook( m_stepnameLabel ); FormData fd = new FormData(); fd.left = new FormAttachment( 0, 0 ); fd.right = new FormAttachment( middle, -margin ); fd.top = new FormAttachment( 0, margin ); m_stepnameLabel.setLayoutData( fd ); m_stepnameText = new Text( shell, SWT.SINGLE | SWT.LEFT | SWT.BORDER ); m_stepnameText.setText( stepname ); props.setLook( m_stepnameText ); m_stepnameText.addModifyListener( lsMod ); // format the text field fd = new FormData(); fd.left = new FormAttachment( middle, 0 ); fd.top = new FormAttachment( 0, margin ); fd.right = new FormAttachment( 100, 0 ); m_stepnameText.setLayoutData( fd ); m_wTabFolder = new CTabFolder( shell, SWT.BORDER ); props.setLook( m_wTabFolder, Props.WIDGET_STYLE_TAB ); m_wTabFolder.setSimple( false ); // Start of the config tab m_wConfigTab = new CTabItem( m_wTabFolder, SWT.NONE ); m_wConfigTab.setText( BaseMessages.getString( HBaseOutputMeta.PKG, "HBaseOutputDialog.ConfigTab.TabTitle" ) ); Composite wConfigComp = new Composite( m_wTabFolder, SWT.NONE ); props.setLook( wConfigComp ); FormLayout configLayout = new FormLayout(); configLayout.marginWidth = 3; configLayout.marginHeight = 3; wConfigComp.setLayout( configLayout ); Label namedClusterLab = new Label( wConfigComp, SWT.RIGHT ); namedClusterLab.setText( BaseMessages.getString( HBaseOutputMeta.PKG, "HBaseOutputDialog.NamedCluster.Label" ) ); namedClusterLab.setToolTipText( BaseMessages.getString( HBaseOutputMeta.PKG, "HBaseOutputDialog.NamedCluster.TipText" ) ); props.setLook( namedClusterLab ); fd = new FormData(); fd.left = new FormAttachment( 0, 0 ); fd.top = new FormAttachment( 0, 10 ); fd.right = new FormAttachment( middle, -margin ); namedClusterLab.setLayoutData( fd ); namedClusterWidget = new NamedClusterWidgetImpl( wConfigComp, false, namedClusterService, runtimeTestActionService, runtimeTester, false ); namedClusterWidget.initiate(); props.setLook( namedClusterWidget ); fd = new FormData(); fd.right = new FormAttachment( 100, 0 ); fd.top = new FormAttachment( 0, 0 ); fd.left = new FormAttachment( middle, 0 ); namedClusterWidget.setLayoutData( fd ); // core config line Label coreConfigLab = new Label( wConfigComp, SWT.RIGHT ); coreConfigLab.setText( BaseMessages.getString( HBaseOutputMeta.PKG, "HBaseOutputDialog.CoreConfig.Label" ) ); coreConfigLab .setToolTipText( BaseMessages.getString( HBaseOutputMeta.PKG, "HBaseOutputDialog.CoreConfig.TipText" ) ); props.setLook( coreConfigLab ); fd = new FormData(); fd.left = new FormAttachment( 0, 0 ); fd.top = new FormAttachment( namedClusterWidget, margin ); fd.right 
= new FormAttachment( middle, -margin ); coreConfigLab.setLayoutData( fd ); m_coreConfigBut = new Button( wConfigComp, SWT.PUSH | SWT.CENTER ); props.setLook( m_coreConfigBut ); m_coreConfigBut.setText( BaseMessages.getString( HBaseOutputMeta.PKG, "System.Button.Browse" ) ); fd = new FormData(); fd.right = new FormAttachment( 100, 0 ); fd.top = new FormAttachment( namedClusterWidget, 0 ); m_coreConfigBut.setLayoutData( fd ); m_coreConfigBut.addSelectionListener( new SelectionAdapter() { @Override public void widgetSelected( SelectionEvent e ) { FileDialog dialog = new FileDialog( shell, SWT.OPEN ); String[] extensions = null; String[] filterNames = null; extensions = new String[ 2 ]; filterNames = new String[ 2 ]; extensions[ 0 ] = "*.xml"; filterNames[ 0 ] = BaseMessages.getString( HBaseOutputMeta.PKG, "HBaseOutputDialog.FileType.XML" ); extensions[ 1 ] = "*"; filterNames[ 1 ] = BaseMessages.getString( HBaseOutputMeta.PKG, "System.FileType.AllFiles" ); dialog.setFilterExtensions( extensions ); if ( dialog.open() != null ) { m_coreConfigText.setText( dialog.getFilterPath() + System.getProperty( "file.separator" ) + dialog.getFileName() ); } } } ); m_coreConfigText = new TextVar( transMeta, wConfigComp, SWT.SINGLE | SWT.LEFT | SWT.BORDER ); props.setLook( m_coreConfigText ); m_coreConfigText.addModifyListener( lsMod ); // set the tool tip to the contents with any env variables expanded m_coreConfigText.addModifyListener( new ModifyListener() { public void modifyText( ModifyEvent e ) { m_coreConfigText.setToolTipText( transMeta.environmentSubstitute( m_coreConfigText.getText() ) ); } } ); fd = new FormData(); fd.left = new FormAttachment( middle, 0 ); fd.top = new FormAttachment( namedClusterWidget, margin ); fd.right = new FormAttachment( m_coreConfigBut, -margin ); m_coreConfigText.setLayoutData( fd ); // default config line Label defaultConfigLab = new Label( wConfigComp, SWT.RIGHT ); defaultConfigLab.setText( BaseMessages.getString( HBaseOutputMeta.PKG, "HBaseOutputDialog.DefaultConfig.Label" ) ); defaultConfigLab.setToolTipText( BaseMessages.getString( HBaseOutputMeta.PKG, "HBaseOutputDialog.DefaultConfig.TipText" ) ); props.setLook( defaultConfigLab ); fd = new FormData(); fd.left = new FormAttachment( 0, 0 ); fd.top = new FormAttachment( m_coreConfigText, margin ); fd.right = new FormAttachment( middle, -margin ); defaultConfigLab.setLayoutData( fd ); m_defaultConfigBut = new Button( wConfigComp, SWT.PUSH | SWT.CENTER ); props.setLook( m_defaultConfigBut ); m_defaultConfigBut.setText( BaseMessages.getString( HBaseOutputMeta.PKG, "System.Button.Browse" ) ); fd = new FormData(); fd.right = new FormAttachment( 100, 0 ); fd.top = new FormAttachment( m_coreConfigText, 0 ); m_defaultConfigBut.setLayoutData( fd ); m_defaultConfigBut.addSelectionListener( new SelectionAdapter() { @Override public void widgetSelected( SelectionEvent e ) { FileDialog dialog = new FileDialog( shell, SWT.OPEN ); String[] extensions = null; String[] filterNames = null; extensions = new String[ 2 ]; filterNames = new String[ 2 ]; extensions[ 0 ] = "*.xml"; filterNames[ 0 ] = BaseMessages.getString( HBaseOutputMeta.PKG, "HBaseInputDialog.FileType.XML" ); extensions[ 1 ] = "*"; filterNames[ 1 ] = BaseMessages.getString( HBaseOutputMeta.PKG, "System.FileType.AllFiles" ); dialog.setFilterExtensions( extensions ); if ( dialog.open() != null ) { m_defaultConfigText.setText( dialog.getFilterPath() + System.getProperty( "file.separator" ) + dialog.getFileName() ); } } } ); m_defaultConfigText = new TextVar( transMeta, 
wConfigComp, SWT.SINGLE | SWT.LEFT | SWT.BORDER ); props.setLook( m_defaultConfigText ); m_defaultConfigText.addModifyListener( lsMod ); // set the tool tip to the contents with any env variables expanded m_defaultConfigText.addModifyListener( new ModifyListener() { public void modifyText( ModifyEvent e ) { m_defaultConfigText.setToolTipText( transMeta.environmentSubstitute( m_defaultConfigText.getText() ) ); } } ); fd = new FormData(); fd.left = new FormAttachment( middle, 0 ); fd.top = new FormAttachment( m_coreConfigText, margin ); fd.right = new FormAttachment( m_defaultConfigBut, -margin ); m_defaultConfigText.setLayoutData( fd ); // table name Label tableNameLab = new Label( wConfigComp, SWT.RIGHT ); tableNameLab.setText( BaseMessages.getString( HBaseOutputMeta.PKG, "HBaseOutputDialog.TableName.Label" ) ); tableNameLab.setToolTipText( BaseMessages.getString( HBaseOutputMeta.PKG, "HBaseOutputDialog.TableName.TipText" ) ); props.setLook( tableNameLab ); fd = new FormData(); fd.left = new FormAttachment( 0, 0 ); fd.top = new FormAttachment( m_defaultConfigText, margin ); fd.right = new FormAttachment( middle, -margin ); tableNameLab.setLayoutData( fd ); m_mappedTableNamesBut = new Button( wConfigComp, SWT.PUSH | SWT.CENTER ); props.setLook( m_mappedTableNamesBut ); m_mappedTableNamesBut.setText( BaseMessages.getString( HBaseOutputMeta.PKG, "HBaseOutputDialog.TableName.Button" ) ); fd = new FormData(); fd.right = new FormAttachment( 100, 0 ); fd.top = new FormAttachment( m_defaultConfigText, 0 ); m_mappedTableNamesBut.setLayoutData( fd ); m_mappedTableNamesCombo = new CCombo( wConfigComp, SWT.BORDER ); props.setLook( m_mappedTableNamesCombo ); m_mappedTableNamesCombo.addModifyListener( new ModifyListener() { public void modifyText( ModifyEvent e ) { m_currentMeta.setChanged(); m_mappedTableNamesCombo.setToolTipText( transMeta.environmentSubstitute( m_mappedTableNamesCombo.getText() ) ); } } ); fd = new FormData(); fd.left = new FormAttachment( middle, 0 ); fd.top = new FormAttachment( m_defaultConfigText, margin ); fd.right = new FormAttachment( m_mappedTableNamesBut, -margin ); m_mappedTableNamesCombo.setLayoutData( fd ); m_mappedTableNamesBut.addSelectionListener( new SelectionAdapter() { @Override public void widgetSelected( SelectionEvent e ) { setupMappedTableNames(); if ( m_mappedTableNamesCombo.getItemCount() > 0 ) { m_mappedTableNamesCombo.setListVisible( true ); } } } ); // mapping name Label mappingNameLab = new Label( wConfigComp, SWT.RIGHT ); mappingNameLab.setText( BaseMessages.getString( HBaseOutputMeta.PKG, "HBaseOutputDialog.MappingName.Label" ) ); mappingNameLab.setToolTipText( BaseMessages .getString( HBaseOutputMeta.PKG, "HBaseOutputDialog.MappingName.TipText" ) ); props.setLook( mappingNameLab ); fd = new FormData(); fd.left = new FormAttachment( 0, 0 ); fd.top = new FormAttachment( m_mappedTableNamesCombo, margin ); fd.right = new FormAttachment( middle, -margin ); mappingNameLab.setLayoutData( fd ); m_mappingNamesBut = new Button( wConfigComp, SWT.PUSH | SWT.CENTER ); props.setLook( m_mappingNamesBut ); m_mappingNamesBut.setText( BaseMessages.getString( HBaseOutputMeta.PKG, "HBaseOutputDialog.MappingName.Button" ) ); fd = new FormData(); fd.right = new FormAttachment( 100, 0 ); fd.top = new FormAttachment( m_mappedTableNamesCombo, 0 ); m_mappingNamesBut.setLayoutData( fd ); m_mappingNamesBut.addSelectionListener( new SelectionAdapter() { @Override public void widgetSelected( SelectionEvent e ) { setupMappingNamesForTable( false ); if ( 
m_mappingNamesCombo.getItemCount() > 0 ) { m_mappingNamesCombo.setListVisible( true ); } } } ); m_mappingNamesCombo = new CCombo( wConfigComp, SWT.BORDER ); props.setLook( m_mappingNamesCombo ); m_mappingNamesCombo.addModifyListener( new ModifyListener() { public void modifyText( ModifyEvent e ) { m_currentMeta.setChanged(); m_mappingNamesCombo.setToolTipText( transMeta.environmentSubstitute( m_mappingNamesCombo.getText() ) ); m_storeMappingInStepMetaData.setSelection( false ); } } ); fd = new FormData(); fd.left = new FormAttachment( middle, 0 ); fd.top = new FormAttachment( m_mappedTableNamesCombo, margin ); fd.right = new FormAttachment( m_mappingNamesBut, -margin ); m_mappingNamesCombo.setLayoutData( fd ); // store mapping in meta data Label storeMapping = new Label( wConfigComp, SWT.RIGHT ); storeMapping.setText( BaseMessages.getString( HBaseOutputMeta.PKG, "HBaseOutputDialog.StoreMapping.Label" ) ); storeMapping .setToolTipText( BaseMessages.getString( HBaseOutputMeta.PKG, "HBaseOutputDialog.StoreMapping.TipText" ) ); props.setLook( storeMapping ); fd = new FormData(); fd.left = new FormAttachment( 0, 0 ); fd.top = new FormAttachment( m_mappingNamesCombo, margin ); fd.right = new FormAttachment( middle, -margin ); storeMapping.setLayoutData( fd ); m_storeMappingInStepMetaData = new Button( wConfigComp, SWT.CHECK ); props.setLook( m_storeMappingInStepMetaData ); fd = new FormData(); fd.right = new FormAttachment( 100, 0 ); fd.left = new FormAttachment( middle, 0 ); fd.top = new FormAttachment( m_mappingNamesCombo, margin ); m_storeMappingInStepMetaData.setLayoutData( fd ); //delete rows by key option Label deleteRows = new Label( wConfigComp, SWT.RIGHT ); deleteRows.setText( BaseMessages.getString( HBaseOutputMeta.PKG, "HBaseOutputDialog.DeleteRowKey.Label" ) ); deleteRows.setToolTipText( BaseMessages.getString( HBaseOutputMeta.PKG, "HBaseOutputDialog.DeleteRowKey.TipText" ) ); props.setLook( deleteRows ); fd = new FormData(); fd.left = new FormAttachment( 0, 0 ); fd.top = new FormAttachment( m_storeMappingInStepMetaData, margin ); fd.right = new FormAttachment( middle, -margin ); deleteRows.setLayoutData( fd ); m_deleteRowKeyBut = new Button( wConfigComp, SWT.CHECK ); props.setLook( m_deleteRowKeyBut ); fd = new FormData(); fd.right = new FormAttachment( 100, 0 ); fd.left = new FormAttachment( middle, 0 ); fd.top = new FormAttachment( m_storeMappingInStepMetaData, margin ); m_deleteRowKeyBut.setLayoutData( fd ); m_deleteRowKeyBut.addSelectionListener( new SelectionAdapter() { public void widgetSelected( SelectionEvent se ) { walEnabled(); }; } ); // disable write to WAL Label disableWALLab = new Label( wConfigComp, SWT.RIGHT ); disableWALLab.setText( BaseMessages.getString( HBaseOutputMeta.PKG, "HBaseOutputDialog.DisableWAL.Label" ) ); disableWALLab .setToolTipText( BaseMessages.getString( HBaseOutputMeta.PKG, "HBaseOutputDialog.DisableWAL.TipText" ) ); props.setLook( disableWALLab ); fd = new FormData(); fd.left = new FormAttachment( 0, 0 ); fd.top = new FormAttachment( m_deleteRowKeyBut, margin ); fd.right = new FormAttachment( middle, -margin ); disableWALLab.setLayoutData( fd ); m_disableWriteToWALBut = new Button( wConfigComp, SWT.CHECK | SWT.CENTER ); m_disableWriteToWALBut.setToolTipText( BaseMessages.getString( HBaseOutputMeta.PKG, "HBaseOutputDialog.DisableWAL.TipText" ) ); props.setLook( m_disableWriteToWALBut ); fd = new FormData(); fd.left = new FormAttachment( middle, 0 ); fd.top = new FormAttachment( m_deleteRowKeyBut, margin ); // fd.right = new FormAttachment(middle, 
-margin); m_disableWriteToWALBut.setLayoutData( fd ); // write buffer size line Label writeBufferLab = new Label( wConfigComp, SWT.RIGHT ); writeBufferLab.setText( BaseMessages.getString( HBaseOutputMeta.PKG, "HBaseOutputDialog.WriteBufferSize.Label" ) ); writeBufferLab.setToolTipText( BaseMessages.getString( HBaseOutputMeta.PKG, "HBaseOutputDialog.WriteBufferSize.TipText" ) ); props.setLook( writeBufferLab ); fd = new FormData(); fd.left = new FormAttachment( 0, 0 ); fd.top = new FormAttachment( m_disableWriteToWALBut, margin ); fd.right = new FormAttachment( middle, -margin ); writeBufferLab.setLayoutData( fd ); m_writeBufferSizeText = new TextVar( transMeta, wConfigComp, SWT.SINGLE | SWT.LEFT | SWT.BORDER ); props.setLook( m_writeBufferSizeText ); m_writeBufferSizeText.addModifyListener( new ModifyListener() { public void modifyText( ModifyEvent e ) { m_writeBufferSizeText.setToolTipText( transMeta.environmentSubstitute( m_writeBufferSizeText.getText() ) ); } } ); fd = new FormData(); fd.left = new FormAttachment( middle, 0 ); fd.top = new FormAttachment( m_disableWriteToWALBut, margin ); fd.right = new FormAttachment( 100, 0 ); m_writeBufferSizeText.setLayoutData( fd ); fd = new FormData(); fd.left = new FormAttachment( 0, 0 ); fd.top = new FormAttachment( 0, 0 ); fd.right = new FormAttachment( 100, 0 ); fd.bottom = new FormAttachment( 100, 0 ); wConfigComp.setLayoutData( fd ); wConfigComp.layout(); m_wConfigTab.setControl( wConfigComp ); // mapping editor tab m_editorTab = new CTabItem( m_wTabFolder, SWT.NONE ); m_editorTab.setText( BaseMessages.getString( HBaseOutputMeta.PKG, "HBaseOutputDialog.MappingEditorTab.TabTitle" ) ); m_mappingEditor = new MappingEditor( shell, m_wTabFolder, this, this, SWT.FULL_SELECTION | SWT.MULTI, true, props, transMeta, namedClusterService, runtimeTestActionService, runtimeTester, namedClusterServiceLocator ); fd = new FormData(); fd.top = new FormAttachment( 0, 0 ); fd.left = new FormAttachment( 0, 0 ); m_mappingEditor.setLayoutData( fd ); fd = new FormData(); fd.left = new FormAttachment( 0, 0 ); fd.top = new FormAttachment( 0, 0 ); fd.bottom = new FormAttachment( 100, -margin * 2 ); fd.right = new FormAttachment( 100, 0 ); m_mappingEditor.setLayoutData( fd ); m_mappingEditor.layout(); m_editorTab.setControl( m_mappingEditor ); // ----------------- fd = new FormData(); fd.left = new FormAttachment( 0, 0 ); fd.top = new FormAttachment( m_stepnameText, margin ); fd.right = new FormAttachment( 100, 0 ); fd.bottom = new FormAttachment( 100, -50 ); m_wTabFolder.setLayoutData( fd ); // Buttons inherited from BaseStepDialog wOK = new Button( shell, SWT.PUSH ); wOK.setText( BaseMessages.getString( HBaseOutputMeta.PKG, "System.Button.OK" ) ); wCancel = new Button( shell, SWT.PUSH ); wCancel.setText( BaseMessages.getString( HBaseOutputMeta.PKG, "System.Button.Cancel" ) ); setButtonPositions( new Button[] { wOK, wCancel }, margin, m_wTabFolder ); // Add listeners lsCancel = new Listener() { public void handleEvent( Event e ) { cancel(); } }; lsOK = new Listener() { public void handleEvent( Event e ) { ok(); } }; wCancel.addListener( SWT.Selection, lsCancel ); wOK.addListener( SWT.Selection, lsOK ); lsDef = new SelectionAdapter() { @Override public void widgetDefaultSelected( SelectionEvent e ) { ok(); } }; m_stepnameText.addSelectionListener( lsDef ); // Detect X or ALT-F4 or something that kills this window... 
shell.addShellListener( new ShellAdapter() { @Override public void shellClosed( ShellEvent e ) { cancel(); } } ); m_wTabFolder.setSelection( 0 ); setSize(); getData(); ServiceStatus serviceStatus = m_currentMeta.getServiceStatus(); if ( !serviceStatus.isOk() ) { new ErrorDialog( shell, Messages.getString( "Dialog.Error" ), Messages.getString( "HBaseOutput.Error.ServiceStatus" ), serviceStatus.getException() ); } shell.open(); while ( !shell.isDisposed() ) { if ( !display.readAndDispatch() ) { display.sleep(); } } return stepname; } protected void cancel() { stepname = null; m_currentMeta.setChanged( changed ); dispose(); } protected void ok() { if ( Utils.isEmpty( m_stepnameText.getText() ) ) { MessageBox mb = new MessageBox( shell, SWT.OK | SWT.ICON_ERROR ); mb.setText( BaseMessages.getString( HBaseOutputMeta.PKG, "System.StepJobEntryNameMissing.Title" ) ); mb.setMessage( BaseMessages.getString( HBaseOutputMeta.PKG, "System.JobEntryNameMissing.Msg" ) ); mb.open(); return; } if ( namedClusterWidget.getSelectedNamedCluster() == null ) { MessageBox mb = new MessageBox( shell, SWT.OK | SWT.ICON_ERROR ); mb.setText( BaseMessages.getString( HBaseOutputMeta.PKG, "Dialog.Error" ) ); mb.setMessage( BaseMessages.getString( HBaseOutputMeta.PKG, "HBaseOutputDialog.NamedClusterNotSelected.Msg" ) ); mb.open(); return; } else { NamedCluster nc = namedClusterWidget.getSelectedNamedCluster(); if ( !nc.isUseGateway() && StringUtils.isEmpty( nc.getZooKeeperHost() ) ) { MessageBox mb = new MessageBox( shell, SWT.OK | SWT.ICON_ERROR ); mb.setText( BaseMessages.getString( HBaseOutputMeta.PKG, "Dialog.Error" ) ); mb.setMessage( BaseMessages.getString( HBaseOutputMeta.PKG, "HBaseOutputDialog.NamedClusterMissingValues.Msg" ) ); mb.open(); return; } } stepname = m_stepnameText.getText(); updateMetaConnectionDetails( m_currentMeta ); if ( m_storeMappingInStepMetaData.getSelection() ) { if ( Utils.isEmpty( m_mappingNamesCombo.getText() ) ) { List problems = new ArrayList(); Mapping toSet = m_mappingEditor.getMapping( false, problems, false ); if ( problems.size() > 0 ) { StringBuffer p = new StringBuffer(); for ( String s : problems ) { p.append( s ).append( "\n" ); } MessageDialog md = new MessageDialog( shell, BaseMessages.getString( HBaseOutputMeta.PKG, "HBaseOutputDialog.Error.IssuesWithMapping.Title" ), null, BaseMessages.getString( HBaseOutputMeta.PKG, "HBaseOutputDialog.Error.IssuesWithMapping" ) + ":\n\n" + p.toString(), MessageDialog.WARNING, new String[] { BaseMessages.getString( HBaseOutputMeta.PKG, "HBaseOutputDialog.Error.IssuesWithMapping.ButtonOK" ), BaseMessages.getString( HBaseOutputMeta.PKG, "HBaseOutputDialog.Error.IssuesWithMapping.ButtonCancel" ) }, 0 ); MessageDialog.setDefaultImage( GUIResource.getInstance().getImageSpoon() ); int idx = md.open() & 0xFF; if ( idx == 1 || idx == 255 /* 255 = escape pressed */ ) { return; // Cancel } } m_currentMeta.setMapping( toSet ); } else { HBaseConnection connection = null; try { connection = getHBaseConnection(); MappingAdmin admin = new MappingAdmin( connection ); Mapping current = null; current = admin.getMapping( transMeta.environmentSubstitute( m_mappedTableNamesCombo.getText() ), transMeta .environmentSubstitute( m_mappingNamesCombo.getText() ) ); m_currentMeta.setMapping( current ); m_currentMeta.setTargetMappingName( "" ); } catch ( Exception e ) { logError( Messages.getString( "HBaseOutputDialog.ErrorMessage.UnableToGetMapping" ) + " \"" + transMeta.environmentSubstitute( m_mappedTableNamesCombo.getText() + "," + transMeta.environmentSubstitute( 
m_mappingNamesCombo.getText() ) + "\"" ), e ); new ErrorDialog( shell, Messages.getString( "HBaseOutputDialog.ErrorMessage.UnableToGetMapping" ), Messages .getString( "HBaseOutputDialog.ErrorMessage.UnableToGetMapping" ) + " \"" + transMeta.environmentSubstitute( m_mappedTableNamesCombo.getText() + "," + transMeta.environmentSubstitute( m_mappingNamesCombo.getText() ) + "\"" ), e ); } finally { try { if ( connection != null ) { connection.close(); } } catch ( Exception e ) { String msg = Messages.getString( "HBaseInputDialog.ErrorMessage.FailedClosingHBaseConnection" ); logError( msg, e ); new ErrorDialog( shell, msg, msg, e ); } } } } else { // we're going to use a mapping stored in HBase - null out any stored // mapping m_currentMeta.setMapping( null ); } if ( !m_originalMeta.equals( m_currentMeta ) ) { m_currentMeta.setChanged(); changed = m_currentMeta.hasChanged(); } dispose(); } protected void updateMetaConnectionDetails( HBaseOutputMeta meta ) { if ( Utils.isEmpty( m_stepnameText.getText() ) ) { return; } NamedCluster nc = namedClusterWidget.getSelectedNamedCluster(); if ( nc != null ) { meta.setNamedCluster( nc ); } meta.setCoreConfigURL( m_coreConfigText.getText() ); meta.setDefaulConfigURL( m_defaultConfigText.getText() ); meta.setTargetTableName( m_mappedTableNamesCombo.getText() ); meta.setTargetMappingName( m_mappingNamesCombo.getText() ); meta.setDeleteRowKey( m_deleteRowKeyBut.getSelection() ); meta.setDisableWriteToWAL( m_disableWriteToWALBut.getSelection() ); meta.setWriteBufferSize( m_writeBufferSizeText.getText() ); } private void getData() { namedClusterWidget.setSelectedNamedCluster( m_currentMeta.getNamedCluster().getName() ); if ( !Utils.isEmpty( m_currentMeta.getCoreConfigURL() ) ) { m_coreConfigText.setText( m_currentMeta.getCoreConfigURL() ); } if ( !Utils.isEmpty( m_currentMeta.getDefaultConfigURL() ) ) { m_defaultConfigText.setText( m_currentMeta.getDefaultConfigURL() ); } if ( !Utils.isEmpty( m_currentMeta.getTargetTableName() ) ) { m_mappedTableNamesCombo.setText( m_currentMeta.getTargetTableName() ); } if ( !Utils.isEmpty( m_currentMeta.getTargetMappingName() ) ) { m_mappingNamesCombo.setText( m_currentMeta.getTargetMappingName() ); } m_deleteRowKeyBut.setSelection( m_currentMeta.getDeleteRowKey() ); m_disableWriteToWALBut.setSelection( m_currentMeta.getDisableWriteToWAL() ); walEnabled(); if ( !Utils.isEmpty( m_currentMeta.getWriteBufferSize() ) ) { m_writeBufferSizeText.setText( m_currentMeta.getWriteBufferSize() ); } if ( Utils.isEmpty( m_currentMeta.getTargetMappingName() ) && m_currentMeta.getMapping() != null ) { m_mappingEditor.setMapping( m_currentMeta.getMapping() ); m_storeMappingInStepMetaData.setSelection( true ); } } @Override public HBaseService getHBaseService() throws ClusterInitializationException { NamedCluster nc = namedClusterWidget.getSelectedNamedCluster(); return namedClusterServiceLocator.getService( nc, HBaseService.class ); } @Override public HBaseConnection getHBaseConnection() throws IOException, ClusterInitializationException { /* * URL coreConf = null; URL defaultConf = null; */ String coreConf = ""; String defaultConf = ""; String zookeeperHosts = ""; if ( !Utils.isEmpty( m_coreConfigText.getText() ) ) { coreConf = transMeta.environmentSubstitute( m_coreConfigText.getText() ); } if ( !Utils.isEmpty( m_defaultConfigText.getText() ) ) { defaultConf = transMeta.environmentSubstitute( m_defaultConfigText.getText() ); } NamedCluster nc = namedClusterWidget.getSelectedNamedCluster(); if ( nc != null && !nc.isUseGateway() ) { 
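      // Only resolve the ZooKeeper host(s) for direct (non-gateway) clusters; for gateway clusters this
      // lookup, and the missing-connection-details check below, are skipped.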
zookeeperHosts = transMeta.environmentSubstitute( nc.getZooKeeperHost() ); } if ( Utils.isEmpty( zookeeperHosts ) && Utils.isEmpty( coreConf ) && Utils.isEmpty( defaultConf ) && ( nc != null && !nc.isUseGateway() ) ) { throw new IOException( BaseMessages.getString( HBaseOutputMeta.PKG, "MappingDialog.Error.Message.CantConnectNoConnectionDetailsProvided" ) ); } return getHBaseService().getHBaseConnection( transMeta, coreConf, defaultConf, null ); } private void setupMappedTableNames() { HBaseConnection connection = null; Cursor busy = new Cursor( shell.getDisplay(), SWT.CURSOR_WAIT ); try { shell.setCursor( busy ); connection = getHBaseConnection(); MappingAdmin admin = new MappingAdmin( connection ); Set tableNames = admin.getMappedTables( parseNamespaceFromTableName( null ) ); m_mappedTableNamesCombo.removeAll(); for ( String s : tableNames ) { m_mappedTableNamesCombo.add( s ); } } catch ( Exception ex ) { logError( BaseMessages.getString( HBaseOutputMeta.PKG, "HBaseOutputDialog.ErrorMessage.UnableToConnect" ), ex ); new ErrorDialog( shell, BaseMessages.getString( HBaseOutputMeta.PKG, "HBaseOutputDialog.ErrorMessage." + "UnableToConnect" ), BaseMessages.getString( HBaseOutputMeta.PKG, "HBaseOutputDialog.ErrorMessage.UnableToConnect" ), ex ); } finally { shell.setCursor( null ); busy.dispose(); try { if ( connection != null ) { connection.close(); } } catch ( Exception e ) { String msg = BaseMessages.getString( HBaseOutputMeta.PKG, "HBaseInputDialog.ErrorMessage.FailedClosingHBaseConnection" ); logError( msg, e ); new ErrorDialog( shell, msg, msg, e ); } } } private void setupMappingNamesForTable( boolean quiet ) { m_mappingNamesCombo.removeAll(); if ( !Utils.isEmpty( m_mappedTableNamesCombo.getText() ) ) { HBaseConnection connection = null; try { connection = getHBaseConnection(); MappingAdmin admin = new MappingAdmin( connection ); String mappedTableName = MappingAdmin.getTableNameFromVariable( m_currentMeta, m_mappedTableNamesCombo.getText().trim() ); List mappingNames = admin.getMappingNames( mappedTableName ); for ( String n : mappingNames ) { m_mappingNamesCombo.add( n ); } } catch ( Exception ex ) { if ( !quiet ) { logError( BaseMessages.getString( HBaseOutputMeta.PKG, "HBaseInputDialog.ErrorMessage.UnableToConnect" ), ex ); new ErrorDialog( shell, BaseMessages.getString( HBaseOutputMeta.PKG, "HBaseInputDialog.ErrorMessage." 
+ "UnableToConnect" ), BaseMessages.getString( HBaseOutputMeta.PKG, "HBaseInputDialog.ErrorMessage.UnableToConnect" ), ex ); } } finally { try { if ( connection != null ) { connection.close(); } } catch ( Exception e ) { if ( !quiet ) { String msg = BaseMessages.getString( HBaseOutputMeta.PKG, "HBaseInputDialog.ErrorMessage.FailedClosingHBaseConnection" ); logError( msg, e ); new ErrorDialog( shell, msg, msg, e ); } } } } } public RowMetaInterface getIncomingFields() { StepMeta stepMeta = transMeta.findStep( stepname ); RowMetaInterface result = null; try { if ( stepMeta != null ) { result = transMeta.getPrevStepFields( stepMeta ); } } catch ( KettleException ex ) { // quietly ignore } return result; } public String getCurrentConfiguration() { updateMetaConnectionDetails( m_configurationMeta ); return m_configurationMeta.getXML(); } public void walEnabled() { m_disableWriteToWALBut.setEnabled( !m_deleteRowKeyBut.getSelection() ); } private String parseNamespaceFromTableName( String defaultNamespaceIfNoneSpecified ) { return HbaseUtil.parseNamespaceFromTableName( transMeta.environmentSubstitute( m_mappedTableNamesCombo.getText() ), defaultNamespaceIfNoneSpecified ); } } ================================================ FILE: kettle-plugins/hbase/core/src/main/java/org/pentaho/big/data/kettle/plugins/hbase/output/HBaseOutputMeta.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.hbase.output; import com.google.common.annotations.VisibleForTesting; import org.pentaho.big.data.api.services.BigDataServicesHelper; import org.pentaho.big.data.impl.cluster.NamedClusterManager; import org.pentaho.big.data.kettle.plugins.hbase.HbaseUtil; import org.pentaho.big.data.kettle.plugins.hbase.MappingDefinition; import org.pentaho.big.data.kettle.plugins.hbase.NamedClusterLoadSaveUtil; import org.pentaho.big.data.kettle.plugins.hbase.ServiceStatus; import org.pentaho.big.data.kettle.plugins.hbase.mapping.MappingUtils; import org.pentaho.big.data.kettle.plugins.hbase.meta.AELHBaseMappingImpl; import org.pentaho.di.core.CheckResult; import org.pentaho.di.core.CheckResultInterface; import org.pentaho.di.core.annotations.Step; import org.pentaho.di.core.database.DatabaseMeta; import org.pentaho.di.core.exception.KettleException; import org.pentaho.di.core.exception.KettleXMLException; import org.pentaho.di.core.injection.Injection; import org.pentaho.di.core.injection.InjectionDeep; import org.pentaho.di.core.injection.InjectionSupported; import org.pentaho.di.core.row.RowMetaInterface; import org.pentaho.di.core.service.PluginServiceLoader; import org.pentaho.di.core.util.Utils; import org.pentaho.di.core.variables.VariableSpace; import org.pentaho.di.core.variables.Variables; import org.pentaho.di.core.xml.XMLHandler; import org.pentaho.di.metastore.MetaStoreConst; import org.pentaho.di.repository.ObjectId; import org.pentaho.di.repository.Repository; import org.pentaho.di.trans.Trans; import org.pentaho.di.trans.TransMeta; import org.pentaho.di.trans.step.BaseStepMeta; import org.pentaho.di.trans.step.StepDataInterface; import org.pentaho.di.trans.step.StepInterface; import 
org.pentaho.di.trans.step.StepMeta; import org.pentaho.di.trans.step.StepMetaInterface; import org.pentaho.hadoop.shim.api.cluster.ClusterInitializationException; import org.pentaho.hadoop.shim.api.cluster.NamedCluster; import org.pentaho.hadoop.shim.api.cluster.NamedClusterService; import org.pentaho.hadoop.shim.api.cluster.NamedClusterServiceLocator; import org.pentaho.hadoop.shim.api.hbase.HBaseService; import org.pentaho.hadoop.shim.api.hbase.mapping.Mapping; import org.pentaho.metastore.api.IMetaStore; import org.pentaho.metastore.locator.api.MetastoreLocator; import org.pentaho.runtime.test.RuntimeTester; import org.pentaho.runtime.test.action.RuntimeTestActionService; import org.pentaho.runtime.test.action.impl.RuntimeTestActionServiceImpl; import org.pentaho.runtime.test.impl.RuntimeTesterImpl; import org.w3c.dom.Node; import java.util.Collection; import java.util.List; /** * Class providing an output step for writing data to an HBase table according to meta data column/type mapping info * stored in a separate HBase table called "pentaho_mappings". See org.pentaho.hbase.mapping.Mapping for details on the * meta data format. * * @author Mark Hall (mhall{[at]}pentaho{[dot]}com) */ @Step( id = "HBaseOutput", image = "HBO.svg", name = "HBaseOutput.Name", description = "HBaseOutput.Description", categoryDescription = "i18n:org.pentaho.di.trans.step:BaseStep.Category.BigData", documentationUrl = "pdi-transformation-steps-reference-overview/hbase-output", i18nPackageName = "org.pentaho.di.trans.steps.hbaseoutput" ) @InjectionSupported( localizationPrefix = "HBaseOutput.Injection.", groups = { "MAPPING" } ) public class HBaseOutputMeta extends BaseStepMeta implements StepMetaInterface { protected static Class PKG = HBaseOutputMeta.class; /** * path/url to hbase-site.xml */ @Injection( name = "HBASE_SITE_XML_URL" ) protected String m_coreConfigURL; /** * path/url to hbase-default.xml */ @Injection( name = "HBASE_DEFAULT_XML_URL" ) protected String m_defaultConfigURL; /** * the name of the HBase table to write to */ @Injection( name = "TARGET_TABLE_NAME" ) protected String m_targetTableName; /** * the name of the mapping for columns/types for the target table */ @Injection( name = "TARGET_MAPPING_NAME" ) protected String m_targetMappingName; /** * if true then the incoming column with row key from the mapping will be deleted */ @Injection( name = "DELETE_ROW_KEY" ) protected boolean m_deleteRowKey; /** * if true then the WAL will not be written to */ @Injection( name = "DISABLE_WRITE_TO_WAL" ) protected boolean m_disableWriteToWAL; /** * The size of the write buffer in bytes (empty - default from hbase-default.xml is used) */ @Injection( name = "WRITE_BUFFER_SIZE" ) protected String m_writeBufferSize; /** * The mapping to use if we are not loading one dynamically at runtime from HBase itself */ protected Mapping m_mapping; @InjectionDeep protected MappingDefinition mappingDefinition; private NamedCluster namedCluster; private final NamedClusterLoadSaveUtil namedClusterLoadSaveUtil; private final NamedClusterService namedClusterService; private final NamedClusterServiceLocator namedClusterServiceLocator; private final RuntimeTestActionService runtimeTestActionService; private final RuntimeTester runtimeTester; private MetastoreLocator metaStoreService; private ServiceStatus serviceStatus = ServiceStatus.OK; public NamedClusterService getNamedClusterService() { return namedClusterService; } public NamedClusterServiceLocator getNamedClusterServiceLocator() { return namedClusterServiceLocator; } 
public RuntimeTestActionService getRuntimeTestActionService() { return runtimeTestActionService; } public RuntimeTester getRuntimeTester() { return runtimeTester; } public HBaseOutputMeta() { this( NamedClusterManager.getInstance(), BigDataServicesHelper.getNamedClusterServiceLocator(), RuntimeTestActionServiceImpl.getInstance(), RuntimeTesterImpl.getInstance(), new NamedClusterLoadSaveUtil(), null ); } public HBaseOutputMeta( NamedClusterService namedClusterService, NamedClusterServiceLocator namedClusterServiceLocator, RuntimeTestActionService runtimeTestActionService, RuntimeTester runtimeTester ) { this( namedClusterService, namedClusterServiceLocator, runtimeTestActionService, runtimeTester, new NamedClusterLoadSaveUtil(), null ); } protected synchronized MetastoreLocator getMetastoreService() { if ( this.metaStoreService == null ) { try { Collection metastoreLocators = PluginServiceLoader.loadServices( MetastoreLocator.class ); this.metaStoreService = metastoreLocators.stream().findFirst().get(); } catch ( Exception e ) { getLog().logError( "Error getting MetastoreLocator", e ); } } return this.metaStoreService; } @VisibleForTesting HBaseOutputMeta( NamedClusterService namedClusterService, NamedClusterServiceLocator namedClusterServiceLocator, RuntimeTestActionService runtimeTestActionService, RuntimeTester runtimeTester, NamedClusterLoadSaveUtil namedClusterLoadSaveUtil, MetastoreLocator metaStore ) { this.namedClusterService = namedClusterService; this.namedClusterServiceLocator = namedClusterServiceLocator; this.runtimeTestActionService = runtimeTestActionService; this.runtimeTester = runtimeTester; this.namedClusterLoadSaveUtil = namedClusterLoadSaveUtil; this.metaStoreService = metaStore; } /** * Set the mapping to use for decoding the row * * @param m the mapping to use */ public void setMapping( Mapping m ) { m_mapping = m; } /** * Get the mapping to use for decoding the row * * @return the mapping to use */ public Mapping getMapping() { return m_mapping; } public void setCoreConfigURL( String coreConfig ) { m_coreConfigURL = coreConfig; } public String getCoreConfigURL() { return m_coreConfigURL; } public void setDefaulConfigURL( String defaultConfig ) { m_defaultConfigURL = defaultConfig; } public String getDefaultConfigURL() { return m_defaultConfigURL; } public void setTargetTableName( String targetTable ) { m_targetTableName = targetTable; } public String getTargetTableName() { return m_targetTableName; } public void setTargetMappingName( String targetMapping ) { m_targetMappingName = targetMapping; } public String getTargetMappingName() { return m_targetMappingName; } public boolean getDeleteRowKey() { return m_deleteRowKey; } public void setDeleteRowKey( boolean m_deleteRowKey ) { this.m_deleteRowKey = m_deleteRowKey; } public void setDisableWriteToWAL( boolean d ) { m_disableWriteToWAL = d; } public boolean getDisableWriteToWAL() { return m_disableWriteToWAL; } public void setWriteBufferSize( String size ) { m_writeBufferSize = size; } public String getWriteBufferSize() { return m_writeBufferSize; } void applyInjection( VariableSpace space ) throws KettleException { if ( namedCluster == null ) { throw new KettleException( "Named cluster was not initialized!" ); } if ( namedCluster.getShimIdentifier() == null && getParentStepMeta() != null && getParentStepMeta().getParentTransMeta() != null ) { // If here we have a template for the named cluster, not the real thing. This is likely due to not having // the namedCluster present in the local metastore. 
Time to load it from the embedded Metastore which is only // present at runtime NamedCluster nc = namedClusterService.getNamedClusterByName( namedCluster.getName(), getMetastoreService().getExplicitMetastore( getParentStepMeta().getParentTransMeta().getEmbeddedMetastoreProviderKey() ) ); if ( nc != null && nc.getShimIdentifier() != null ) { namedCluster = nc; //Overwrite with the real one } } try { if ( mappingDefinition == null ) { ServiceStatus serviceStatus = this.getServiceStatus(); if ( !serviceStatus.isOk() ) { throw serviceStatus.getException(); } return; } HBaseService hBaseService = getService(); Mapping tempMapping = null; tempMapping = getMapping( mappingDefinition, hBaseService ); setMapping( tempMapping ); } catch ( Exception e ) { throw new KettleException( e ); } } @VisibleForTesting Mapping getMapping( MappingDefinition mappingDefinition, HBaseService hBaseService ) throws KettleException { return MappingUtils.getMapping( mappingDefinition, hBaseService ); } public void check( List remarks, TransMeta transMeta, StepMeta stepMeta, RowMetaInterface prev, String[] input, String[] output, RowMetaInterface info ) { CheckResult cr; if ( ( prev == null ) || ( prev.size() == 0 ) ) { cr = new CheckResult( CheckResult.TYPE_RESULT_WARNING, "Not receiving any fields from previous steps!", stepMeta ); remarks.add( cr ); } else { cr = new CheckResult( CheckResult.TYPE_RESULT_OK, "Step is connected to previous one, receiving " + prev.size() + " fields", stepMeta ); remarks.add( cr ); } // See if we have input streams leading to this step! if ( input.length > 0 ) { cr = new CheckResult( CheckResult.TYPE_RESULT_OK, "Step is receiving info from other steps.", stepMeta ); remarks.add( cr ); } else { cr = new CheckResult( CheckResult.TYPE_RESULT_ERROR, "No input received from other steps!", stepMeta ); remarks.add( cr ); } } @Override public String getXML() { try { applyInjection( new Variables() ); } catch ( KettleException e ) { logError( "Error occurred while injecting metadata. 
Transformation meta could be incorrect!", e ); } StringBuilder retval = new StringBuilder(); namedClusterLoadSaveUtil .getXml( retval, namedClusterService, namedCluster, MetaStoreConst.getDefaultMetastore(), getLog() ); if ( parentStepMeta != null && parentStepMeta.getParentTransMeta() != null ) { parentStepMeta.getParentTransMeta().getNamedClusterEmbedManager().addClusterToMeta( namedCluster.getName() ); } if ( !Utils.isEmpty( m_coreConfigURL ) ) { retval.append( "\n " ).append( XMLHandler.addTagValue( "core_config_url", m_coreConfigURL ) ); } if ( !Utils.isEmpty( m_defaultConfigURL ) ) { retval.append( "\n " ).append( XMLHandler.addTagValue( "default_config_url", m_defaultConfigURL ) ); } if ( !Utils.isEmpty( m_targetTableName ) ) { retval.append( "\n " ).append( XMLHandler.addTagValue( "target_table_name", m_targetTableName ) ); } if ( !Utils.isEmpty( m_targetMappingName ) ) { retval.append( "\n " ).append( XMLHandler.addTagValue( "target_mapping_name", m_targetMappingName ) ); } retval.append( "\n " ).append( XMLHandler.addTagValue( "delete_rows_by_key", m_deleteRowKey ) ); if ( !Utils.isEmpty( m_writeBufferSize ) ) { retval.append( "\n " ).append( XMLHandler.addTagValue( "write_buffer_size", m_writeBufferSize ) ); } retval.append( "\n " ).append( XMLHandler.addTagValue( "disable_wal", m_disableWriteToWAL ) ); if ( m_mapping != null ) { retval.append( m_mapping.getXML() ); } return retval.toString(); } public StepInterface getStep( StepMeta stepMeta, StepDataInterface stepDataInterface, int copyNr, TransMeta transMeta, Trans trans ) { return new HBaseOutput( stepMeta, stepDataInterface, copyNr, transMeta, trans, namedClusterServiceLocator ); } public StepDataInterface getStepData() { return new HBaseOutputData(); } @Override public void loadXML( Node stepnode, List databases, IMetaStore metaStore ) throws KettleXMLException { if ( metaStore == null ) { metaStore = getMetastoreService().getMetastore(); } this.namedCluster = namedClusterLoadSaveUtil.loadClusterConfig( namedClusterService, null, null, metaStore, stepnode, getLog() ); m_coreConfigURL = XMLHandler.getTagValue( stepnode, "core_config_url" ); m_defaultConfigURL = XMLHandler.getTagValue( stepnode, "default_config_url" ); m_targetTableName = HbaseUtil.expandLegacyTableNameOnLoad( XMLHandler.getTagValue( stepnode, "target_table_name" ) ); m_targetMappingName = XMLHandler.getTagValue( stepnode, "target_mapping_name" ); String deleteKeys = XMLHandler.getTagValue( stepnode, "delete_rows_by_key" ); if ( !Utils.isEmpty( deleteKeys ) ) { m_deleteRowKey = deleteKeys.equalsIgnoreCase( "Y" ); } m_writeBufferSize = XMLHandler.getTagValue( stepnode, "write_buffer_size" ); String disableWAL = XMLHandler.getTagValue( stepnode, "disable_wal" ); m_disableWriteToWAL = disableWAL.equalsIgnoreCase( "Y" ); Mapping tempMapping = null; try { tempMapping = getService().getMappingFactory().createMapping(); } catch ( Exception e ) { getLog().logError( e.getMessage() ); } /** * Assume that null mappings indicate * a missing HBaseService. 
Try loading * from KTR */ if ( tempMapping == null ) { tempMapping = new AELHBaseMappingImpl(); } if ( tempMapping != null && tempMapping.loadXML( stepnode ) ) { m_mapping = tempMapping; } else { m_mapping = null; } } @Override public void readRep( Repository rep, IMetaStore metaStore, ObjectId id_step, List databases ) throws KettleException { if ( metaStore == null ) { metaStore = getMetastoreService().getMetastore(); } this.namedCluster = namedClusterLoadSaveUtil.loadClusterConfig( namedClusterService, id_step, rep, metaStore, null, getLog() ); m_coreConfigURL = rep.getStepAttributeString( id_step, 0, "core_config_url" ); m_defaultConfigURL = rep.getStepAttributeString( id_step, 0, "default_config_url" ); m_targetTableName = HbaseUtil.expandLegacyTableNameOnLoad( rep.getStepAttributeString( id_step, 0, "target_table_name" ) ); m_targetMappingName = rep.getStepAttributeString( id_step, 0, "target_mapping_name" ); m_deleteRowKey = rep.getStepAttributeBoolean( id_step, 0, "delete_rows_by_key" ); m_writeBufferSize = rep.getStepAttributeString( id_step, 0, "write_buffer_size" ); m_disableWriteToWAL = rep.getStepAttributeBoolean( id_step, 0, "disable_wal" ); Mapping tempMapping = null; try { tempMapping = getService().getMappingFactory().createMapping(); } catch ( Exception e ) { getLog().logError( e.getMessage() ); } if ( tempMapping != null && tempMapping.readRep( rep, id_step ) ) { m_mapping = tempMapping; } else { m_mapping = null; } } @Override public void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_transformation, ObjectId id_step ) throws KettleException { if ( metaStore == null ) { metaStore = getMetastoreService().getMetastore(); } namedClusterLoadSaveUtil .saveRep( rep, metaStore, id_transformation, id_step, namedClusterService, namedCluster, getLog() ); if ( !Utils.isEmpty( m_coreConfigURL ) ) { rep.saveStepAttribute( id_transformation, id_step, 0, "core_config_url", m_coreConfigURL ); } if ( !Utils.isEmpty( m_defaultConfigURL ) ) { rep.saveStepAttribute( id_transformation, id_step, 0, "default_config_url", m_defaultConfigURL ); } if ( !Utils.isEmpty( m_targetTableName ) ) { rep.saveStepAttribute( id_transformation, id_step, 0, "target_table_name", m_targetTableName ); } if ( !Utils.isEmpty( m_targetMappingName ) ) { rep.saveStepAttribute( id_transformation, id_step, 0, "target_mapping_name", m_targetMappingName ); } rep.saveStepAttribute( id_transformation, id_step, 0, "delete_rows_by_key", m_deleteRowKey ); if ( !Utils.isEmpty( m_writeBufferSize ) ) { rep.saveStepAttribute( id_transformation, id_step, 0, "write_buffer_size", m_writeBufferSize ); } rep.saveStepAttribute( id_transformation, id_step, 0, "disable_wal", m_disableWriteToWAL ); if ( m_mapping != null ) { m_mapping.saveRep( rep, id_transformation, id_step ); } } public void setDefault() { m_coreConfigURL = null; m_defaultConfigURL = null; m_targetTableName = null; m_targetMappingName = null; m_deleteRowKey = false; m_disableWriteToWAL = false; m_writeBufferSize = null; namedCluster = namedClusterService.getClusterTemplate(); } @Override public boolean supportsErrorHandling() { return true; } public NamedCluster getNamedCluster() { return namedCluster; } public void setNamedCluster( NamedCluster namedCluster ) { this.namedCluster = namedCluster; } public MappingDefinition getMappingDefinition() { return mappingDefinition; } public void setMappingDefinition( MappingDefinition mappingDefinition ) { this.mappingDefinition = mappingDefinition; } protected HBaseService getService() throws 
ClusterInitializationException { HBaseService service = null; try { String embeddedMetastoreProviderKey = parentStepMeta == null || parentStepMeta.getParentTransMeta() == null ? null : parentStepMeta.getParentTransMeta().getEmbeddedMetastoreProviderKey(); service = namedClusterServiceLocator.getService( this.namedCluster, HBaseService.class, embeddedMetastoreProviderKey ); this.serviceStatus = ServiceStatus.OK; } catch ( Exception e ) { this.serviceStatus = ServiceStatus.notOk( e ); logError( Messages.getString( "HBaseOutput.Error.ServiceStatus" ) ); throw e; } return service; } public ServiceStatus getServiceStatus() { if ( this.serviceStatus == null ) { this.serviceStatus = ServiceStatus.OK; } return this.serviceStatus; } } ================================================ FILE: kettle-plugins/hbase/core/src/main/java/org/pentaho/big/data/kettle/plugins/hbase/output/KettleRowToHBaseTuple.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.hbase.output; import java.util.Map; import org.pentaho.big.data.kettle.plugins.hbase.mapping.MappingUtils; import org.pentaho.hadoop.shim.api.hbase.ByteConversionUtil; import org.pentaho.hadoop.shim.api.hbase.mapping.Mapping; import org.pentaho.hadoop.shim.api.hbase.mapping.Mapping.KeyType; import org.pentaho.hadoop.shim.api.hbase.meta.HBaseValueMetaInterface; import org.pentaho.hadoop.shim.api.hbase.table.HBasePut; import org.pentaho.hadoop.shim.api.hbase.table.HBaseTableWriteOperationManager; import org.pentaho.di.core.exception.KettleException; import org.pentaho.di.core.row.RowMetaInterface; import org.pentaho.di.core.row.ValueMetaInterface; import org.pentaho.di.i18n.BaseMessages; public class KettleRowToHBaseTuple { private int keyIndex = -1; private ValueMetaInterface keyInMeta; private KeyType keyType; private int familyIndex = -1; private ValueMetaInterface familyInMeta; private int columnIndex = -1; private ValueMetaInterface columnInMeta; private int valueIndex = -1; private ValueMetaInterface valueInMeta; private HBaseValueMetaInterface valueMeta; private int visibilityIndex = -1; private ValueMetaInterface visibilityInMeta; private HBaseValueMetaInterface visibilityMeta; /** * Creates a conversion class that converts an incoming row object with values for the various Tuple fields into an HBasePut * * @param inputRowMeta * The row meta of the incoming row structure * @param tupleMapping * The mapping in use for the step * @param columnMapping * The non-KEY columns in the mapping mapped by column alias * @throws KettleException */ public KettleRowToHBaseTuple( RowMetaInterface inputRowMeta, Mapping tupleMapping, Map columnMapping ) throws KettleException { String keyName = tupleMapping.getKeyName(); keyIndex = inputRowMeta.indexOfValue( keyName ); if ( keyIndex < 0 ) { // No Key Column throw new KettleException( BaseMessages.getString( HBaseOutputMeta.PKG, "HBaseOutput.Error.NoKeyColumn" ) ); } keyInMeta = inputRowMeta.getValueMeta( keyIndex ); keyType = tupleMapping.getKeyType(); familyIndex = inputRowMeta.indexOfValue( Mapping.TupleMapping.FAMILY.toString() ); if ( familyIndex < 0 ) { throw new KettleException( 
BaseMessages.getString( HBaseOutputMeta.PKG, "HBaseOutput.Error.NoFamilyColumn" ) ); } familyInMeta = inputRowMeta.getValueMeta( familyIndex ); columnIndex = inputRowMeta.indexOfValue( Mapping.TupleMapping.COLUMN.toString() ); if ( columnIndex < 0 ) { throw new KettleException( BaseMessages.getString( HBaseOutputMeta.PKG, "HBaseOutput.Error.NoColumnColumn" ) ); } columnInMeta = inputRowMeta.getValueMeta( columnIndex ); // NOTE: TIMESTAMPS cannot be written via HBase Put, so the column is useless for writing valueIndex = inputRowMeta.indexOfValue( Mapping.TupleMapping.VALUE.toString() ); if ( valueIndex < 0 ) { throw new KettleException( BaseMessages.getString( HBaseOutputMeta.PKG, "HBaseOutput.Error.NoValueColumn" ) ); } valueInMeta = inputRowMeta.getValueMeta( valueIndex ); valueMeta = columnMapping.get( valueInMeta.getName() ); // NOTE: The Visibility Index is optional visibilityIndex = inputRowMeta.indexOfValue( MappingUtils.TUPLE_MAPPING_VISIBILITY ); if ( visibilityIndex >= 0 ) { visibilityInMeta = inputRowMeta.getValueMeta( visibilityIndex ); visibilityMeta = columnMapping.get( visibilityInMeta.getName() ); if ( visibilityMeta == null ) { // There is no column mapping for Visibility, so disable it by removing the index in the RowMeta visibilityInMeta = null; visibilityIndex = -1; } } } /** * Creates an HBasePut representing the tuple by extracting data from a row * * @param hBaseTableWriteOperationManager * HBase write manager * @param bu * The Byte Conversion utility (Required for key conversion) * @param row * Object containing row data * @param writeToWAL * Should data be written to WAL? * @return An HBase Put for the tuple * @throws Exception */ public HBasePut createTuplePut( HBaseTableWriteOperationManager hBaseTableWriteOperationManager, ByteConversionUtil bu, Object[] row, boolean writeToWAL ) throws Exception { if ( keyInMeta.isNull( row[keyIndex] ) ) { throw new FieldException( Mapping.TupleMapping.KEY ); } if ( familyInMeta.isNull( row[familyIndex] ) ) { throw new FieldException( Mapping.TupleMapping.FAMILY ); } if ( columnInMeta.isNull( row[columnIndex] ) ) { throw new FieldException( Mapping.TupleMapping.COLUMN ); } if ( valueInMeta.isNull( row[valueIndex] ) ) { throw new FieldException( Mapping.TupleMapping.VALUE ); } byte[] encodedKey = bu.encodeKeyValue( row[keyIndex], keyInMeta, keyType ); HBasePut put = hBaseTableWriteOperationManager.createPut( encodedKey ); // Note: Families must always be string with the implementation of HBasePut String columnFamily = familyInMeta.getString( row[familyIndex] ); boolean binaryColName = false; String columnName = columnInMeta.getString( row[columnIndex] ); if ( columnName.startsWith( "@@@binary@@@" ) ) { // assume hex encoded column name columnName = columnName.replace( "@@@binary@@@", "" ); binaryColName = true; } byte[] encodedValue = valueMeta.encodeColumnValue( row[valueIndex], valueInMeta ); put.addColumn( columnFamily, columnName, binaryColName, encodedValue ); if ( visibilityIndex >= 0 && !visibilityInMeta.isNull( row[visibilityIndex] ) ) { byte[] encodedVisibility = visibilityMeta.encodeColumnValue( row[visibilityIndex], visibilityInMeta ); put.addColumn( columnFamily, MappingUtils.TUPLE_MAPPING_VISIBILITY, false, encodedVisibility ); } put.setWriteToWAL( writeToWAL ); return put; } public static class FieldException extends Exception { public Mapping.TupleMapping field; public FieldException( Mapping.TupleMapping field ) { super(); this.field = field; } public String getFieldString() { return field.toString(); } } } 
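KettleRowToHBaseTuple above only builds an HBasePut from an incoming row; executing or buffering the put is left to the HBaseTableWriteOperationManager owned by the HBaseOutput step. The following is an editor's sketch (not part of the repository) of how the class is typically driven, assuming the mapping, byte-conversion utility and write-operation manager were already obtained from the HBaseService of the selected named cluster, with imports as in KettleRowToHBaseTuple above; the helper method name is illustrative only.

// Editor's sketch: converting one tuple-mode row into a put.
static HBasePut writeTupleRow( HBaseTableWriteOperationManager writeManager, Mapping tupleMapping,
    ByteConversionUtil bytesUtil, RowMetaInterface inputRowMeta, Object[] row, boolean writeToWAL )
    throws Exception {
  // Non-KEY tuple columns keyed by alias, as expected by the constructor above.
  KettleRowToHBaseTuple converter =
      new KettleRowToHBaseTuple( inputRowMeta, tupleMapping, tupleMapping.getMappedColumns() );
  try {
    // Encodes key, family, column, value and the optional visibility label.
    return converter.createTuplePut( writeManager, bytesUtil, row, writeToWAL );
  } catch ( KettleRowToHBaseTuple.FieldException fe ) {
    // Thrown when KEY, FAMILY, COLUMN or VALUE is null in the incoming row; HBaseOutput
    // can route such rows to step error handling instead of aborting the transformation.
    throw new KettleException( "Missing tuple field: " + fe.getFieldString() );
  }
}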
================================================ FILE: kettle-plugins/hbase/core/src/main/java/org/pentaho/big/data/kettle/plugins/hbase/output/Messages.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.hbase.output; import org.pentaho.di.i18n.BaseMessages; public class Messages { public static final Class PKG = Messages.class; public static String getString( String key ) { return BaseMessages.getString( PKG, key ); } public static String getString( String key, String param1 ) { return BaseMessages.getString( PKG, key, param1 ); } public static String getString( String key, String param1, String param2 ) { return BaseMessages.getString( PKG, key, param1, param2 ); } public static String getString( String key, String param1, String param2, String param3 ) { return BaseMessages.getString( PKG, key, param1, param2, param3 ); } public static String getString( String key, String param1, String param2, String param3, String param4 ) { return BaseMessages.getString( PKG, key, param1, param2, param3, param4 ); } public static String getString( String key, String param1, String param2, String param3, String param4, String param5 ) { return BaseMessages.getString( PKG, key, param1, param2, param3, param4, param5 ); } public static String getString( String key, String param1, String param2, String param3, String param4, String param5, String param6 ) { return BaseMessages.getString( PKG, key, param1, param2, param3, param4, param5, param6 ); } } ================================================ FILE: kettle-plugins/hbase/core/src/main/java/org/pentaho/big/data/kettle/plugins/hbase/rowdecoder/HBaseRowDecoder.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. 
* * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.hbase.rowdecoder; import java.lang.reflect.InvocationTargetException; import java.util.List; import org.apache.commons.lang.StringUtils; import org.pentaho.hadoop.shim.api.cluster.NamedClusterServiceLocator; import org.pentaho.big.data.kettle.plugins.hbase.mapping.HBaseRowToKettleTuple; import org.pentaho.hadoop.shim.api.hbase.ByteConversionUtil; import org.pentaho.hadoop.shim.api.hbase.HBaseService; import org.pentaho.hadoop.shim.api.hbase.mapping.Mapping; import org.pentaho.hadoop.shim.api.hbase.meta.HBaseValueMetaInterface; import org.pentaho.di.core.exception.KettleException; import org.pentaho.di.core.row.RowDataUtil; import org.pentaho.di.core.row.RowMetaInterface; import org.pentaho.di.i18n.BaseMessages; import org.pentaho.di.trans.Trans; import org.pentaho.di.trans.TransMeta; import org.pentaho.di.trans.step.BaseStep; import org.pentaho.di.trans.step.StepDataInterface; import org.pentaho.di.trans.step.StepInterface; import org.pentaho.di.trans.step.StepMeta; import org.pentaho.di.trans.step.StepMetaInterface; /** * Step for decoding incoming HBase row objects using a supplied mapping. Can be used in a Hadoop MR job for processing * tables split by org.pentaho.hbase.mapred.PentahoTableInputFormat (see the javadoc for this class for properties that * can be set in the job to control the query) * * @author Mark Hall (mhall{[at]}pentaho{[dot]}com) */ public class HBaseRowDecoder extends BaseStep implements StepInterface { public static final String HBASE_ROW_DECODER_ERROR_NOT_RESULT = "HBaseRowDecoder.Error.NotResult"; public static final String HBASE_ROW_DECODER_ERROR_NOT_IMMUTABLE_BYTES_WRITABLE = "HBaseRowDecoder.Error.NotImmutableBytesWritable"; private static Class hBaseRowDecoderMetaClass = HBaseRowDecoderMeta.class; private final NamedClusterServiceLocator namedClusterServiceLocator; protected HBaseRowDecoderMeta hBaseRowDecoderMeta; protected HBaseRowDecoderData hBaseRowDecoderData; private HBaseService hBaseService; public HBaseRowDecoder( StepMeta stepMeta, StepDataInterface stepDataInterface, int copyNr, TransMeta transMeta, Trans trans, NamedClusterServiceLocator namedClusterServiceLocator ) { super( stepMeta, stepDataInterface, copyNr, transMeta, trans ); this.namedClusterServiceLocator = namedClusterServiceLocator; } /** * The mapping information to use in order to decode HBase column values */ protected Mapping mTableMapping; /** * Information from the mapping */ protected HBaseValueMetaInterface[] mOutputColumns; /** * Index of incoming key value */ protected int mKeyInIndex = -1; /** * Index of incoming HBase row (Result object) */ protected int mResultInIndex = -1; /** * Used when decoding columns to tuples */ protected HBaseRowToKettleTuple mTupleHandler; /** * Bytes util */ protected ByteConversionUtil mBytesUtil; @Override public boolean processRow( StepMetaInterface smi, StepDataInterface sdi ) throws KettleException { Object[] inputRow = getRow(); if ( inputRow == null ) { setOutputDone(); return false; } if ( first ) { first = false; hBaseRowDecoderMeta = (HBaseRowDecoderMeta) smi; hBaseRowDecoderData = (HBaseRowDecoderData) sdi; try { hBaseService = namedClusterServiceLocator.getService( hBaseRowDecoderMeta.getNamedCluster(), HBaseService.class ); mBytesUtil = hBaseService.getByteConversionUtil(); // no configuration needed here because we don't need access to the // actual database, just a few utility routines 
from HBaseShim for // decoding row objects handed to us by the table input format } catch ( Exception ex ) { throw new KettleException( ex.getMessage(), ex ); } mTableMapping = hBaseRowDecoderMeta.getMapping(); if ( mTableMapping == null || StringUtils.isEmpty( mTableMapping.getKeyName() ) ) { throw new KettleException( BaseMessages.getString( hBaseRowDecoderMetaClass, "HBaseRowDecoder.Error.NoMappingInfo" ) ); } if ( mTableMapping.isTupleMapping() ) { mTupleHandler = new HBaseRowToKettleTuple( mBytesUtil ); } mOutputColumns = new HBaseValueMetaInterface[ mTableMapping.getMappedColumns().keySet().size() ]; int k = 0; for ( String alias : mTableMapping.getMappedColumns().keySet() ) { mOutputColumns[ k++ ] = mTableMapping.getMappedColumns().get( alias ); } hBaseRowDecoderData.setOutputRowMeta( getInputRowMeta().clone() ); hBaseRowDecoderMeta.getFields( getTransMeta().getBowl(), hBaseRowDecoderData.getOutputRowMeta(), getStepname(), null, null, this ); // check types first RowMetaInterface inputMeta = getInputRowMeta(); String inKey = environmentSubstitute( hBaseRowDecoderMeta.getIncomingKeyField() ); mKeyInIndex = inputMeta.indexOfValue( inKey ); if ( mKeyInIndex == -1 ) { throw new KettleException( BaseMessages.getString( hBaseRowDecoderMetaClass, "HBaseRowDecoder.Error.UnableToFindHBaseKey", inKey ) ); } try { inputRow[ mKeyInIndex ] = mBytesUtil.convertToImmutableBytesWritable( inputRow[ mKeyInIndex ] ); } catch ( InvocationTargetException | IllegalAccessException | NoSuchMethodException e ) { throw new KettleException( BaseMessages.getString( hBaseRowDecoderMetaClass, HBASE_ROW_DECODER_ERROR_NOT_IMMUTABLE_BYTES_WRITABLE, hBaseRowDecoderMeta.getIncomingKeyField() ) ); } if ( !mBytesUtil.isImmutableBytesWritable( inputRow[ mKeyInIndex ] ) ) { throw new KettleException( BaseMessages.getString( hBaseRowDecoderMetaClass, HBASE_ROW_DECODER_ERROR_NOT_IMMUTABLE_BYTES_WRITABLE, hBaseRowDecoderMeta.getIncomingKeyField() ) ); } String inResult = environmentSubstitute( hBaseRowDecoderMeta.getIncomingResultField() ); mResultInIndex = inputMeta.indexOfValue( inResult ); if ( mResultInIndex == -1 ) { throw new KettleException( BaseMessages.getString( hBaseRowDecoderMetaClass, "HBaseRowDecoder.Error.UnableToFindHBaseRow", inResult ) ); } } try { inputRow[ mKeyInIndex ] = mBytesUtil.convertToImmutableBytesWritable( inputRow[ mKeyInIndex ] ); } catch ( InvocationTargetException | IllegalAccessException | NoSuchMethodException e ) { throw new KettleException( BaseMessages.getString( hBaseRowDecoderMetaClass, HBASE_ROW_DECODER_ERROR_NOT_IMMUTABLE_BYTES_WRITABLE, hBaseRowDecoderMeta.getIncomingKeyField() ) ); } Object hRow = inputRow[ mResultInIndex ]; if ( inputRow[ mKeyInIndex ] != null && hRow != null ) { if ( mTableMapping.isTupleMapping() ) { List hrowToKettleRow = mTupleHandler.hbaseRowToKettleTupleMode( hBaseService.getHBaseValueMetaInterfaceFactory(), hRow, mTableMapping, mTableMapping .getMappedColumns(), hBaseRowDecoderData.getOutputRowMeta() ); for ( Object[] tuple : hrowToKettleRow ) { putRow( hBaseRowDecoderData.getOutputRowMeta(), tuple ); } } else { Object[] outputRowData = RowDataUtil.allocateRowData( mOutputColumns.length + 1 ); // + 1 for key byte[] rowKey = null; try { rowKey = (byte[]) hRow.getClass().getMethod( "getRow" ).invoke( hRow ); } catch ( Exception ex ) { throw new KettleException( BaseMessages.getString( hBaseRowDecoderMetaClass, "HBaseRowDecoder.Error.UnableToGetRowKey" ), ex ); } Object decodedKey = mTableMapping.decodeKeyValue( rowKey ); outputRowData[ 0 ] = decodedKey; for 
( int i = 0; i < mOutputColumns.length; i++ ) { HBaseValueMetaInterface current = mOutputColumns[ i ]; byte[] colFamilyName = current.getColumnFamily().getBytes(); byte[] qualifier = current.getColumnName().getBytes(); byte[] kv = null; try { kv = (byte[]) hRow.getClass().getMethod( "getValue", byte[].class, byte[].class ) .invoke( hRow, colFamilyName, qualifier ); } catch ( Exception ex ) { throw new KettleException( BaseMessages.getString( hBaseRowDecoderMetaClass, "HBaseRowDecoder.Error.UnableToGetColumnValue" ), ex ); } Object decodedVal = current.decodeColumnValue( ( kv == null ) ? null : kv ); outputRowData[ i + 1 ] = decodedVal; } // output the row putRow( hBaseRowDecoderData.getOutputRowMeta(), outputRowData ); } } return true; } @Override public boolean init( StepMetaInterface smi, StepDataInterface sdi ) { if ( super.init( smi, sdi ) ) { HBaseRowDecoderMeta meta = (HBaseRowDecoderMeta) smi; try { meta.applyInjection(); return true; } catch ( KettleException e ) { logError( "Error while injecting properties", e ); } } return false; } } ================================================ FILE: kettle-plugins/hbase/core/src/main/java/org/pentaho/big/data/kettle/plugins/hbase/rowdecoder/HBaseRowDecoderData.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.hbase.rowdecoder; import org.pentaho.di.core.row.RowMetaInterface; import org.pentaho.di.trans.step.BaseStepData; import org.pentaho.di.trans.step.StepDataInterface; /** * Data class for the HBase row decoder step * * @author Mark Hall (mhall{[at]}pentaho{[dot]}com) */ public class HBaseRowDecoderData extends BaseStepData implements StepDataInterface { /** The output data format */ protected RowMetaInterface m_outputRowMeta; /** * Get the output row format * * @return the output row format */ public RowMetaInterface getOutputRowMeta() { return m_outputRowMeta; } /** * Set the output row format * * @param rmi * the output row format */ public void setOutputRowMeta( RowMetaInterface rmi ) { m_outputRowMeta = rmi; } } ================================================ FILE: kettle-plugins/hbase/core/src/main/java/org/pentaho/big/data/kettle/plugins/hbase/rowdecoder/HBaseRowDecoderDialog.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. 
* * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.hbase.rowdecoder; import org.eclipse.jface.dialogs.MessageDialog; import org.eclipse.swt.SWT; import org.eclipse.swt.custom.CCombo; import org.eclipse.swt.custom.CTabFolder; import org.eclipse.swt.custom.CTabItem; import org.eclipse.swt.events.ModifyEvent; import org.eclipse.swt.events.ModifyListener; import org.eclipse.swt.events.SelectionAdapter; import org.eclipse.swt.events.SelectionEvent; import org.eclipse.swt.events.ShellAdapter; import org.eclipse.swt.events.ShellEvent; import org.eclipse.swt.layout.FormAttachment; import org.eclipse.swt.layout.FormData; import org.eclipse.swt.layout.FormLayout; import org.eclipse.swt.widgets.Button; import org.eclipse.swt.widgets.Composite; import org.eclipse.swt.widgets.Display; import org.eclipse.swt.widgets.Event; import org.eclipse.swt.widgets.Label; import org.eclipse.swt.widgets.Listener; import org.eclipse.swt.widgets.Shell; import org.eclipse.swt.widgets.Text; import org.pentaho.di.core.annotations.PluginDialog; import org.pentaho.hadoop.shim.api.cluster.NamedCluster; import org.pentaho.hadoop.shim.api.cluster.NamedClusterService; import org.pentaho.hadoop.shim.api.cluster.NamedClusterServiceLocator; import org.pentaho.big.data.kettle.plugins.hbase.mapping.MappingEditor; import org.pentaho.hadoop.shim.api.hbase.mapping.Mapping; import org.pentaho.di.core.Const; import org.pentaho.di.core.Props; import org.pentaho.di.core.exception.KettleException; import org.pentaho.di.core.row.RowMetaInterface; import org.pentaho.di.core.row.ValueMetaInterface; import org.pentaho.di.i18n.BaseMessages; import org.pentaho.di.trans.TransMeta; import org.pentaho.di.trans.step.BaseStepMeta; import org.pentaho.di.trans.step.StepDialogInterface; import org.pentaho.di.trans.step.StepMeta; import org.pentaho.di.ui.core.gui.GUIResource; import org.pentaho.di.ui.trans.step.BaseStepDialog; import org.pentaho.runtime.test.RuntimeTester; import org.pentaho.runtime.test.action.RuntimeTestActionService; import java.util.ArrayList; import java.util.List; /** * UI dialog for the HBase row decoder step * * @author Mark Hall (mhall{[at]}pentaho{[dot]}com) */ @PluginDialog( id = "HBaseRowDecoder", image = "HBRD.svg", pluginType = PluginDialog.PluginType.JOBENTRY, documentationUrl = "Products/HBase_Row_Decoder" ) public class HBaseRowDecoderDialog extends BaseStepDialog implements StepDialogInterface { private static final Class PKG = HBaseRowDecoderMeta.class; /** various UI bits and pieces for the dialog */ private Label m_stepnameLabel; private Text m_stepnameText; // The tabs of the dialog private CTabFolder m_wTabFolder; private CTabItem m_wConfigTab; private CTabItem m_editorTab; private CCombo m_incomingKeyCombo; private CCombo m_incomingResultCombo; // mapping editor composite private MappingEditor m_mappingEditor; private final HBaseRowDecoderMeta m_currentMeta; private final HBaseRowDecoderMeta m_originalMeta; private final NamedClusterService namedClusterService; private final RuntimeTestActionService runtimeTestActionService; private final RuntimeTester runtimeTester; private final NamedClusterServiceLocator namedClusterServiceLocator; public HBaseRowDecoderDialog( Shell parent, Object in, TransMeta tr, String name, NamedClusterService namedClusterService, RuntimeTestActionService runtimeTestActionService, RuntimeTester runtimeTester, NamedClusterServiceLocator namedClusterServiceLocator ) { super( parent, 
(BaseStepMeta) in, tr, name ); this.namedClusterService = namedClusterService; this.runtimeTestActionService = runtimeTestActionService; this.runtimeTester = runtimeTester; this.namedClusterServiceLocator = namedClusterServiceLocator; m_currentMeta = (HBaseRowDecoderMeta) in; m_originalMeta = (HBaseRowDecoderMeta) m_currentMeta.clone(); } public String open() { Shell parent = getParent(); Display display = parent.getDisplay(); shell = new Shell( parent, SWT.DIALOG_TRIM | SWT.RESIZE | SWT.MIN | SWT.MAX ); props.setLook( shell ); setShellImage( shell, m_currentMeta ); // used to listen to a text field (m_wStepname) ModifyListener lsMod = new ModifyListener() { public void modifyText( ModifyEvent e ) { m_currentMeta.setChanged(); } }; changed = m_currentMeta.hasChanged(); FormLayout formLayout = new FormLayout(); formLayout.marginWidth = Const.FORM_MARGIN; formLayout.marginHeight = Const.FORM_MARGIN; shell.setLayout( formLayout ); shell.setText( BaseMessages.getString( PKG, "HBaseRowDecoderDialog.Shell.Title" ) ); int middle = props.getMiddlePct(); int margin = Const.MARGIN; // Stepname line m_stepnameLabel = new Label( shell, SWT.RIGHT ); m_stepnameLabel.setText( BaseMessages.getString( PKG, "HBaseRowDecoderDialog.StepName.Label" ) ); props.setLook( m_stepnameLabel ); FormData fd = new FormData(); fd.left = new FormAttachment( 0, 0 ); fd.right = new FormAttachment( middle, -margin ); fd.top = new FormAttachment( 0, margin ); m_stepnameLabel.setLayoutData( fd ); m_stepnameText = new Text( shell, SWT.SINGLE | SWT.LEFT | SWT.BORDER ); m_stepnameText.setText( stepname ); props.setLook( m_stepnameText ); m_stepnameText.addModifyListener( lsMod ); // format the text field fd = new FormData(); fd.left = new FormAttachment( middle, 0 ); fd.top = new FormAttachment( 0, margin ); fd.right = new FormAttachment( 100, 0 ); m_stepnameText.setLayoutData( fd ); m_wTabFolder = new CTabFolder( shell, SWT.BORDER ); props.setLook( m_wTabFolder, Props.WIDGET_STYLE_TAB ); m_wTabFolder.setSimple( false ); // Start of the config tab m_wConfigTab = new CTabItem( m_wTabFolder, SWT.NONE ); m_wConfigTab.setText( BaseMessages.getString( PKG, "HBaseRowDecoderDialog.ConfigTab.TabTitle" ) ); Composite wConfigComp = new Composite( m_wTabFolder, SWT.NONE ); props.setLook( wConfigComp ); FormLayout configLayout = new FormLayout(); configLayout.marginWidth = 3; configLayout.marginHeight = 3; wConfigComp.setLayout( configLayout ); // incoming key field line Label inKeyLab = new Label( wConfigComp, SWT.RIGHT ); inKeyLab.setText( BaseMessages.getString( PKG, "HBaseRowDecoderDialog.KeyField.Label" ) ); props.setLook( inKeyLab ); fd = new FormData(); fd.left = new FormAttachment( 0, 0 ); fd.top = new FormAttachment( 0, margin ); fd.right = new FormAttachment( middle, -margin ); inKeyLab.setLayoutData( fd ); m_incomingKeyCombo = new CCombo( wConfigComp, SWT.BORDER ); props.setLook( m_incomingKeyCombo ); fd = new FormData(); fd.left = new FormAttachment( middle, 0 ); fd.top = new FormAttachment( 0, margin ); fd.right = new FormAttachment( 100, 0 ); m_incomingKeyCombo.setLayoutData( fd ); m_incomingKeyCombo.addModifyListener( new ModifyListener() { public void modifyText( ModifyEvent e ) { m_currentMeta.setChanged(); m_incomingKeyCombo.setToolTipText( transMeta.environmentSubstitute( m_incomingKeyCombo.getText() ) ); } } ); // incoming result line Label inResultLab = new Label( wConfigComp, SWT.RIGHT ); inResultLab.setText( BaseMessages.getString( PKG, "HBaseRowDecoderDialog.ResultField.Label" ) ); props.setLook( inResultLab ); fd = 
new FormData(); fd.left = new FormAttachment( 0, 0 ); fd.top = new FormAttachment( m_incomingKeyCombo, margin ); fd.right = new FormAttachment( middle, -margin ); inResultLab.setLayoutData( fd ); m_incomingResultCombo = new CCombo( wConfigComp, SWT.BORDER ); props.setLook( m_incomingResultCombo ); fd = new FormData(); fd.left = new FormAttachment( middle, 0 ); fd.top = new FormAttachment( m_incomingKeyCombo, margin ); fd.right = new FormAttachment( 100, 0 ); m_incomingResultCombo.setLayoutData( fd ); m_incomingResultCombo.addModifyListener( new ModifyListener() { public void modifyText( ModifyEvent e ) { m_currentMeta.setChanged(); m_incomingResultCombo.setToolTipText( transMeta.environmentSubstitute( m_incomingResultCombo.getText() ) ); } } ); populateFieldsCombo(); wConfigComp.layout(); m_wConfigTab.setControl( wConfigComp ); // --- mapping editor tab m_editorTab = new CTabItem( m_wTabFolder, SWT.NONE ); m_editorTab.setText( BaseMessages.getString( PKG, "HBaseRowDecoderDialog.MappingEditorTab.TabTitle" ) ); m_mappingEditor = new MappingEditor( shell, m_wTabFolder, null, null, SWT.FULL_SELECTION | SWT.MULTI, false, props, transMeta, namedClusterService, runtimeTestActionService, runtimeTester, namedClusterServiceLocator ); fd = new FormData(); fd.top = new FormAttachment( 0, 0 ); fd.left = new FormAttachment( 0, 0 ); fd.bottom = new FormAttachment( 100, -margin * 2 ); fd.right = new FormAttachment( 100, 0 ); m_mappingEditor.setLayoutData( fd ); m_mappingEditor.layout(); m_editorTab.setControl( m_mappingEditor ); fd = new FormData(); fd.left = new FormAttachment( 0, 0 ); fd.top = new FormAttachment( m_stepnameText, margin ); fd.right = new FormAttachment( 100, 0 ); fd.bottom = new FormAttachment( 100, -50 ); m_wTabFolder.setLayoutData( fd ); // Buttons inherited from BaseStepDialog wOK = new Button( shell, SWT.PUSH ); wOK.setText( BaseMessages.getString( PKG, "System.Button.OK" ) ); wCancel = new Button( shell, SWT.PUSH ); wCancel.setText( BaseMessages.getString( PKG, "System.Button.Cancel" ) ); setButtonPositions( new Button[] { wOK, wCancel }, margin, m_wTabFolder ); // Add listeners lsCancel = new Listener() { public void handleEvent( Event e ) { cancel(); } }; lsOK = new Listener() { public void handleEvent( Event e ) { ok(); } }; wCancel.addListener( SWT.Selection, lsCancel ); wOK.addListener( SWT.Selection, lsOK ); lsDef = new SelectionAdapter() { @Override public void widgetDefaultSelected( SelectionEvent e ) { ok(); } }; m_stepnameText.addSelectionListener( lsDef ); // Detect X or ALT-F4 or something that kills this window... 
shell.addShellListener( new ShellAdapter() { @Override public void shellClosed( ShellEvent e ) { cancel(); } } ); m_wTabFolder.setSelection( 0 ); setSize(); getData(); shell.open(); while ( !shell.isDisposed() ) { if ( !display.readAndDispatch() ) { display.sleep(); } } return stepname; } protected void cancel() { stepname = null; m_currentMeta.setChanged( changed ); dispose(); } protected void ok() { if ( Const.isEmpty( m_stepnameText.getText() ) ) { return; } stepname = m_stepnameText.getText(); m_currentMeta.setIncomingKeyField( m_incomingKeyCombo.getText() ); m_currentMeta.setIncomingResultField( m_incomingResultCombo.getText() ); List problems = new ArrayList(); Mapping mapping = m_mappingEditor.getMapping( false, problems, false ); if ( problems.size() > 0 ) { StringBuffer p = new StringBuffer(); for ( String s : problems ) { p.append( s ).append( "\n" ); } MessageDialog md = new MessageDialog( shell, BaseMessages.getString( PKG, "HBaseRowDecoderDialog.Error.IssuesWithMapping.Title" ), null, BaseMessages .getString( PKG, "HBaseRowDecoderDialog.Error.IssuesWithMapping" ) + ":\n\n" + p.toString(), MessageDialog.WARNING, new String[] { BaseMessages.getString( PKG, "HBaseRowDecoderDialog.Error.IssuesWithMapping.ButtonOK" ), BaseMessages.getString( PKG, "HBaseRowDecoderDialog.Error.IssuesWithMapping.ButtonCancel" ) }, 0 ); MessageDialog.setDefaultImage( GUIResource.getInstance().getImageSpoon() ); int idx = md.open() & 0xFF; if ( idx == 1 || idx == 255 /* 255 = escape pressed */ ) { return; // Cancel } } if ( mapping != null ) { m_currentMeta.setMapping( mapping ); } NamedCluster selectedNamedCluster = m_mappingEditor.getSelectedNamedCluster(); if ( selectedNamedCluster != null ) { m_currentMeta.setNamedCluster( selectedNamedCluster ); } if ( !m_originalMeta.equals( m_currentMeta ) ) { m_currentMeta.setChanged(); changed = m_currentMeta.hasChanged(); } dispose(); } protected void getData() { if ( !Const.isEmpty( m_currentMeta.getIncomingKeyField() ) ) { m_incomingKeyCombo.setText( m_currentMeta.getIncomingKeyField() ); } if ( !Const.isEmpty( m_currentMeta.getIncomingResultField() ) ) { m_incomingResultCombo.setText( m_currentMeta.getIncomingResultField() ); } m_mappingEditor.setSelectedNamedCluster( m_currentMeta.getNamedCluster().getName() ); if ( m_currentMeta.getMapping() != null ) { m_mappingEditor.setMapping( m_currentMeta.getMapping() ); } } private void populateFieldsCombo() { StepMeta stepMeta = transMeta.findStep( stepname ); String currentKey = m_incomingKeyCombo.getText(); String currentResult = m_incomingResultCombo.getText(); int keyIndex = -1; int valueIndex = -1; if ( stepMeta != null ) { try { RowMetaInterface rowMeta = transMeta.getPrevStepFields( stepMeta ); if ( rowMeta != null && rowMeta.size() > 0 ) { m_incomingKeyCombo.removeAll(); m_incomingResultCombo.removeAll(); for ( int i = 0; i < rowMeta.size(); i++ ) { ValueMetaInterface vm = rowMeta.getValueMeta( i ); String fieldName = vm.getName(); if ( fieldName.equalsIgnoreCase( "key" ) ) { keyIndex = i; } else if ( fieldName.equalsIgnoreCase( "value" ) ) { valueIndex = i; } m_incomingKeyCombo.add( fieldName ); m_incomingResultCombo.add( fieldName ); } if ( !Const.isEmpty( currentKey ) ) { m_incomingKeyCombo.setText( currentKey ); } else if ( keyIndex >= 0 ) { // auto set key field m_incomingKeyCombo.select( keyIndex ); } if ( !Const.isEmpty( currentResult ) ) { m_incomingResultCombo.setText( currentResult ); } else if ( valueIndex >= 0 ) { // auto set value (Result) field m_incomingResultCombo.select( valueIndex ); } } 
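// Editor's note: the combos above auto-select incoming fields literally named "key" and "value"
// because, when this step runs inside Pentaho MapReduce, the injected input stream from the
// table input format conventionally carries the row key and the Result object under those names.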
} catch ( KettleException ex ) { if ( log.isError() ) { log.logError( "Error populating fields", ex ); } } } } } ================================================ FILE: kettle-plugins/hbase/core/src/main/java/org/pentaho/big/data/kettle/plugins/hbase/rowdecoder/HBaseRowDecoderMeta.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.hbase.rowdecoder; import com.google.common.annotations.VisibleForTesting; import org.apache.commons.lang.StringUtils; import org.eclipse.swt.widgets.Shell; import org.pentaho.big.data.api.services.BigDataServicesHelper; import org.pentaho.big.data.impl.cluster.NamedClusterManager; import org.pentaho.big.data.kettle.plugins.hbase.MappingDefinition; import org.pentaho.big.data.kettle.plugins.hbase.NamedClusterLoadSaveUtil; import org.pentaho.big.data.kettle.plugins.hbase.mapping.MappingUtils; import org.pentaho.di.core.CheckResult; import org.pentaho.di.core.CheckResultInterface; import org.pentaho.di.core.annotations.Step; import org.pentaho.di.core.bowl.Bowl; import org.pentaho.di.core.database.DatabaseMeta; import org.pentaho.di.core.exception.KettleException; import org.pentaho.di.core.exception.KettleStepException; import org.pentaho.di.core.exception.KettleXMLException; import org.pentaho.di.core.injection.Injection; import org.pentaho.di.core.injection.InjectionDeep; import org.pentaho.di.core.injection.InjectionSupported; import org.pentaho.di.core.row.RowMetaInterface; import org.pentaho.di.core.row.ValueMetaInterface; import org.pentaho.di.core.row.value.ValueMetaBase; import org.pentaho.di.core.service.PluginServiceLoader; import org.pentaho.di.core.variables.VariableSpace; import org.pentaho.di.core.xml.XMLHandler; import org.pentaho.di.metastore.MetaStoreConst; import org.pentaho.di.repository.ObjectId; import org.pentaho.di.repository.Repository; import org.pentaho.di.trans.Trans; import org.pentaho.di.trans.TransMeta; import org.pentaho.di.trans.step.BaseStepMeta; import org.pentaho.di.trans.step.StepDataInterface; import org.pentaho.di.trans.step.StepDialogInterface; import org.pentaho.di.trans.step.StepInterface; import org.pentaho.di.trans.step.StepMeta; import org.pentaho.di.trans.step.StepMetaInterface; import org.pentaho.hadoop.shim.api.cluster.ClusterInitializationException; import org.pentaho.hadoop.shim.api.cluster.NamedCluster; import org.pentaho.hadoop.shim.api.cluster.NamedClusterService; import org.pentaho.hadoop.shim.api.cluster.NamedClusterServiceLocator; import org.pentaho.hadoop.shim.api.hbase.HBaseService; import org.pentaho.hadoop.shim.api.hbase.mapping.Mapping; import org.pentaho.hadoop.shim.api.hbase.meta.HBaseValueMetaInterface; import org.pentaho.metastore.api.IMetaStore; import org.pentaho.metastore.locator.api.MetastoreLocator; import org.pentaho.runtime.test.RuntimeTester; import org.pentaho.runtime.test.action.RuntimeTestActionService; import org.pentaho.runtime.test.action.impl.RuntimeTestActionServiceImpl; import org.pentaho.runtime.test.impl.RuntimeTesterImpl; import org.w3c.dom.Node; import java.util.Collection; import java.util.List; import java.util.Map; import java.util.Set; import static 
org.pentaho.di.core.CheckResult.TYPE_RESULT_ERROR; import static org.pentaho.di.core.CheckResult.TYPE_RESULT_OK; import static org.pentaho.di.core.CheckResult.TYPE_RESULT_WARNING; /** * Meta class for the HBase row decoder. * * @author Mark Hall (mhall{[at]}pentaho{[dot]}com) * */ @Step( id = "HBaseRowDecoder", image = "HBRD.svg", name = "HBaseRowDecoder.Name", description = "HBaseRowDecoder.Description", categoryDescription = "i18n:org.pentaho.di.trans.step:BaseStep.Category.BigData", documentationUrl = "pdi-transformation-steps-reference-overview/hbase-row-decoder-pdi", i18nPackageName = "org.pentaho.di.trans.steps.hbaserowdecoder" ) @InjectionSupported( localizationPrefix = "HBaseRowDecoder.Injection.", groups = { "MAPPING" } ) public class HBaseRowDecoderMeta extends BaseStepMeta implements StepMetaInterface { public static final String INCOMING_KEY_FIELD = "incoming_key_field"; public static final String INCOMING_RESULT_FIELD = "incoming_result_field"; protected NamedCluster namedCluster; /** The incoming field that contains the HBase row key */ @Injection( name = "KEY_FIELD" ) protected String mIncomingKeyField = ""; /** The incoming field that contains the HBase row Result object */ @Injection( name = "HBASE_RESULT_FIELD" ) protected String mIncomingResultField = ""; /** The mapping to use */ protected Mapping mMapping; @InjectionDeep protected MappingDefinition mappingDefinition; private MetastoreLocator metaStoreService; private final NamedClusterServiceLocator namedClusterServiceLocator; private final NamedClusterService namedClusterService; private final RuntimeTestActionService runtimeTestActionService; private final RuntimeTester runtimeTester; private final NamedClusterLoadSaveUtil namedClusterLoadSaveUtil; public HBaseRowDecoderMeta() { this( BigDataServicesHelper.getNamedClusterServiceLocator(), NamedClusterManager.getInstance(), RuntimeTestActionServiceImpl.getInstance(), RuntimeTesterImpl.getInstance() ); } public HBaseRowDecoderMeta( NamedClusterServiceLocator namedClusterServiceLocator, NamedClusterService namedClusterService, RuntimeTestActionService runtimeTestActionService, RuntimeTester runtimeTester ) { this( namedClusterServiceLocator, namedClusterService, runtimeTestActionService, runtimeTester, null ); } public synchronized MetastoreLocator getMetastoreLocators() { if ( this.metaStoreService == null ) { try { Collection metastoreLocators = PluginServiceLoader.loadServices( MetastoreLocator.class ); this.metaStoreService = metastoreLocators.stream().findFirst().get(); } catch ( Exception e ) { logError( "Error getting MetastoreLocator", e ); } } return this.metaStoreService; } @VisibleForTesting HBaseRowDecoderMeta( NamedClusterServiceLocator namedClusterServiceLocator, NamedClusterService namedClusterService, RuntimeTestActionService runtimeTestActionService, RuntimeTester runtimeTester, MetastoreLocator metaStore ) { this.namedClusterServiceLocator = namedClusterServiceLocator; this.namedClusterService = namedClusterService; this.runtimeTestActionService = runtimeTestActionService; this.runtimeTester = runtimeTester; this.namedClusterLoadSaveUtil = new NamedClusterLoadSaveUtil(); this.metaStoreService = metaStore; } /** * @param namedCluster the namedCluster to set */ public void setNamedCluster( NamedCluster namedCluster ) { this.namedCluster = namedCluster; } /** * @return the namedCluster */ public NamedCluster getNamedCluster() { return namedCluster; } /** * Set the incoming field that holds the HBase row key * * @param inKey * the name of the field that 
holds the key */ public void setIncomingKeyField( String inKey ) { mIncomingKeyField = inKey; } /** * Get the incoming field that holds the HBase row key * * @return the name of the field that holds the key */ public String getIncomingKeyField() { return mIncomingKeyField; } /** * Set the incoming field that holds the HBase row Result object * * @param inResult * the name of the field that holds the HBase row Result object */ public void setIncomingResultField( String inResult ) { mIncomingResultField = inResult; } /** * Get the incoming field that holds the HBase row Result object * * @return the name of the field that holds the HBase row Result object */ public String getIncomingResultField() { return mIncomingResultField; } /** * Set the mapping to use for decoding the row * * @param m * the mapping to use */ public void setMapping( Mapping m ) { mMapping = m; } /** * Get the mapping to use for decoding the row * * @return the mapping to use */ public Mapping getMapping() { return mMapping; } public MappingDefinition getMappingDefinition() { return mappingDefinition; } public void setMappingDefinition( MappingDefinition mappingDefinition ) { this.mappingDefinition = mappingDefinition; } public void setDefault() { mIncomingKeyField = ""; mIncomingResultField = ""; namedCluster = namedClusterService.getClusterTemplate(); } @Override public void getFields( Bowl bowl, RowMetaInterface rowMeta, String origin, RowMetaInterface[] info, StepMeta nextStep, VariableSpace space ) throws KettleStepException { rowMeta.clear(); // start afresh - eats the input if ( mMapping != null ) { int kettleType; if ( mMapping.getKeyType() == Mapping.KeyType.DATE || mMapping.getKeyType() == Mapping.KeyType.UNSIGNED_DATE ) { kettleType = ValueMetaInterface.TYPE_DATE; } else if ( mMapping.getKeyType() == Mapping.KeyType.STRING ) { kettleType = ValueMetaInterface.TYPE_STRING; } else if ( mMapping.getKeyType() == Mapping.KeyType.BINARY ) { kettleType = ValueMetaInterface.TYPE_BINARY; } else { kettleType = ValueMetaInterface.TYPE_INTEGER; } ValueMetaInterface keyMeta = new ValueMetaBase( mMapping.getKeyName(), kettleType ); keyMeta.setOrigin( origin ); rowMeta.addValueMeta( keyMeta ); // Add the rest of the fields in the mapping Map mappedColumnsByAlias = mMapping.getMappedColumns(); Set aliasSet = mappedColumnsByAlias.keySet(); for ( String alias : aliasSet ) { HBaseValueMetaInterface columnMeta = mappedColumnsByAlias.get( alias ); columnMeta.setOrigin( origin ); rowMeta.addValueMeta( columnMeta ); } } } public void check( List remarks, TransMeta transMeta, StepMeta stepMeta, RowMetaInterface prev, String[] input, String[] output, RowMetaInterface info ) { CheckResult cr; if ( ( prev == null ) || ( prev.size() == 0 ) ) { cr = new CheckResult( TYPE_RESULT_WARNING, "Not receiving any fields from previous steps!", stepMeta ); remarks.add( cr ); } else { cr = new CheckResult( TYPE_RESULT_OK, "Step is connected to previous one, receiving " + prev.size() + " fields", stepMeta ); remarks.add( cr ); } // See if we have input streams leading to this step! if ( input.length > 0 ) { cr = new CheckResult( TYPE_RESULT_OK, "Step is receiving info from other steps.", stepMeta ); remarks.add( cr ); } else { cr = new CheckResult( TYPE_RESULT_ERROR, "No input received from other steps!", stepMeta ); remarks.add( cr ); } } void applyInjection() throws KettleException { if ( namedCluster == null ) { throw new KettleException( "Named cluster was not initialized!" 
); } try { HBaseService hBaseService = namedClusterServiceLocator.getService( this.namedCluster, HBaseService.class ); Mapping tempMapping = null; if ( mappingDefinition != null ) { tempMapping = MappingUtils.getMapping( mappingDefinition, hBaseService ); mMapping = tempMapping; } } catch ( ClusterInitializationException e ) { throw new KettleException( e ); } } public StepInterface getStep( StepMeta stepMeta, StepDataInterface stepDataInterface, int copyNr, TransMeta transMeta, Trans trans ) { return new HBaseRowDecoder( stepMeta, stepDataInterface, copyNr, transMeta, trans, namedClusterServiceLocator ); } public StepDataInterface getStepData() { return new HBaseRowDecoderData(); } @Override public String getXML() { try { applyInjection(); } catch ( KettleException e ) { log.logError( "Error occurred while injecting metadata. Transformation meta could be incorrect!", e ); } StringBuilder retval = new StringBuilder(); if ( StringUtils.isNotEmpty( mIncomingKeyField ) ) { retval.append( "\n " ).append( XMLHandler.addTagValue( INCOMING_KEY_FIELD, mIncomingKeyField ) ); } if ( StringUtils.isNotEmpty( mIncomingResultField ) ) { retval.append( "\n " ).append( XMLHandler.addTagValue( INCOMING_RESULT_FIELD, mIncomingResultField ) ); } namedClusterLoadSaveUtil.getXml( retval, namedClusterService, namedCluster, MetaStoreConst.getDefaultMetastore(), log ); if ( mMapping != null ) { retval.append( mMapping.getXML() ); } return retval.toString(); } public void loadXML( Node stepnode, List databases, IMetaStore metaStore ) throws KettleXMLException { if ( metaStore == null ) { metaStore = getMetastoreLocators().getMetastore(); } mIncomingKeyField = XMLHandler.getTagValue( stepnode, INCOMING_KEY_FIELD ); mIncomingResultField = XMLHandler.getTagValue( stepnode, INCOMING_RESULT_FIELD ); this.namedCluster = namedClusterLoadSaveUtil.loadClusterConfig( namedClusterService, null, repository, metaStore, stepnode, log ); try { HBaseService hbaseService = namedClusterServiceLocator.getService( this.namedCluster, HBaseService.class ); mMapping = ( hbaseService == null ? 
null : hbaseService.getMappingFactory().createMapping() ); } catch ( ClusterInitializationException e ) { throw new KettleXMLException( e ); } if ( mMapping != null ) { mMapping.loadXML( stepnode ); } } public void readRep( Repository rep, IMetaStore metaStore, ObjectId idStep, List databases ) throws KettleException { mIncomingKeyField = rep.getStepAttributeString( idStep, 0, INCOMING_KEY_FIELD ); mIncomingResultField = rep.getStepAttributeString( idStep, 0, INCOMING_RESULT_FIELD ); this.namedCluster = namedClusterLoadSaveUtil.loadClusterConfig( namedClusterService, idStep, rep, metaStore, null, log ); try { mMapping = namedClusterServiceLocator.getService( this.namedCluster, HBaseService.class ).getMappingFactory() .createMapping(); } catch ( ClusterInitializationException e ) { throw new KettleXMLException( e ); } mMapping.readRep( rep, idStep ); } public void saveRep( Repository rep, IMetaStore metaStore, ObjectId idTransformation, ObjectId idStep ) throws KettleException { if ( StringUtils.isNotEmpty( mIncomingKeyField ) ) { rep.saveStepAttribute( idTransformation, idStep, 0, INCOMING_KEY_FIELD, mIncomingKeyField ); } if ( StringUtils.isNotEmpty( mIncomingResultField ) ) { rep.saveStepAttribute( idTransformation, idStep, 0, INCOMING_RESULT_FIELD, mIncomingResultField ); } namedClusterLoadSaveUtil.saveRep( rep, metaStore, idTransformation, idStep, namedClusterService, namedCluster, log ); if ( mMapping != null ) { mMapping.saveRep( rep, idTransformation, idStep ); } } /** * Get the UI for this step. * * @param shell * a Shell value * @param meta * a StepMetaInterface value * @param transMeta * a TransMeta value * @param name * a String value * @return a StepDialogInterface value */ public StepDialogInterface getDialog( Shell shell, StepMetaInterface meta, TransMeta transMeta, String name ) { return new HBaseRowDecoderDialog( shell, meta, transMeta, name, namedClusterService, runtimeTestActionService, runtimeTester, namedClusterServiceLocator ); } } ================================================ FILE: kettle-plugins/hbase/core/src/main/resources/OSGI-INF/blueprint/blueprint.xml ================================================ ================================================ FILE: kettle-plugins/hbase/core/src/main/resources/org/pentaho/big/data/kettle/plugins/hbase/input/messages/messages_en_US.properties ================================================ HBaseInput.Name=HBase input HBaseInput.Description=Reads data from a HBase table according to a mapping HBaseInputDialog.Shell.Title=HBase input HBaseInputDialog.StepName.Label=Step name HBaseInputDialog.ConfigTab.TabTitle=Configure query HBaseInputDialog.FilterTab.TabTitle=Filter result set HBaseInputDialog.MappingEditorTab.TabTitle=Create/Edit mappings HBaseInputDialog.Zookeeper.Label=Zookeeper host(s) HBaseInputDialog.ZookeeperPort.Label=Zookeeper port HBaseInputDialog.Zookeeper.TipText=Comma separated list of hosts in the zookeeper quorum HBaseInputDialog.CoreConfig.Label=URL to hbase-site.xml HBaseInputDialog.CoreConfig.TipText=URL to hbase-site.xml (leave blank if in classpath) HBaseInputDialog.DefaultConfig.Label=URL to hbase-default.xml HBaseInputDialog.DefaultConfig.TipText=URL to hbase-default.xml (leave blank if in classpath) HBaseInputDialog.TableName.Label=HBase table name HBaseInputDialog.TableName.TipText=The name of the HBase table to read from HBaseInputDialog.TableName.Button=Get mapped table names HBaseInputDialog.FileType.XML=XML config file HBaseInputDialog.MappingName.Label=Mapping name 
HBaseInputDialog.MappingName.TipText=Mapping to use for the above HBase table HBaseInputDialog.MappingName.Button=Get mappings for the specified table HBaseInputDialog.StoreMapping.Label=Store mapping info in step meta data HBaseInputDialog.StoreMapping.TipText=Store the mapping in the step''s meta data, rather than load it from HBase at runtime HBaseInputDialog.NamedCluster.Label=Hadoop Cluster HBaseInputDialog.NamedCluster.TipText=Hadoop cluster to use for setting ZooKeeper host(s) and port HBaseInputDialog.NamedClusterMissingValues.Msg=The selected Hadoop cluster is missing required values. HBaseInputDialog.NamedClusterNotSelected.Msg=You must select a Hadoop cluster to continue. HBaseInputDialog.KeyStart.Label=Start key value (inclusive) for table scan HBaseInputDialog.KeyStart.TipText=Start key value (inclusive) for table scan. Leave this and stop key value blank for a full scan. HBaseInputDialog.KeyStop.Label=Stop key value (exclusive) for table scan HBaseInputDialog.KeyStop.TipText=Stop key value (exclusive) for table scan. Leave this and start key value blank for a full scan. HBaseInputDialog.ScannerCache.Label=Scanner row cache size HBaseInputDialog.ScannerCache.TipText=Number of rows for caching. More rows = faster scans, but higher memory consumption (leave empty for default). HBaseInputDialog.IncludeKey.Label=Include the key as a column HBaseInputDialog.ErrorMessage.UnableToConnect=Problem connecting to HBase HBaseInputDialog.ErrorMessage.UnableToGetMapping=Unable to retrieve mapping information HBaseInputDialog.ErrorMessage.FailedClosingHBaseConnection=Unable to close HBase connection HBaseInputDialog.Fields.FIELD_ALIAS=Alias HBaseInputDialog.Fields.FIELD_KEY=Key HBaseInputDialog.Fields.FIELD_FAMILY=Column family HBaseInputDialog.Fields.FIELD_NAME=Column name HBaseInputDialog.Fields.FIELD_TYPE=Type HBaseInputDialog.Fields.FIELD_FORMAT=Format HBaseInputDialog.Fields.FIELD_INDEXED=Indexed values HBaseInputDialog.Fields.FIELD_LENGTH=Length HBaseInputDialog.Fields.FIELD_PRECISION=Precision HBaseInputDialog.Fields.FIELD_CURRENCY=Currency HBaseInputDialog.Fields.FIELD_DECIMAL=Decimal HBaseInputDialog.Fields.FIELD_GROUP=Group HBaseInputDialog.Fields.FIELD_TRIM_TYPE=Trim type HBaseInputDialog.Filters.RADIO_ALL=Match all HBaseInputDialog.Filters.RADIO_ANY=Match any HBaseInputDialog.Filters.FIELD_ALIAS=Alias HBaseInputDialog.Filters.FIELD_FAMILY=Column family HBaseInputDialog.Filters.FIELD_NAME=Column name HBaseInputDialog.Filters.FIELD_TYPE=Type HBaseInputDialog.Filters.FIELD_OPERATOR=Operator HBaseInputDialog.Filters.FIELD_COMPARISON=Comparison value HBaseInputDialog.Filters.FIELD_FORMAT=Format HBaseInputDialog.Filters.FIELD_SIGNED=Signed comparison MappingDialog.TableName.Label=HBase table name MappingDialog.TableName.GetTableNames=Get table names MappingDialog.MappingName.Label=Mapping name MappingDialog.SaveMapping=Save mapping MappingDialog.SaveMapping.TipText=Persist the mapping in HBase MappingDialog.DeleteMapping=Delete mapping MappingDialog.GetIncomingFields=Get incoming fields MappingDialog.KeyValueTemplate=Create a tuple template MappingDialog.KeyValueTemplate.TipText=Creates a template mapping for outputting rows MappingDialog.NamedCluster.Label=Hadoop Cluster MappingDialog.Error.Title.MissingTableMappingName=Missing table/mapping name MappingDialog.Error.Message.MissingTableMappingName=You must specify a table and mapping name MappingDialog.Error.Title.NoFieldsDefined=No fields defined
MappingDialog.Error.Message.NoFieldsDefined=No fields have been defined for this mapping MappingDialog.Error.Title.NoAliasForKey=Missing alias for key MappingDialog.Error.Message.NoAliasForKey=The key must have an alias defined MappingDialog.Error.Title.NoTypeForKey=Missing type for key MappingDialog.Error.Message.NoTypeForKey=Missing type for key MappingDialog.Error.Title.MoreThanOneKey=More than one key MappingDialog.Error.Message.MoreThanOneKey=More than one key is defined in the list of fields MappingDialog.Error.Title.DuplicateColumn=Duplicate column MappingDialog.Error.Message1.DuplicateColumn=Column " MappingDialog.Error.Message2.DuplicateColumn=" has already been defined for this mapping MappingDialog.Error.Title.NoKeyDefined=No key defined MappingDialog.Error.Message.NoKeyDefined=No key has been defined for this mapping MappingDialog.Error.Title.UnableToConnect=Unable to connect to HBase MappingDialog.Error.Message.UnableToConnect=Unable to connect to HBase MappingDialog.Error.Title.IssuesPreventingSaving=Error(s) in field definitions MappingDialog.Error.Message.IssuesPreventingSaving=The following issues prevent this mapping from being created MappingDialog.Error.Message.FamilyIssue=These fields do not have a column family defined MappingDialog.Error.Message.ColumnIssue=These fields do not have a column name defined MappingDialog.Error.Message.TypeIssue=These fields do not have type information specified MappingDialog.Error.Title.ErrorCreatingTable=Problem creating table MappingDialog.Error.Message.ErrorCreatingTable=A problem occurred while trying to create table HBaseInputDialog.Error.IssuesWithMapping.Title=Problems with mapping HBaseInputDialog.Error.IssuesWithMapping=There are some problems with the mapping that need rectification HBaseInputDialog.Error.IssuesWithMapping.ButtonOK=OK and close HBaseInputDialog.Error.IssuesWithMapping.ButtonCancel=Cancel and rectify MappingDialog.Info.Title.MappingExists=Mapping exists MappingDialog.Info.Message1.MappingExists=A mapping called " MappingDialog.Info.Message2.MappingExists=" already exists for table " MappingDialog.Info.Message3.MappingExists=". Overwrite? MappingDialog.Info.Title.MappingSaved=Mapping saved MappingDialog.Info.Message1.MappingSaved=Mapping " MappingDialog.Info.Message2.MappingSaved=" on table " MappingDialog.Info.Message3.MappingSaved=" saved successfully. MappingDialog.Info.Title.ConfirmDelete=OK to delete? MappingDialog.Info.Message.ConfirmDelete=Delete mapping "{0}" on table "{1}"? MappingDialog.Info.Title.MappingDeleted=Mapping deleted MappingDialog.Info.Message.MappingDeleted=Mapping "{0}" on table "{1}" deleted successfully. MappingDialog.Error.Title.ErrorSaving=Error during save MappingDialog.Error.Message.ErrorSaving=An error occurred while trying to save the mapping MappingDialog.Error.Title.ErrorLoadingMapping=Error during load MappingDialog.Error.Message.ErrorLoadingMapping=An error occurred while trying to load the mapping definition MappingDialog.Error.Message.CantConnectNoConnectionDetailsProvided=Can't connect to HBase as no connection details have been provided MappingDialog.GetFieldsChoice.Title=Question MappingDialog.GetFieldsChoice.Message=Data has already been entered - {0} fields were found.\nHow do you want to add the {1} incoming fields? MappingDialog.Error.Title.DeleteMapping=An error occurred MappingDialog.Error.Message.DeleteMapping=Mapping "{0}" for table "{1}" does not seem to exist! 
MappingDialog.Error.Message.DeleteMappingIO=An IO error occurred while trying to delete\nmapping "{0}" on table "{1}"\n{2} MappingDialog.AddNew=Add new MappingOutputDialog.Add=Add all MappingOutputDialog.ClearAndAdd=Clear and add MappingOutputDialog.Cancel=Cancel HBaseInput.TableName.Missing=HBase table name is required. HBaseInput.ClosingConnection=Closing connection... HBaseInput.Message.SettingScannerCaching=Set scanner caching to {0} rows. HBaseInput.Error.NoMappingName=Reading mapping from HBase, but no mapping name has been supplied! HBaseInput.Error.UnableToObtainConnection=Unable to obtain a connection to HBase HBaseInput.Error.UnableToCreateAMappingAdminConnection=Unable to create a MappingAdmin connection HBaseInput.Error.SourceTableDoesNotExist=Source table "{0}" does not exist! HBaseInput.Error.SourceTableIsNotAvailable=Source table "{0}" is not available! HBaseInput.Error.AvailabilityReadinessProblem=A problem occurred when trying to check availability/readiness of source table "{0}" HBaseInput.Error.UnableToFindUserSelectedColumn=Unable to find user-selected column "{0}" in the mapping "{1}" HBaseInput.Error.UnableToParseLowerBoundKeyValue=Unable to parse lower bound key value "{0}" HBaseInput.Error.UnableToParseUpperBoundKeyValue=Unable to parse upper bound key value "{0}" HBaseInput.Error.ColumnFilterIsNotInTheMapping=Column filter "{0}" is not in the mapping! HBaseInput.Error.FieldTypeMismatch=Type ({0}) of column filter for "{1}" does not match type specified for this field in the mapping ({2}) HBaseInput.Error.ProblemClosingConnection=Problem closing connection to HBase table "{0}" HBaseInput.Error.ProblemClosingConnection1=A problem occurred while closing connection to HBase: {0} HBaseInput.Error.UnableToLookupQualifier=Unable to look up qualifier/column "{0}" HBaseInput.Error.ColumnNotDefinedInOutput=HBase column "{0}" doesn't seem to be defined in the output HBaseInput.Error.UnableToParseZookeeperPort=Unable to parse zookeeper port - using default HBaseInput.Error.UnableToRetrieveMapping=Unable to retrieve mapping "{0}" on table "{1}" HBaseInput.Error.UnableToSetSourceTableForScan=Unable to set source table for scan HBaseInput.Error.UnableToConfigureSourceTableScan=Unable to configure a new source table scan HBaseInput.Error.UnableToAddColumnToScan=Unable to add a column definition to the current scan HBaseInput.Error.UnableToAddColumnFilterToScan=Unable to add column filter to the current scan HBaseInput.Error.UnableToExecuteSourceTableScan=Unable to execute source table scan HBaseInput.Error.FiltersNotApplicableWithTupleMapping=WARNING: server-side column value filtering is not applicable when using a tuple mapping - ignoring filters... HBaseInput.Error.ServiceStatus=Cannot communicate with HBaseService\nSaving the transformation may lose data.\nPlease correct the communication issue before working with this transformation\n Dialog.Error=Error HBaseInput.Injection.HBASE_SITE_XML_URL=The address of the hbase-site.xml file. HBaseInput.Injection.HBASE_DEFAULT_XML_URL=The address of the hbase-default.xml file. HBaseInput.Injection.SOURCE_TABLE_NAME=The name of the HBase table to read from. HBaseInput.Injection.SOURCE_MAPPING_NAME=The name of the HBase table map to use. HBaseInput.Injection.START_KEY_VALUE=The start key value for range scans. HBaseInput.Injection.STOP_KEY_VALUE=The stop key value for range scans. HBaseInput.Injection.SCANNER_ROW_CACHE_SIZE=The number of rows that are cached each time an HBase fetch request is made.
HBaseInput.Injection.MATCH_ANY_FILTER=Set this flag to output rows if they match any filter or all filters. HBaseInput.Injection.OUTPUT_FIELDS=Fields HBaseInput.Injection.OUTPUT_FIELD_KEY=This option indicates if the column is the key for the table. HBaseInput.Injection.OUTPUT_FIELD_ALIAS=The name that the field will be given in the output stream. HBaseInput.Injection.OUTPUT_FIELD_COLUMN_NAME=The name of the column in the HBase table. HBaseInput.Injection.OUTPUT_FIELD_FAMILY=The family of the column in the HBase table. HBaseInput.Injection.OUTPUT_FIELD_TYPE=This option will let you specify the type of field (string, date, number). HBaseInput.Injection.OUTPUT_FIELD_FORMAT=The numeric and date format to apply to the output field. HBaseInput.Injection.MAPPING=Mappings HBaseInput.Injection.TABLE_NAME=The name of the HBase table. HBaseInput.Injection.MAPPING_NAME=The name of the map to use for the HBase table. HBaseInput.Injection.MAPPING_ALIAS=The name to assign to the HBase table key. HBaseInput.Injection.MAPPING_KEY=This option indicates if the column is the key for the table. HBaseInput.Injection.MAPPING_COLUMN_FAMILY=The family of the column in the HBase table. HBaseInput.Injection.MAPPING_COLUMN_NAME=The name of the column in the HBase table. HBaseInput.Injection.MAPPING_TYPE=The data type of the column. HBaseInput.Injection.MAPPING_INDEXED_VALUES=Optional comma-separated set of legal values if the column is a String type. HBaseInput.Injection.FILTER=Filters HBaseInput.Injection.ALIAS=The name of the field. HBaseInput.Injection.FIELD_TYPE=This option will let you specify the type of field (string, date, number). HBaseInput.Injection.COMPARISON_TYPE=The type of comparison to perform. HBaseInput.Injection.SIGNED_COMPARISON=This option controls if HBase''s native comparisons should be used. HBaseInput.Injection.COMPARISON_VALUE=The value used for filtering data. HBaseInput.Injection.FORMAT=The numeric and date format to apply to the field. 
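Editor's note: the keys in the bundle above are consumed through Kettle's standard i18n mechanism. A step or dialog class declares a PKG class whose package contains the messages/messages_en_US.properties bundle and resolves each key through org.pentaho.di.i18n.BaseMessages, with {0}, {1} placeholders filled from the supplied parameters; the *.Injection.* keys follow the @InjectionSupported localizationPrefix convention, which is how the metadata-injection UI picks up those field descriptions. The sketch below is illustrative only: HBaseMessagesDemo and its use of itself as PKG are placeholders, not code from this plugin.

import org.pentaho.di.i18n.BaseMessages;

public class HBaseMessagesDemo {
  // Kettle convention: PKG is a class located in the package that owns the
  // "messages" sub-package holding messages_en_US.properties (placeholder here).
  private static final Class<?> PKG = HBaseMessagesDemo.class;

  public static void main( String[] args ) {
    // Plain key lookup, e.g. the table-name validation message defined above.
    String missingTable = BaseMessages.getString( PKG, "HBaseInput.TableName.Missing" );

    // Parameterised lookup: {0} and {1} are replaced by the varargs values.
    String deleted = BaseMessages.getString( PKG, "MappingDialog.Info.Message.MappingDeleted",
      "wordcount_map", "default:weblogs" );

    System.out.println( missingTable );
    System.out.println( deleted );
  }
}

A lookup only succeeds when the bundle sits next to the PKG class's package, which is why each step in this plugin keeps its own messages directory as listed in the FILE headers of this dump.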
================================================ FILE: kettle-plugins/hbase/core/src/main/resources/org/pentaho/big/data/kettle/plugins/hbase/mapping/messages/messages_en_US.properties ================================================ MappingDialog.Error.Message.NamedClusterNotSelected.Msg=You must select a named cluster to continue MappingDialog.Error.Title.NamedClusterNotSelected=No named cluster selected ================================================ FILE: kettle-plugins/hbase/core/src/main/resources/org/pentaho/big/data/kettle/plugins/hbase/output/messages/messages_en_US.properties ================================================ HBaseOutput.Name=HBase output HBaseOutput.Description=Writes data to an HBase table according to a mapping HBaseOutputDialog.Shell.Title=HBase output HBaseOutputDialog.StepName.Label=Step name HBaseOutputDialog.ConfigTab.TabTitle=Configure connection HBaseOutputDialog.MappingEditorTab.TabTitle=Create/Edit mappings HBaseOutputDialog.Zookeeper.Label=Zookeeper host(s) HBaseOutputDialog.ZookeeperPort.Label=Zookeeper port HBaseOutputDialog.Zookeeper.TipText=Comma separated list of hosts in the zookeeper quorum HBaseOutputDialog.CoreConfig.Label=URL to hbase-site.xml HBaseOutputDialog.CoreConfig.TipText=URL to hbase-site.xml (leave blank if in classpath) HBaseOutputDialog.DefaultConfig.Label=URL to hbase-default.xml HBaseOutputDialog.DefaultConfig.TipText=URL to hbase-default.xml (leave blank if in classpath) HBaseOutputDialog.TableName.Label=HBase table name HBaseOutputDialog.TableName.TipText=The name of the HBase table to write to HBaseOutputDialog.TableName.Button=Get table names HBaseOutputDialog.FileType.XML=XML config file HBaseOutputDialog.NamedCluster.Label=Hadoop cluster HBaseOutputDialog.NamedCluster.TipText=Hadoop cluster to use for setting ZooKeeper host(s) and port HBaseOutputDialog.NamedClusterMissingValues.Msg=The selected Hadoop cluster is missing required values. HBaseOutputDialog.NamedClusterNotSelected.Msg=You must select a Hadoop cluster to continue. HBaseOutputDialog.MappingName.Label=Mapping name HBaseOutputDialog.MappingName.TipText=Mapping to use for the above HBase table HBaseOutputDialog.MappingName.Button=Get mappings HBaseOutputDialog.DeleteRowKey.Label=Delete rows by mapping key HBaseOutputDialog.DeleteRowKey.TipText=Deletes data from the HBase table based on the row key defined in the mapping HBaseOutputDialog.StoreMapping.Label=Store mapping info in step meta data HBaseOutputDialog.StoreMapping.TipText=Store the mapping in the step''s meta data, rather than load it from HBase at runtime HBaseOutputDialog.DisableWAL.Label=Disable write to WAL HBaseOutputDialog.DisableWAL.TipText=Speeds up loading at the expense of error-recovery HBaseOutputDialog.WriteBufferSize.Label=Size of write buffer (bytes) HBaseOutputDialog.WriteBufferSize.TipText=Larger buffer = faster/greater memory consumption. Leave blank for no buffering. 
HBaseOutputDialog.ErrorMessage.UnableToConnect=Problem connecting to HBase HBaseOutputDialog.ErrorMessage.UnableToGetMapping=Unable to retrieve mapping information HBaseOutputDialog.ErrorMessage.FailedClosingHBaseConnection=Unable to close HBase connection HBaseOutputDialog.Error.IssuesWithMapping.Title=Problems with mapping HBaseOutputDialog.Error.IssuesWithMapping=There are some problems with the mapping that need rectification HBaseOutputDialog.Error.IssuesWithMapping.ButtonOK=OK and close HBaseOutputDialog.Error.IssuesWithMapping.ButtonCancel=Cancel and rectify MappingDialog.TableName.Label=HBase table name MappingDialog.TableName.GetTableNames=Get table names MappingDialog.MappingName.Label=Mapping name MappingDialog.SaveMapping=Save mapping MappingDialog.SaveMapping.TipText=Persist the mapping in HBase MappingDialog.DeleteMapping=Delete mapping MappingDialog.GetIncomingFields=Get incoming fields MappingDialog.KeyValueTemplate=Create a tuple template MappingDialog.KeyValueTemplate.TipText=Creates a template mapping for outputting rows MappingDialog.Error.Title.MissingTableMappingName=Missing table/mapping name MappingDialog.Error.Message.MissingTableMappingName=You must specify a table and mapping name MappingDialog.Error.Title.NoFieldsDefined=No fields defined MappingDialog.Error.Message.NoFieldsDefined=No fields have been defined for this mapping MappingDialog.Error.Title.MoreThanOneKey=More than one key MappingDialog.Error.Message.MoreThanOneKey=More than one key is defined in the list of fields MappingDialog.Error.Title.DuplicateColumn=Duplicate column MappingDialog.Error.Message1.DuplicateColumn=Column " MappingDialog.Error.Message2.DuplicateColumn=" has already been defined for this mapping MappingDialog.Error.Title.NoKeyDefined=No key defined MappingDialog.Error.Message.NoKeyDefined=No key has been defined for this mapping MappingDialog.Error.Message.CantConnectNoConnectionDetailsProvided=Can't connect to HBase as no connection details have been provided MappingDialog.Error.Title.IssuesPreventingSaving=Error(s) in field definitions MappingDialog.Error.Message.IssuesPreventingSaving=The following issues prevent this mapping from being created MappingDialog.Error.Message.FamilyIssue=These fields do not have a column family defined MappingDialog.Error.Message.ColumnIssue=These fields do not have a column name defined MappingDialog.Error.Message.TypeIssue=These fields do not have type information specified MappingDialog.Error.Title.UnableToConnect=Unable to connect to HBase MappingDialog.Error.Message.UnableToConnect=Unable to connect to HBase MappingDialog.Info.Title.MappingExists=Mapping exists MappingDialog.Info.Message1.MappingExists=A mapping called " MappingDialog.Info.Message2.MappingExists=" already exists for table " MappingDialog.Info.Message3.MappingExists=". Overwrite? MappingDialog.Info.Title.MappingSaved=Mapping saved MappingDialog.Info.Message1.MappingSaved=Mapping " MappingDialog.Info.Message2.MappingSaved=" on table " MappingDialog.Info.Message3.MappingSaved=" saved successfully. MappingDialog.Info.Title.ConfirmDelete=OK to delete? MappingDialog.Info.Message.ConfirmDelete=Delete mapping "{0}" on table "{1}"? MappingDialog.Info.Title.MappingDeleted=Mapping deleted MappingDialog.Info.Message.MappingDeleted=Mapping "{0}" on table "{1}" deleted successfully. 
MappingDialog.Error.Title.ErrorSaving=Error during save MappingDialog.Error.Message.ErrorSaving=An error occurred while trying to save the mapping MappingDialog.Error.Title.ErrorLoadingMapping=Error during load MappingDialog.Error.Message.ErrorLoadingMapping=An error occurred while trying to load the mapping definition MappingDialog.GetFieldsChoice.Title=Question MappingDialog.GetFieldsChoice.Message=Data has already been entered - {0} fields were found.\nHow do you want to add the {1} incoming fields? MappingDialog.Error.Title.DeleteMapping=An error occurred MappingDialog.Error.Message.DeleteMapping=Mapping "{0}" for table "{1}" does not seem to exist! MappingDialog.Error.Message.DeleteMappingIO=An IO error occurred while trying to delete\nmapping "{0}" on table "{1}"\n{2} MappingDialog.AddNew=Add new MappingOutputDialog.Add=Add all MappingOutputDialog.ClearAndAdd=Clear and add MappingOutputDialog.Cancel=Cancel HBaseOutput.ConnectingToHBase=Connecting to HBase... HBaseOutput.ConnectingToTargetTable=Connecting to target table... HBaseOutput.FlushingWriteBuffer=Flushing write buffer... HBaseOutput.ClosingConnectionToTable=Closing connection to target table HBaseOutput.RetrievingMappingDetails=Retrieving mapping details for target table HBaseOutput.SettingWriteBuffer=Setting the write buffer to {0} bytes HBaseOutput.DisablingWriteToWAL=Disabling write to WAL HBaseOutput.ClosingConnectionToTargetTable=Closing connection to target table HBaseOutput.Error.ProblemFlushingBufferedData=A problem occurred while flushing buffered data: {0} HBaseOutput.Error.ProblemWhenClosingConnection=A problem occurred when closing the connection to the target table: {0} HBaseOutput.Error.UnableToObtainConnection=Unable to obtain a connection to HBase: {0} HBaseOutput.Error.NoTargetTableSpecified=No target table specified! HBaseOutput.Error.TargetTableDoesNotExist=Target table "{0}" does not exist! HBaseOutput.Error.TargetTableIsNotAvailable=Target table "{0}" is not available! HBaseOutput.Error.ProblemWhenCheckingAvailReadiness=A problem occurred when trying to check availability/readiness of target table "{0}": {1} HBaseOutput.Error.ProblemGettingMappingInfo=Problem getting mapping information: {0} HBaseOutput.Error.CantFindIncomingField=Can't find incoming field "{0}" defined in the mapping "{1}" HBaseOutput.Error.TableKeyNotPresentInIncomingFields=The table key "{0}" defined in mapping "{1}" does not seem to be present in the incoming fields HBaseOutput.Error.ProblemConnectingToTargetTable=Problem connecting to target table: {0} HBaseOutput.Error.IncomingRowHasNullKeyValue=Incoming row has null key value! HBaseOutput.Error.ProblemInsertingRowIntoHBase=Problem inserting row into HBase: {0} HBaseOutput.Error.UnableToParseZookeeperPort=Unable to parse zookeeper port - using default HBaseOutput.Error.UnableToSetTargetTable=Unable to set a new target table to write to HBaseOutput.Error.UnableToAddColumnToTargetTablePut=Unable to add a column to the current target table put operation HBaseOutput.Error.ServiceStatus=Cannot communicate with HBaseService\nSaving the transformation may lose data.\nPlease correct the communication issue before working with this transformation\n HBaseOutput.Error.ErrorCreatingDelete=Error creating the HBase delete! HBaseOutput.Error.MissingFieldData=The incoming row tuple has a null value in the "{0}" field! 
HBaseOutput.Error.ErrorCreatingPut=Error creating the HBase put for tuple row HBaseOutput.Error.NoKeyColumn=No key field was found in the incoming stream HBaseOutput.Error.NoFamilyColumn=No family field was found in the incoming stream HBaseOutput.Error.NoColumnColumn=No column name field was found in the incoming stream HBaseOutput.Error.NoValueColumn=No value field was found in the incoming stream Dialog.Error=Error HBaseOutput.Injection.HBASE_SITE_XML_URL=The address of the hbase-site.xml file. HBaseOutput.Injection.HBASE_DEFAULT_XML_URL=The address of the hbase-default.xml file. HBaseOutput.Injection.TARGET_TABLE_NAME=The name of the HBase table to write. HBaseOutput.Injection.TARGET_MAPPING_NAME=The name of the HBase table map to use. HBaseOutput.Injection.DISABLE_WRITE_TO_WAL=This option will disable writing to the Write Ahead Log (WAL). HBaseOutput.Injection.WRITE_BUFFER_SIZE=Specify the size of the write buffer used to transfer data to HBase. HBaseOutput.Injection.MAPPING=Mappings HBaseOutput.Injection.TABLE_NAME=The name of the HBase table. HBaseOutput.Injection.MAPPING_NAME=The name of the map to use for the HBase table. HBaseOutput.Injection.DELETE_ROW_KEY=Delete the row key (specified in the mapping) from the HBase table. HBaseOutput.Injection.MAPPING_ALIAS=The name to assign to the HBase table key. HBaseOutput.Injection.MAPPING_KEY=This option indicates if the column is the key for the table. HBaseOutput.Injection.MAPPING_COLUMN_FAMILY=The family of the column in the HBase table. HBaseOutput.Injection.MAPPING_COLUMN_NAME=The name of the column in the HBase table. HBaseOutput.Injection.MAPPING_TYPE=The data type of the column. HBaseOutput.Injection.MAPPING_INDEXED_VALUES=Optional comma-separated set of legal values if the column is a String type. ================================================ FILE: kettle-plugins/hbase/core/src/main/resources/org/pentaho/big/data/kettle/plugins/hbase/rowdecoder/messages/messages_en_US.properties ================================================ HBaseRowDecoder.Name=HBase row decoder HBaseRowDecoder.Description=Decodes an incoming key and HBase result object according to a mapping HBaseRowDecoderDialog.Shell.Title=HBase row decoder HBaseRowDecoderDialog.StepName.Label=Step name HBaseRowDecoderDialog.ConfigTab.TabTitle=Configure fields HBaseRowDecoderDialog.MappingEditorTab.TabTitle=Create/Edit mappings HBaseRowDecoderDialog.KeyField.Label=Key field HBaseRowDecoderDialog.ResultField.Label=HBase result field HBaseRowDecoderDialog.Error.IssuesWithMapping.Title=Problems with mapping HBaseRowDecoderDialog.Error.IssuesWithMapping=There are some problems with the mapping that need rectification HBaseRowDecoderDialog.Error.IssuesWithMapping.ButtonOK=OK and close HBaseRowDecoderDialog.Error.IssuesWithMapping.ButtonCancel=Cancel and rectify HBaseRowDecoder.Error.NoMappingInfo=No mapping information defined! HBaseRowDecoder.Error.UnableToFindHBaseKey=Unable to find HBase key field {0} in the incoming stream! HBaseRowDecoder.Error.NotImmutableBytesWritable=HBase key {0} is not ImmutableBytesWritable HBaseRowDecoder.Error.UnableToFindHBaseRow=Unable to find HBase result/row field {0} in the incoming stream! HBaseRowDecoder.Error.NotResult=HBase row {0} is not a Result object! HBaseRowDecoder.Error.UnableToGetRowKey=Unable to get row key from row object HBaseRowDecoder.Error.UnableToGetColumnValue=Unable to get current column value from row object HBaseRowDecoder.Injection.KEY_FIELD=The name of the input key field. 
HBaseRowDecoder.Injection.HBASE_RESULT_FIELD=The name of the HBase result field. HBaseRowDecoder.Injection.MAPPING=Mappings HBaseRowDecoder.Injection.TABLE_NAME=The name of the HBase table. HBaseRowDecoder.Injection.MAPPING_NAME=The name of the map to use for the HBase table. HBaseRowDecoder.Injection.MAPPING_ALIAS=The name to assign to the HBase table key. HBaseRowDecoder.Injection.MAPPING_KEY=This option indicates if the column is the key for the table. HBaseRowDecoder.Injection.MAPPING_COLUMN_FAMILY=The family of the column in the HBase table. HBaseRowDecoder.Injection.MAPPING_COLUMN_NAME=The name of the column in the HBase table. HBaseRowDecoder.Injection.MAPPING_TYPE=The data type of the column. HBaseRowDecoder.Injection.MAPPING_INDEXED_VALUES=Optional comma-separated set of legal values if the column is a String type. ================================================ FILE: kettle-plugins/hbase/core/src/test/java/org/pentaho/big/data/kettle/plugins/hbase/HbaseUtilTest.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.hbase; import org.junit.Test; import org.pentaho.di.core.variables.VariableSpace; import org.pentaho.di.core.variables.Variables; import static org.junit.Assert.*; public class HbaseUtilTest { @Test public void testParseNamespaceFromTableName() { assertEquals( "namespace", HbaseUtil.parseNamespaceFromTableName( "namespace:qualifier" ) ); assertEquals( "namespace", HbaseUtil.parseNamespaceFromTableName( "namespace:qualifier", "other" ) ); assertEquals( "other", HbaseUtil.parseNamespaceFromTableName( "qualifier", "other" ) ); assertEquals( null, HbaseUtil.parseNamespaceFromTableName( "qualifier", null ) ); } @Test public void testParseQualifierFromTableName() { assertEquals( "qualifier", HbaseUtil.parseQualifierFromTableName( "namespace:qualifier" ) ); assertEquals( "qualifier", HbaseUtil.parseQualifierFromTableName( ":qualifier" ) ); assertEquals( "qualifier", HbaseUtil.parseQualifierFromTableName( "qualifier" ) ); assertEquals( "", HbaseUtil.parseQualifierFromTableName( "namespace:" ) ); } @Test public void testExpandTableName() { assertEquals( "default:", HbaseUtil.expandTableName( null ) ); assertEquals( "default:qualifier", HbaseUtil.expandTableName( "qualifier" ) ); assertEquals( "default:qualifier", HbaseUtil.expandTableName( ":qualifier" ) ); assertEquals( "default:qualifier", HbaseUtil.expandTableName( "qualifier" ) ); assertEquals( "namespace:qualifier", HbaseUtil.expandTableName( "namespace","qualifier" ) ); assertEquals( "namespace:qualifier", HbaseUtil.expandTableName( "namespace","other:qualifier" ) ); } @Test(expected = IllegalArgumentException.class) public void testIllegalArgsInExpandTableName() { HbaseUtil.expandTableName( "","" ); } @Test public void expandLegacyTableNameOnLoad() { assertEquals("default:", HbaseUtil.expandLegacyTableNameOnLoad( null ) ); assertEquals( "default:weblogs", HbaseUtil.expandLegacyTableNameOnLoad( "weblogs" ) ); assertEquals( "ns:weblogs", HbaseUtil.expandLegacyTableNameOnLoad( "ns:weblogs" ) ); assertEquals( "ns:${two}", HbaseUtil.expandLegacyTableNameOnLoad( "ns:${two}" ) ); assertEquals( "default:${two}", 
HbaseUtil.expandLegacyTableNameOnLoad( ":${two}" ) ); assertEquals( "${one}", HbaseUtil.expandLegacyTableNameOnLoad( "${one}" ) ); assertEquals( "%%one%%", HbaseUtil.expandLegacyTableNameOnLoad( "%%one%%" ) ); assertEquals( "${one}:${two}", HbaseUtil.expandLegacyTableNameOnLoad( "${one}:${two}" ) ); assertEquals( "default:", HbaseUtil.expandLegacyTableNameOnLoad( "" ) ); assertEquals( "${one}:two", HbaseUtil.expandLegacyTableNameOnLoad( "${one}:two" ) ); } } ================================================ FILE: kettle-plugins/hbase/core/src/test/java/org/pentaho/big/data/kettle/plugins/hbase/LogInjector.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.hbase; import org.mockito.Mockito; import org.pentaho.di.core.logging.KettleLogStore; import org.pentaho.di.core.logging.LoggingBuffer; import java.lang.reflect.Field; import static org.mockito.Mockito.mock; public class LogInjector { public static LoggingBuffer setMockForLoggingBuffer() throws NoSuchFieldException, IllegalAccessException { Field storeReflectionField = KettleLogStore.class.getDeclaredField( "store" ); storeReflectionField.setAccessible( true ); KettleLogStore kettleLogStoreMock = mock( KettleLogStore.class ); storeReflectionField.set( null, kettleLogStoreMock ); Field appenderReflectionField = KettleLogStore.class.getDeclaredField( "appender" ); appenderReflectionField.setAccessible( true ); LoggingBuffer loggingBuffer = Mockito.spy( new LoggingBuffer( 3 ) ); appenderReflectionField.set( kettleLogStoreMock, loggingBuffer ); return loggingBuffer; } } ================================================ FILE: kettle-plugins/hbase/core/src/test/java/org/pentaho/big/data/kettle/plugins/hbase/NamedClusterLoadSaveUtilTest.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. 
* * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.hbase; import org.junit.Before; import org.junit.Test; import org.pentaho.hadoop.shim.api.cluster.NamedCluster; import org.pentaho.hadoop.shim.api.cluster.NamedClusterService; import org.pentaho.di.core.logging.LogChannelInterface; import org.pentaho.di.core.xml.XMLHandler; import org.pentaho.di.repository.ObjectId; import org.pentaho.di.repository.Repository; import org.pentaho.metastore.api.IMetaStore; import static org.junit.Assert.assertTrue; import static org.junit.Assert.assertFalse; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.doReturn; import static org.mockito.Mockito.when; import static org.mockito.Mockito.eq; import static org.mockito.Mockito.anyInt; import static org.mockito.Mockito.never; import javax.xml.parsers.DocumentBuilder; import javax.xml.parsers.DocumentBuilderFactory; /** * User: Dzmitry Stsiapanau Date: 02/12/2016 Time: 14:10 */ public class NamedClusterLoadSaveUtilTest { public static final String ZOOKEPER_HOST = "someHost"; public static final String ZOOKEEPER_PORT = "2181"; public static final String ZOOKEEPER_HOSTS_KEY = "zookeeper_hosts"; public static final String ZOOKEEPER_PORT_KEY = "zookeeper_port"; private static String xml1 = "<" + ZOOKEEPER_HOSTS_KEY + ">" + ZOOKEPER_HOST + "<" + ZOOKEEPER_PORT_KEY + ">" + ZOOKEEPER_PORT + ""; public static final String SOME_CLUSTER_NAME = "someClusterName"; public static final String CLUSTER_NAME_KEY = "cluster_name"; private static String xml2 = "<" + CLUSTER_NAME_KEY + ">" + SOME_CLUSTER_NAME + "<" + ZOOKEEPER_HOSTS_KEY + ">" + ZOOKEPER_HOST + "<" + ZOOKEEPER_PORT_KEY + ">" + ZOOKEEPER_PORT + ""; // mocks private LogChannelInterface log; private NamedClusterService ncs; private IMetaStore metaStore; private Repository repository; private ObjectId jobId; private ObjectId stepId; private ObjectId transId; private NamedCluster namedCluster; private NamedClusterLoadSaveUtil util; private DocumentBuilder dBuilder; @Before public void setUp() throws Exception { util = new NamedClusterLoadSaveUtil(); dBuilder = DocumentBuilderFactory.newInstance().newDocumentBuilder(); // mocks log = mock( LogChannelInterface.class ); namedCluster = mock( NamedCluster.class ); metaStore = mock( IMetaStore.class ); jobId = mock( ObjectId.class ); stepId = mock( ObjectId.class ); stepId = mock( ObjectId.class ); ncs = mock( NamedClusterService.class ); doReturn( true ).when( ncs ).contains( SOME_CLUSTER_NAME, metaStore ); when( ncs.getClusterTemplate() ).thenReturn( namedCluster ); repository = mock( Repository.class ); doReturn( ZOOKEPER_HOST ).when( repository ).getJobEntryAttributeString( jobId, ZOOKEEPER_HOSTS_KEY ); doReturn( ZOOKEEPER_PORT ).when( repository ).getJobEntryAttributeString( jobId, ZOOKEEPER_PORT_KEY ); } @Test public void testLoadClusterConfigXML_WithoutClusterName() throws Exception { util.loadClusterConfig( ncs, jobId, repository, metaStore, XMLHandler.loadXMLString( dBuilder, xml1 ).getDocumentElement(), log ); verify( ncs ).getClusterTemplate(); verify( namedCluster ).setZooKeeperHost( ZOOKEPER_HOST ); verify( namedCluster ).setZooKeeperPort( ZOOKEEPER_PORT ); } @Test public void testLoadClusterConfigXML_WithClusterName() throws Exception { util.loadClusterConfig( ncs, jobId, repository, metaStore, XMLHandler.loadXMLString( dBuilder, xml2 ).getDocumentElement(), log ); verify( ncs 
).getNamedClusterByName( SOME_CLUSTER_NAME, metaStore ); verify( namedCluster ).setZooKeeperHost( ZOOKEPER_HOST ); verify( namedCluster ).setZooKeeperPort( ZOOKEEPER_PORT ); } @Test public void testLoadClusterConfigRepo_WithoutClusterName() throws Exception { doReturn( null ).when( repository ).getJobEntryAttributeString( jobId, CLUSTER_NAME_KEY ); util.loadClusterConfig( ncs, jobId, repository, metaStore, null, mock( LogChannelInterface.class ) ); verify( ncs ).getClusterTemplate(); verify( namedCluster ).setZooKeeperHost( ZOOKEPER_HOST ); verify( namedCluster ).setZooKeeperPort( ZOOKEEPER_PORT ); } @Test public void testLoadClusterConfigRepo_WithClusterName() throws Exception { doReturn( SOME_CLUSTER_NAME ).when( repository ).getJobEntryAttributeString( jobId, CLUSTER_NAME_KEY ); util.loadClusterConfig( ncs, jobId, repository, metaStore, null, mock( LogChannelInterface.class ) ); verify( ncs ).getNamedClusterByName( SOME_CLUSTER_NAME, metaStore ); verify( namedCluster ).setZooKeeperHost( ZOOKEPER_HOST ); verify( namedCluster ).setZooKeeperPort( ZOOKEEPER_PORT ); } @Test public void testGetXml_WithoutClusterName() throws Exception { when( namedCluster.getZooKeeperHost() ).thenReturn( ZOOKEPER_HOST ); when( namedCluster.getZooKeeperPort() ).thenReturn( ZOOKEEPER_PORT ); StringBuilder retval = new StringBuilder(); util.getXml( retval, ncs, namedCluster, metaStore, log ); assertTrue( retval.toString().contains( ZOOKEEPER_PORT ) ); assertTrue( retval.toString().contains( ZOOKEPER_HOST ) ); } @Test public void testGetXml_WithClusterName() throws Exception { when( namedCluster.getName() ).thenReturn( SOME_CLUSTER_NAME ); when( namedCluster.getZooKeeperHost() ).thenReturn( ZOOKEPER_HOST ); when( namedCluster.getZooKeeperPort() ).thenReturn( ZOOKEEPER_PORT ); StringBuilder retval = new StringBuilder(); util.getXml( retval, ncs, namedCluster, metaStore, log ); assertTrue( retval.toString().contains( ZOOKEEPER_PORT ) ); assertTrue( retval.toString().contains( ZOOKEPER_HOST ) ); assertTrue( retval.toString().contains( SOME_CLUSTER_NAME ) ); } @Test public void testGetXml_WithoutZooKeeper() throws Exception { when( namedCluster.getName() ).thenReturn( SOME_CLUSTER_NAME ); StringBuilder retval = new StringBuilder(); util.getXml( retval, ncs, namedCluster, metaStore, log ); assertFalse( retval.toString().contains( ZOOKEEPER_PORT ) ); assertFalse( retval.toString().contains( ZOOKEPER_HOST ) ); assertTrue( retval.toString().contains( SOME_CLUSTER_NAME ) ); } @Test public void testGetXml_readFromMetastore() throws Exception { when( namedCluster.getName() ).thenReturn( SOME_CLUSTER_NAME ); when( namedCluster.getZooKeeperHost() ).thenReturn( ZOOKEPER_HOST ); when( namedCluster.getZooKeeperPort() ).thenReturn( ZOOKEEPER_PORT ); when( ncs.read( SOME_CLUSTER_NAME, metaStore ) ).thenReturn( namedCluster ); StringBuilder retval = new StringBuilder(); util.getXml( retval, ncs, namedCluster, metaStore, log ); verify( ncs ).read( SOME_CLUSTER_NAME, metaStore ); assertTrue( retval.toString().contains( ZOOKEEPER_PORT ) ); assertTrue( retval.toString().contains( ZOOKEPER_HOST ) ); assertTrue( retval.toString().contains( SOME_CLUSTER_NAME ) ); } @Test public void testSaveRep_WithoutClusterName() throws Exception { when( namedCluster.getZooKeeperHost() ).thenReturn( ZOOKEPER_HOST ); when( namedCluster.getZooKeeperPort() ).thenReturn( ZOOKEEPER_PORT ); util.saveRep( repository, metaStore, transId, stepId, ncs, namedCluster, log ); verify( repository ).saveStepAttribute( eq( transId ), eq( stepId ), anyInt(), eq( 
ZOOKEEPER_HOSTS_KEY ), eq( ZOOKEPER_HOST ) ); verify( repository ).saveStepAttribute( eq( transId ), eq( stepId ), anyInt(), eq( ZOOKEEPER_PORT_KEY ), eq( ZOOKEEPER_PORT ) ); } @Test public void testSaveRep_WithClusterName() throws Exception { when( namedCluster.getName() ).thenReturn( SOME_CLUSTER_NAME ); when( namedCluster.getZooKeeperHost() ).thenReturn( ZOOKEPER_HOST ); when( namedCluster.getZooKeeperPort() ).thenReturn( ZOOKEEPER_PORT ); when( ncs.read( SOME_CLUSTER_NAME, metaStore ) ).thenReturn( namedCluster ); util.saveRep( repository, metaStore, transId, stepId, ncs, namedCluster, log ); verify( repository ).saveStepAttribute( eq( transId ), eq( stepId ), anyInt(), eq( ZOOKEEPER_HOSTS_KEY ), eq( ZOOKEPER_HOST ) ); verify( repository ).saveStepAttribute( eq( transId ), eq( stepId ), anyInt(), eq( ZOOKEEPER_PORT_KEY ), eq( ZOOKEEPER_PORT ) ); verify( repository ).saveStepAttribute( eq( transId ), eq( stepId ), eq( CLUSTER_NAME_KEY ), eq( SOME_CLUSTER_NAME ) ); } @Test public void testSaveRep_WithoutZooKeeper() throws Exception { when( namedCluster.getName() ).thenReturn( SOME_CLUSTER_NAME ); when( ncs.read( SOME_CLUSTER_NAME, metaStore ) ).thenReturn( namedCluster ); util.saveRep( repository, metaStore, transId, stepId, ncs, namedCluster, log ); verify( repository ).saveStepAttribute( eq( transId ), eq( stepId ), eq( CLUSTER_NAME_KEY ), eq( SOME_CLUSTER_NAME ) ); verify( repository, never() ).saveStepAttribute( eq( transId ), eq( stepId ), anyInt(), eq( ZOOKEEPER_HOSTS_KEY ), eq( ZOOKEPER_HOST ) ); verify( repository, never() ).saveStepAttribute( eq( transId ), eq( stepId ), anyInt(), eq( ZOOKEEPER_PORT_KEY ), eq( ZOOKEEPER_PORT ) ); } @Test public void testSaveRep_readFromMetastore() throws Exception { when( namedCluster.getName() ).thenReturn( SOME_CLUSTER_NAME ); when( namedCluster.getZooKeeperHost() ).thenReturn( ZOOKEPER_HOST ); when( namedCluster.getZooKeeperPort() ).thenReturn( ZOOKEEPER_PORT ); when( ncs.read( SOME_CLUSTER_NAME, metaStore ) ).thenReturn( namedCluster ); util.saveRep( repository, metaStore, transId, stepId, ncs, namedCluster, log ); verify( repository ).saveStepAttribute( eq( transId ), eq( stepId ), anyInt(), eq( ZOOKEEPER_HOSTS_KEY ), eq( ZOOKEPER_HOST ) ); verify( repository ).saveStepAttribute( eq( transId ), eq( stepId ), anyInt(), eq( ZOOKEEPER_PORT_KEY ), eq( ZOOKEEPER_PORT ) ); verify( repository ).saveStepAttribute( eq( transId ), eq( stepId ), eq( CLUSTER_NAME_KEY ), eq( SOME_CLUSTER_NAME ) ); } } ================================================ FILE: kettle-plugins/hbase/core/src/test/java/org/pentaho/big/data/kettle/plugins/hbase/input/HBaseInputMetaInjectionTest.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. 
* * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.hbase.input; import org.junit.Before; import org.junit.Test; import org.mockito.Mockito; import org.pentaho.hadoop.shim.api.cluster.NamedClusterService; import org.pentaho.hadoop.shim.api.cluster.NamedClusterServiceLocator; import org.pentaho.di.core.injection.BaseMetadataInjectionTest; import org.pentaho.di.core.osgi.api.MetastoreLocatorOsgi; import org.pentaho.metastore.locator.api.MetastoreLocator; import org.pentaho.runtime.test.RuntimeTester; import org.pentaho.runtime.test.action.RuntimeTestActionService; public class HBaseInputMetaInjectionTest extends BaseMetadataInjectionTest { @Before public void setup() { NamedClusterService namedClusterService = Mockito.mock( NamedClusterService.class ); NamedClusterServiceLocator namedClusterServiceLocator = Mockito.mock( NamedClusterServiceLocator.class ); RuntimeTestActionService runtimeTestActionService = Mockito.mock( RuntimeTestActionService.class ); RuntimeTester runtimeTester = Mockito.mock( RuntimeTester.class ); MetastoreLocator metaStore = Mockito.mock( MetastoreLocator.class ); setup( new HBaseInputMeta( namedClusterService, namedClusterServiceLocator, runtimeTestActionService, runtimeTester, metaStore ) ); } @Test public void test() throws Exception { check( "HBASE_SITE_XML_URL", new StringGetter() { public String get() { return meta.getCoreConfigURL(); } } ); check( "HBASE_DEFAULT_XML_URL", new StringGetter() { public String get() { return meta.getDefaultConfigURL(); } } ); check( "SOURCE_TABLE_NAME", new StringGetter() { public String get() { return meta.getSourceTableName(); } } ); check( "SOURCE_MAPPING_NAME", new StringGetter() { public String get() { return meta.getSourceMappingName(); } } ); check( "START_KEY_VALUE", new StringGetter() { public String get() { return meta.getKeyStartValue(); } } ); check( "STOP_KEY_VALUE", new StringGetter() { public String get() { return meta.getKeyStopValue(); } } ); check( "SCANNER_ROW_CACHE_SIZE", new StringGetter() { public String get() { return meta.getScannerCacheSize(); } } ); check( "MATCH_ANY_FILTER", new BooleanGetter() { public boolean get() { return meta.getMatchAnyFilter(); } } ); check( "OUTPUT_FIELD_KEY", new BooleanGetter() { public boolean get() { return meta.getOutputFieldsDefinition().get( 0 ).isKey(); } } ); check( "OUTPUT_FIELD_ALIAS", new StringGetter() { public String get() { return meta.getOutputFieldsDefinition().get( 0 ).getAlias(); } } ); check( "OUTPUT_FIELD_COLUMN_NAME", new StringGetter() { public String get() { return meta.getOutputFieldsDefinition().get( 0 ).getColumnName(); } } ); check( "OUTPUT_FIELD_FAMILY", new StringGetter() { public String get() { return meta.getOutputFieldsDefinition().get( 0 ).getFamily(); } } ); check( "OUTPUT_FIELD_TYPE", new StringGetter() { public String get() { return meta.getOutputFieldsDefinition().get( 0 ).getHbaseType(); } } ); check( "OUTPUT_FIELD_FORMAT", new StringGetter() { public String get() { return meta.getOutputFieldsDefinition().get( 0 ).getFormat(); } } ); check( "ALIAS", new StringGetter() { public String get() { return meta.getFiltersDefinition().get( 0 ).getAlias(); } } ); check( "FIELD_TYPE", new StringGetter() { public String get() { return meta.getFiltersDefinition().get( 0 ).getFieldType(); } } ); check( "SIGNED_COMPARISON", new BooleanGetter() { public boolean get() { return meta.getFiltersDefinition().get( 0 ).isSignedComparison(); } } ); check( 
"COMPARISON_VALUE", new StringGetter() { public String get() { return meta.getFiltersDefinition().get( 0 ).getConstant(); } } ); check( "FORMAT", new StringGetter() { public String get() { return meta.getFiltersDefinition().get( 0 ).getFormat(); } } ); check( "TABLE_NAME", new StringGetter() { public String get() { return meta.getMappingDefinition().getTableName(); } } ); check( "MAPPING_NAME", new StringGetter() { public String get() { return meta.getMappingDefinition().getMappingName(); } } ); check( "MAPPING_ALIAS", new StringGetter() { public String get() { return meta.getMappingDefinition().getMappingColumns().get( 0 ).getAlias(); } } ); check( "MAPPING_KEY", new BooleanGetter() { public boolean get() { return meta.getMappingDefinition().getMappingColumns().get( 0 ).isKey(); } } ); check( "MAPPING_COLUMN_FAMILY", new StringGetter() { public String get() { return meta.getMappingDefinition().getMappingColumns().get( 0 ).getColumnFamily(); } } ); check( "MAPPING_COLUMN_NAME", new StringGetter() { public String get() { return meta.getMappingDefinition().getMappingColumns().get( 0 ).getColumnName(); } } ); check( "MAPPING_TYPE", new StringGetter() { public String get() { return meta.getMappingDefinition().getMappingColumns().get( 0 ).getType(); } } ); check( "MAPPING_INDEXED_VALUES", new StringGetter() { public String get() { return meta.getMappingDefinition().getMappingColumns().get( 0 ).getIndexedValues(); } } ); skipPropertyTest( "COMPARISON_TYPE" ); } } ================================================ FILE: kettle-plugins/hbase/core/src/test/java/org/pentaho/big/data/kettle/plugins/hbase/input/HBaseInputMetaTest.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. 
* * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.hbase.input; import org.apache.commons.io.IOUtils; import org.junit.Test; import org.junit.runner.RunWith; import org.mockito.InjectMocks; import org.mockito.Mock; import org.mockito.Mockito; import org.mockito.junit.MockitoJUnitRunner; import org.pentaho.big.data.kettle.plugins.hbase.LogInjector; import org.pentaho.big.data.kettle.plugins.hbase.MappingDefinition; import org.pentaho.big.data.kettle.plugins.hbase.NamedClusterLoadSaveUtil; import org.pentaho.big.data.kettle.plugins.hbase.ServiceStatus; import org.pentaho.di.core.exception.KettleException; import org.pentaho.di.core.logging.KettleLogStore; import org.pentaho.di.core.logging.LoggingBuffer; import org.pentaho.di.repository.ObjectId; import org.pentaho.di.trans.steps.loadsave.MemoryRepository; import org.pentaho.hadoop.shim.api.cluster.ClusterInitializationException; import org.pentaho.hadoop.shim.api.cluster.NamedCluster; import org.pentaho.hadoop.shim.api.cluster.NamedClusterService; import org.pentaho.hadoop.shim.api.cluster.NamedClusterServiceLocator; import org.pentaho.hadoop.shim.api.hbase.HBaseService; import org.pentaho.hadoop.shim.api.hbase.mapping.Mapping; import org.pentaho.hadoop.shim.api.hbase.mapping.MappingFactory; import org.pentaho.hadoop.shim.api.hbase.meta.HBaseValueMetaInterfaceFactory; import org.pentaho.metastore.api.IMetaStore; import org.w3c.dom.Document; import org.w3c.dom.Node; import org.xml.sax.InputSource; import org.xml.sax.SAXException; import javax.imageio.metadata.IIOMetadataNode; import javax.xml.parsers.DocumentBuilder; import javax.xml.parsers.DocumentBuilderFactory; import javax.xml.parsers.ParserConfigurationException; import java.io.IOException; import java.io.StringReader; import java.util.ArrayList; import java.util.List; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertTrue; import static org.mockito.ArgumentMatchers.any; import static org.mockito.Mockito.atLeast; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; @RunWith( MockitoJUnitRunner.class ) public class HBaseInputMetaTest { @InjectMocks HBaseInputMeta hBaseInputMeta; @Mock NamedCluster namedCluster; @Mock NamedClusterServiceLocator namedClusterServiceLocator; @Mock HBaseService hBaseService; @Mock MappingDefinition mappingDefinition; @Mock NamedClusterLoadSaveUtil namedClusterLoadSaveUtil; @Mock IMetaStore metaStore; @Mock NamedClusterService namedClusterService; /** * actual for bug BACKLOG-9529 */ @Test public void testLogSuccessfulForGetXml() throws Exception { HBaseInputMeta spy = Mockito.spy( hBaseInputMeta ); spy.setNamedCluster( namedCluster ); LoggingBuffer loggingBuffer = LogInjector.setMockForLoggingBuffer(); Mockito.doThrow( new KettleException( "Unexpected error occured" ) ).when( spy ).applyInjection( any() ); spy.getXML(); verify( loggingBuffer, atLeast( 1 ) ).addLogggingEvent( any() ); } /** * actual for bug BACKLOG-9629 */ @SuppressWarnings( "unchecked" ) @Test public void testApplyInjectionDefinitionsExists() throws Exception { HBaseInputMeta hBaseInputMetaSpy = Mockito.spy( hBaseInputMeta ); hBaseInputMetaSpy.setNamedCluster( namedCluster ); when( namedClusterServiceLocator.getService( namedCluster, HBaseService.class, null ) 
).thenReturn( hBaseService ); hBaseInputMetaSpy.setMappingDefinition( mappingDefinition ); List list = mock( List.class ); hBaseInputMetaSpy.setOutputFieldsDefinition( list ); hBaseInputMetaSpy.setFiltersDefinition( list ); Mockito.doReturn( list ).when( hBaseInputMetaSpy ).createOutputFieldsDefinition( any(), any() ); Mockito.doReturn( list ).when( hBaseInputMetaSpy ).createColumnFiltersFromDefinition( any() ); Mockito.doReturn( null ).when( hBaseInputMetaSpy ).getMapping( any(), any() ); hBaseInputMetaSpy.getXML(); verify( hBaseInputMetaSpy, times( 1 ) ).setMapping( any() ); verify( hBaseInputMetaSpy, times( 1 ) ).setOutputFields( any() ); verify( hBaseInputMetaSpy, times( 1 ) ).setColumnFilters( any() ); } /** * actual for bug BACKLOG-9629 */ @Test public void testApplyInjectionDefinitionsNull() throws Exception { HBaseInputMeta hBaseInputMetaSpy = Mockito.spy( hBaseInputMeta ); hBaseInputMetaSpy.setNamedCluster( namedCluster ); hBaseInputMetaSpy.setMappingDefinition( null ); hBaseInputMetaSpy.setOutputFieldsDefinition( null ); hBaseInputMetaSpy.setFiltersDefinition( null ); hBaseInputMetaSpy.getXML(); verify( hBaseInputMetaSpy, times( 0 ) ).setMapping( any() ); verify( hBaseInputMetaSpy, times( 0 ) ).getMapping(); verify( hBaseInputMetaSpy, times( 0 ) ).setOutputFields( any() ); verify( hBaseInputMetaSpy, times( 0 ) ).setColumnFilters( any() ); } @Test public void testLoadXmlDoesntBubleUpException() throws Exception { KettleLogStore.init(); ClusterInitializationException exception = new ClusterInitializationException( new Exception() ); hBaseInputMeta.setNamedCluster( namedCluster ); when( namedClusterServiceLocator.getService( namedCluster, HBaseService.class, null ) ).thenThrow( exception ); when( namedClusterService.getClusterTemplate() ).thenReturn( namedCluster ); IIOMetadataNode node = new IIOMetadataNode(); IIOMetadataNode child = new IIOMetadataNode( "disable_wal" ); IIOMetadataNode grandChild = new IIOMetadataNode(); grandChild.setNodeValue( "N" ); child.appendChild( grandChild ); node.appendChild( child ); hBaseInputMeta.loadXML( node, new ArrayList<>(), metaStore ); ServiceStatus serviceStatus = hBaseInputMeta.getServiceStatus(); assertNotNull( serviceStatus ); assertFalse( serviceStatus.isOk() ); assertEquals( exception, serviceStatus.getException() ); } @Test public void testLoadXmlServiceStatusOk() throws Exception { KettleLogStore.init(); hBaseInputMeta.setNamedCluster( namedCluster ); when( namedClusterService.getClusterTemplate() ).thenReturn( namedCluster ); IIOMetadataNode node = new IIOMetadataNode(); IIOMetadataNode child = new IIOMetadataNode( "disable_wal" ); IIOMetadataNode grandChild = new IIOMetadataNode(); grandChild.setNodeValue( "N" ); child.appendChild( grandChild ); node.appendChild( child ); hBaseInputMeta.loadXML( node, new ArrayList<>(), metaStore ); ServiceStatus serviceStatus = hBaseInputMeta.getServiceStatus(); assertNotNull( serviceStatus ); assertTrue( serviceStatus.isOk() ); } @Test public void testReadRepDoesntBubleUpException() throws Exception { KettleLogStore.init(); ClusterInitializationException exception = new ClusterInitializationException( new Exception() ); hBaseInputMeta.setNamedCluster( namedCluster ); when( namedClusterServiceLocator.getService( namedCluster, HBaseService.class, null ) ).thenThrow( exception ); when( namedClusterService.getClusterTemplate() ).thenReturn( namedCluster ); hBaseInputMeta.readRep( new MemoryRepository(), metaStore, mock( ObjectId.class ), new ArrayList<>() ); ServiceStatus serviceStatus = 
hBaseInputMeta.getServiceStatus(); assertNotNull( serviceStatus ); assertFalse( serviceStatus.isOk() ); assertEquals( exception, serviceStatus.getException() ); } @Test public void testReadRepServiceStatusOk() throws Exception { KettleLogStore.init(); hBaseInputMeta.setNamedCluster( namedCluster ); when( namedClusterService.getClusterTemplate() ).thenReturn( namedCluster ); MappingFactory mappingFactory = mock( MappingFactory.class ); hBaseInputMeta.readRep( new MemoryRepository(), metaStore, mock( ObjectId.class ), new ArrayList<>() ); ServiceStatus serviceStatus = hBaseInputMeta.getServiceStatus(); assertNotNull( serviceStatus ); assertTrue( serviceStatus.isOk() ); } @Test public void testLoadingAELMappingFromStepNode() throws Exception { KettleLogStore.init(); hBaseInputMeta.setMapping( null ); hBaseInputMeta.setNamedCluster( namedCluster ); when( namedClusterService.getClusterTemplate() ).thenReturn( namedCluster ); hBaseInputMeta.loadXML( getMappingNode(), new ArrayList<>(), metaStore ); assertNotNull( hBaseInputMeta.m_mapping ); } private Node getMappingNode() throws IOException, ParserConfigurationException, SAXException { String xml = IOUtils.toString( getClass().getClassLoader().getResourceAsStream( "StubMapping.xml" ) ); DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance(); DocumentBuilder builder = factory.newDocumentBuilder(); Document doc = builder.parse( new InputSource( new StringReader( xml ) ) ); return doc.getDocumentElement(); } } ================================================ FILE: kettle-plugins/hbase/core/src/test/java/org/pentaho/big/data/kettle/plugins/hbase/mapping/MappingAdminTest.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. 
* * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.hbase.mapping; import com.pentaho.big.data.bundles.impl.shim.hbase.ByteConversionUtilImpl; import com.pentaho.big.data.bundles.impl.shim.hbase.mapping.MappingFactoryImpl; import com.pentaho.big.data.bundles.impl.shim.hbase.meta.HBaseValueMetaInterfaceFactoryImpl; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import org.mockito.Mock; import org.mockito.Mockito; import org.mockito.junit.MockitoJUnitRunner; import org.pentaho.di.core.KettleEnvironment; import org.pentaho.di.core.exception.KettleException; import org.pentaho.di.trans.TransMeta; import org.pentaho.di.trans.step.BaseStepMeta; import org.pentaho.di.trans.step.StepMeta; import org.pentaho.hadoop.shim.api.hbase.ByteConversionUtil; import org.pentaho.hadoop.shim.api.hbase.HBaseConnection; import org.pentaho.hadoop.shim.api.hbase.Result; import org.pentaho.hadoop.shim.api.hbase.mapping.Mapping; import org.pentaho.hadoop.shim.api.hbase.mapping.MappingFactory; import org.pentaho.hadoop.shim.api.hbase.meta.HBaseValueMetaInterface; import org.pentaho.hadoop.shim.api.hbase.table.HBaseDelete; import org.pentaho.hadoop.shim.api.hbase.table.HBasePut; import org.pentaho.hadoop.shim.api.hbase.table.HBaseTable; import org.pentaho.hadoop.shim.api.hbase.table.HBaseTableWriteOperationManager; import org.pentaho.hadoop.shim.api.hbase.table.ResultScanner; import org.pentaho.hadoop.shim.api.hbase.table.ResultScannerBuilder; import org.pentaho.hadoop.shim.api.internal.hbase.HBaseBytesUtilShim; import java.io.IOException; import java.util.Arrays; import java.util.Comparator; import java.util.List; import java.util.Map; import java.util.NavigableMap; import java.util.Set; import java.util.TreeMap; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNotEquals; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertTrue; import static org.mockito.ArgumentMatchers.any; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; /** * Created by Aliaksandr_Zhuk on 2/14/2018. 
*/ @RunWith( MockitoJUnitRunner.class ) public class MappingAdminTest { private TransMeta transMeta; private BaseStepMeta stepMeta; private StepMeta parentStepMeta; private MappingAdmin mappingAdmin; @Mock private HBaseConnection mockHbaseConnection; @Mock private HBaseTable mockPopulatedMappingTable; @Mock HBaseDelete mockHBaseDelete; @Mock HBasePut mockHBasePut; private HBaseBytesUtilShim hBaseBytesUtilShim = new MockHBaseByteConverterUsingJavaByteBuffer(); private ByteConversionUtil mockByteConversionUtil = new ByteConversionUtilImpl( hBaseBytesUtilShim ); private final static String MAPPING_TABLE_NAME = "pentaho_mappings"; @Before public void setUp() throws KettleException { KettleEnvironment.init(); transMeta = Mockito.spy( new TransMeta() ); stepMeta = Mockito.spy( new BaseStepMeta() ); parentStepMeta = Mockito.spy( new StepMeta() ); parentStepMeta.setParentTransMeta( transMeta ); stepMeta.setParentStepMeta( parentStepMeta ); when( mockHbaseConnection.getByteConversionUtil() ).thenReturn( mockByteConversionUtil ); mappingAdmin = new MappingAdmin( mockHbaseConnection ); } @Test public void testGetTableNameFromVariable_whenVariableValueExists() { String expectedTableName = "hbweblogs"; transMeta.setVariable( "hb_weblogs", "hbweblogs" ); String tableName = MappingAdmin.getTableNameFromVariable( stepMeta, "${hb_weblogs}" ); assertEquals( expectedTableName, tableName ); } @Test public void testGetTableNameFromVariable_whenNoVariable() { String expectedTableName = "hbweblogs"; String expectedResult = "${hb_weblogs}"; String tableName = MappingAdmin.getTableNameFromVariable( stepMeta, "${hb_weblogs}" ); assertNotEquals( expectedTableName, tableName ); assertEquals( expectedResult, tableName ); } @Test public void setAndGetMappingTableName() { mappingAdmin.setMappingTableName( "mappingtbl" ); assertEquals( "mappingtbl", mappingAdmin.getMappingTableName() ); } @Test public void createMappingTable() throws Exception { HBaseTable mockHbaseMappingTable = mock( HBaseTable.class ); when( mockHbaseConnection.getTable( "ns:" + MAPPING_TABLE_NAME ) ).thenReturn( mockHbaseMappingTable ); when( mockHbaseMappingTable.exists() ).thenReturn( false ); mappingAdmin.createMappingTable( "ns:tablename" ); verify( mockHbaseMappingTable, times( 1 ) ).create( any(), any() ); } @Test( expected = IOException.class ) public void createMappingTableWhenExists() throws Exception { HBaseTable mockHbaseMappingTable = mock( HBaseTable.class ); when( mockHbaseConnection.getTable( "ns:" + MAPPING_TABLE_NAME ) ).thenReturn( mockHbaseMappingTable ); when( mockHbaseMappingTable.exists() ).thenReturn( true ); mappingAdmin.createMappingTable( "ns:tablename" ); } @Test public void mappingExists() throws Exception { setupMappingStructure(); assertTrue( mappingAdmin.mappingExists( "populated:table1", "map1" ) ); } @Test public void testMappingExistsNegative() throws Exception { setupMappingStructure(); assertFalse( mappingAdmin.mappingExists( "populated:table1", "mapx" ) ); } @Test public void getMappedTables() throws Exception { setupMappingStructure(); Set mappedTables = mappingAdmin.getMappedTables( null ); assertEquals( 2, mappedTables.size() ); assertTrue( mappedTables.contains( "populated:table1" ) ); assertTrue( mappedTables.contains( "populated:table2" ) ); } private void setupMappingStructure() throws Exception { when( mockHbaseConnection.listNamespaces() ).thenReturn( Arrays.asList( "populated", "unpopulated" ) ); when( mockHbaseConnection.getTable( "populated:" + MAPPING_TABLE_NAME ) ).thenReturn( 
mockPopulatedMappingTable ); when( mockPopulatedMappingTable.exists() ).thenReturn( true ); when( mockPopulatedMappingTable.keyExists( "table1,map1".getBytes() ) ).thenReturn( true ); ResultScannerBuilder mockResultScannerBuilder = mock( ResultScannerBuilder.class ); when( mockPopulatedMappingTable.createScannerBuilder( any(), any() ) ).thenReturn( mockResultScannerBuilder ); ResultScanner mockResultScanner = mock( ResultScanner.class ); when( mockResultScannerBuilder.build() ).thenReturn( mockResultScanner ); Result result1 = mock( Result.class ); when( result1.getRow() ).thenReturn( "table1,map1".getBytes() ); Result result2 = mock( Result.class ); when( result2.getRow() ).thenReturn( "table1,map2".getBytes() ); Result result3 = mock( Result.class ); when( result3.getRow() ).thenReturn( "table2,map1".getBytes() ); when( mockResultScanner.next() ).thenReturn( result1, result2, result3, null ); HBaseTable mockTwoMappingTable = mock( HBaseTable.class ); when( mockHbaseConnection.getTable( "unpopulated:" + MAPPING_TABLE_NAME ) ).thenReturn( mockTwoMappingTable ); when( mockTwoMappingTable.exists() ).thenReturn( false ); // From here down added for getMapping test NavigableMap keyFamilyMap = new TreeMap<>( new ByteArrayComparator() ); keyFamilyMap.put( "key".getBytes(), "String".getBytes() ); when( result1.getFamilyMap( "key" ) ).thenReturn( keyFamilyMap ); NavigableMap columnFamilyMap = new TreeMap<>( new ByteArrayComparator() ); columnFamilyMap.put( "colFamily,colName1,aliascol1".getBytes(), "String".getBytes() ); columnFamilyMap.put( "colFamily,colName2,aliascol2".getBytes(), "Integer".getBytes() ); when( result1.getFamilyMap( "columns" ) ).thenReturn( columnFamilyMap ); HBaseValueMetaInterfaceFactoryImpl hBaseValueMetaInterfaceFactory = new HBaseValueMetaInterfaceFactoryImpl( hBaseBytesUtilShim ); when( mockHbaseConnection.getHBaseValueMetaInterfaceFactory() ).thenReturn( hBaseValueMetaInterfaceFactory ); MappingFactory mappingFactory = new MappingFactoryImpl( hBaseBytesUtilShim, hBaseValueMetaInterfaceFactory ); when( mockHbaseConnection.getMappingFactory() ).thenReturn( mappingFactory ); // From here down added for deleteMapping test HBaseTableWriteOperationManager mockHBaseTableWriteOperationManager = mock( HBaseTableWriteOperationManager.class ); when( mockPopulatedMappingTable.createWriteOperationManager( null ) ) .thenReturn( mockHBaseTableWriteOperationManager ); when( mockHBaseTableWriteOperationManager.createDelete( "table1,map1".getBytes() ) ).thenReturn( mockHBaseDelete ); // From here down added for putMapping test when( mockHBaseTableWriteOperationManager.createPut( "table1,map1".getBytes() ) ).thenReturn( mockHBasePut ); } @Test public void getMappingNames() throws Exception { setupMappingStructure(); List mappingNames = mappingAdmin.getMappingNames( "populated:table1" ); assertEquals( 2, mappingNames.size() ); assertTrue( mappingNames.contains( "map1" ) ); assertTrue( mappingNames.contains( "map2" ) ); } @Test public void getMapping() throws Exception { setupMappingStructure(); Mapping mapping = mappingAdmin.getMapping( "populated:table1", "map1" ); assertEquals( "map1", mapping.getMappingName() ); assertEquals( "populated:table1", mapping.getTableName() ); assertEquals( "key", mapping.getKeyName() ); assertEquals( Mapping.KeyType.STRING, mapping.getKeyType() ); Map mappedColumns = mapping.getMappedColumns(); assertTrue( mappedColumns.containsKey( "aliascol1" ) ); assertTrue( mappedColumns.containsKey( "aliascol2" ) ); assertEquals( "map1", mappedColumns.get( "aliascol1" 
).getMappingName() ); assertEquals( "colFamily", mappedColumns.get( "aliascol1" ).getColumnFamily() ); assertEquals( "colName1", mappedColumns.get( "aliascol1" ).getColumnName() ); } @Test public void deleteMapping() throws Exception { setupMappingStructure(); Mapping mapping = mappingAdmin.getMapping( "populated:table1", "map1" ); assertNotNull( mapping ); mappingAdmin.deleteMapping( mapping ); verify( mockHBaseDelete ).execute(); } @Test public void putMapping() throws Exception { setupMappingStructure(); Mapping mapping = mappingAdmin.getMapping( "populated:table1", "map1" ); assertNotNull( mapping ); mappingAdmin.putMapping( mapping, true ); verify( mockHBasePut, times( 1 ) ).createColumnName( "colFamily", "colName1", "aliascol1" ); verify( mockHBasePut, times( 1 ) ).createColumnName( "colFamily", "colName2", "aliascol2" ); verify( mockHBasePut, times( 1 ) ).createColumnName( "key" ); verify( mockHBasePut, times( 1 ) ).execute(); } @Test public void describeMapping() throws Exception { setupMappingStructure(); Mapping mapping = mappingAdmin.getMapping( "populated:table1", "map1" ); assertNotNull( mapping ); String desc = mappingAdmin.describeMapping( mapping ); assertNotNull( desc ); assertTrue( !desc.isEmpty() ); } @Test public void close() throws Exception { mappingAdmin.close(); verify( mockHbaseConnection ).close(); } @Test public void getConnection() { assertEquals( mockHbaseConnection, mappingAdmin.getConnection() ); } static class ByteArrayComparator implements Comparator { @Override public int compare( byte[] a, byte[] b ) { if ( a == b ) { return 0; } if ( a == null || b == null ) { throw new NullPointerException(); } int length = a.length; int cmp; if ( ( cmp = Integer.compare( length, b.length ) ) != 0 ) { return cmp; } for ( int i = 0; i < length; i++ ) { if ( ( cmp = Byte.compare( a[ i ], b[ i ] ) ) != 0 ) { return cmp; } } return 0; } } } ================================================ FILE: kettle-plugins/hbase/core/src/test/java/org/pentaho/big/data/kettle/plugins/hbase/mapping/MappingUtilsTest.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. 
* * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.hbase.mapping; import org.junit.Test; import org.mockito.invocation.InvocationOnMock; import org.mockito.stubbing.Answer; import org.pentaho.big.data.kettle.plugins.hbase.HBaseConnectionException; import org.pentaho.big.data.kettle.plugins.hbase.MappingDefinition; import org.pentaho.big.data.kettle.plugins.hbase.MappingDefinition.MappingColumn; import org.pentaho.di.core.exception.KettleException; import org.pentaho.di.core.logging.LogChannelInterface; import org.pentaho.di.core.row.ValueMetaInterface; import org.pentaho.di.core.variables.VariableSpace; import org.pentaho.hadoop.shim.api.cluster.ClusterInitializationException; import org.pentaho.hadoop.shim.api.hbase.ByteConversionUtil; import org.pentaho.hadoop.shim.api.hbase.HBaseConnection; import org.pentaho.hadoop.shim.api.hbase.HBaseService; import org.pentaho.hadoop.shim.api.hbase.mapping.Mapping; import org.pentaho.hadoop.shim.api.hbase.mapping.MappingFactory; import org.pentaho.hadoop.shim.api.hbase.meta.HBaseValueMetaInterface; import org.pentaho.hadoop.shim.api.hbase.meta.HBaseValueMetaInterfaceFactory; import java.io.IOException; import java.util.ArrayList; import java.util.Collections; import java.util.List; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertSame; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.anyInt; import static org.mockito.ArgumentMatchers.anyString; import static org.mockito.ArgumentMatchers.nullable; import static org.mockito.ArgumentMatchers.same; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; /** * @author Tatsiana_Kasiankova */ public class MappingUtilsTest { private static final String STRING_TYPE = "String"; private static final String TEST_TABLE_NAME = "TEST_TABLE_NAME"; private static final String TEST_MAPPING_NAME = "TEST_MAPPING_NAME"; private static final String ALIAS_STRING = "alias"; private static final String VALUE_STRING = "value"; private static final String KEY_STRING = "key"; private static final int FAMILIY_ARG_INDEX = 0; private static final int NAME_ARG_INDEX = 1; private static final int ALIAS_ARG_INDEX = 2; /** * */ private static final String UNABLE_TO_CONNECT_TO_H_BASE = "Unable to connect to HBase"; private ConfigurationProducer cProducerMock = mock( ConfigurationProducer.class ); private HBaseConnection hbConnectionMock = mock( HBaseConnection.class ); @Test public void testGetMappingAdmin_NoException() { try { when( cProducerMock.getHBaseConnection() ).thenReturn( hbConnectionMock ); MappingAdmin mappingAdmin = MappingUtils.getMappingAdmin( cProducerMock ); assertNotNull( mappingAdmin ); assertSame( hbConnectionMock, mappingAdmin.getConnection() ); verify( hbConnectionMock ).checkHBaseAvailable(); } catch ( Exception e ) { fail( "No exception expected but it occurs!" 
); } } @Test public void testGetMappingAdmin_ClusterInitializationExceptionToHBaseConnectionException() throws Exception { ClusterInitializationException clusterInitializationException = new ClusterInitializationException( new Exception( "ClusterInitializationException" ) ); try { when( cProducerMock.getHBaseConnection() ).thenThrow( clusterInitializationException ); MappingUtils.getMappingAdmin( cProducerMock ); fail( "Expected HBaseConnectionException but it did not occur!" ); } catch ( HBaseConnectionException e ) { assertEquals( UNABLE_TO_CONNECT_TO_H_BASE, e.getMessage() ); assertSame( clusterInitializationException, e.getCause() ); } } @Test public void testGetMappingAdmin_IOExceptionToHBaseConnectionException() throws Exception { IOException ioException = new IOException( "IOException" ); try { when( cProducerMock.getHBaseConnection() ).thenThrow( ioException ); MappingUtils.getMappingAdmin( cProducerMock ); fail( "Expected HBaseConnectionException but it did not occur!" ); } catch ( HBaseConnectionException e ) { assertEquals( UNABLE_TO_CONNECT_TO_H_BASE, e.getMessage() ); assertSame( ioException, e.getCause() ); } } @Test public void testIsTupleMappingColumn() { for ( Mapping.TupleMapping tupleColumn : Mapping.TupleMapping.values() ) { boolean result = MappingUtils.isTupleMappingColumn( tupleColumn.toString() ); assertTrue( result ); } } @Test public void testIsTupleMappingColumn_NotTupleColumn() { boolean result = MappingUtils.isTupleMappingColumn( "NOT_A_TUPLE_COLUMN" ); assertFalse( result ); } @Test public void testIsTupleMapping() { MappingDefinition tupleMappingDefinition = new MappingDefinition(); tupleMappingDefinition.setMappingColumns( buildTupleMapping() ); boolean result = MappingUtils.isTupleMapping( tupleMappingDefinition ); assertTrue( result ); } @Test public void testIsTupleMapping_NoTupleMapping() { MappingDefinition tupleMappingDefinition = new MappingDefinition(); tupleMappingDefinition.setMappingColumns( buildNoTupleMapping() ); boolean result = MappingUtils.isTupleMapping( tupleMappingDefinition ); assertFalse( result ); } @Test public void testGetMappingAdmin() throws IOException { HBaseService hBaseService = mock( HBaseService.class ); HBaseConnection hBaseConnection = mock( HBaseConnection.class ); when( hBaseService.getHBaseConnection( any( VariableSpace.class ), anyString(), anyString(), any( LogChannelInterface.class ) ) ).thenReturn( hBaseConnection ); VariableSpace variableSpace = mock( VariableSpace.class ); MappingUtils.getMappingAdmin( hBaseService, variableSpace, "SITE_CONFIG", "DEFAULT_CONFIG" ); } @Test public void testBuildNonKeyValueMeta() throws KettleException { HBaseService hBaseService = mock( HBaseService.class ); ByteConversionUtil byteConversionUtil = mock( ByteConversionUtil.class ); when( hBaseService.getByteConversionUtil() ).thenReturn( byteConversionUtil ); HBaseValueMetaInterfaceFactory valueMetaInterfaceFactory = mock( HBaseValueMetaInterfaceFactory.class ); when( hBaseService.getHBaseValueMetaInterfaceFactory() ).thenReturn( valueMetaInterfaceFactory ); HBaseValueMetaInterface valueMeta = mock( HBaseValueMetaInterface.class ); when( valueMeta.isString() ).thenReturn( true ); when( valueMetaInterfaceFactory.createHBaseValueMetaInterface( same( "FAMILY" ), same( "COLUMN_NAME" ), same( "ALIAS" ), anyInt(), anyInt(), anyInt() ) ).thenReturn( valueMeta ); HBaseValueMetaInterface column = MappingUtils.buildNonKeyValueMeta( "ALIAS", "FAMILY", "COLUMN_NAME", STRING_TYPE, "INDEXED_VALS", hBaseService ); assertNotNull( column ); verify(
valueMeta ).setHBaseTypeFromString( STRING_TYPE ); verify( valueMeta ).setStorageType( ValueMetaInterface.STORAGE_TYPE_INDEXED ); } @Test( expected = KettleException.class ) public void testGetMapping_UndefinedMappingName() throws Exception { HBaseService hBaseService = mock( HBaseService.class ); MappingDefinition mappingDefinition = buildMappingDefinitionForGetMapping(); mappingDefinition.setMappingName( "" ); MappingUtils.getMapping( mappingDefinition, hBaseService ); } @Test( expected = KettleException.class ) public void testGetMapping_UndefinedColumns() throws Exception { HBaseService hBaseService = mock( HBaseService.class ); MappingDefinition mappingDefinition = buildMappingDefinitionForGetMapping(); mappingDefinition.setMappingColumns( Collections.emptyList() ); MappingUtils.getMapping( mappingDefinition, hBaseService ); } @Test( expected = KettleException.class ) public void testGetMapping_NoKeyDefined() throws Exception { HBaseService hBaseService = mockHBaseService(); MappingUtils.getMapping( buildMappingDefinitionWithoutKey(), hBaseService ); } @Test( expected = KettleException.class ) public void testGetMapping_TwoKeysDefined() throws Exception { HBaseService hBaseService = mockHBaseService(); MappingUtils.getMapping( buildMappingDefinitionWithTwoKeys(), hBaseService ); } @Test( expected = KettleException.class ) public void testGetMapping_keyColumnWithoutAlias() throws Exception { HBaseService hBaseService = mockHBaseService(); MappingDefinition mappingDefinition = createMappingDefinition(); MappingColumn keyColumn = buildKeyColumn( null, STRING_TYPE ); mappingDefinition.setMappingColumns( Collections.singletonList( keyColumn ) ); MappingUtils.getMapping( mappingDefinition, hBaseService ); } @Test( expected = KettleException.class ) public void testGetMapping_keyColumnWithoutType() throws Exception { HBaseService hBaseService = mockHBaseService(); MappingDefinition mappingDefinition = createMappingDefinition(); MappingColumn keyColumn = buildKeyColumn( KEY_STRING, null ); mappingDefinition.setMappingColumns( Collections.singletonList( keyColumn ) ); MappingUtils.getMapping( mappingDefinition, hBaseService ); } @Test( expected = KettleException.class ) public void testGetMapping_columnWithoutFamilyName() throws Exception { HBaseService hBaseService = mockHBaseService(); MappingDefinition mappingDefinition = createMappingDefinition(); List columns = new ArrayList(); MappingColumn keyColumn = buildKeyColumn( KEY_STRING, STRING_TYPE ); columns.add( keyColumn ); MappingColumn otherColumn = buildNoKeyColumn( ALIAS_STRING, null, "columnName", STRING_TYPE ); columns.add( otherColumn ); mappingDefinition.setMappingColumns( columns ); MappingUtils.getMapping( mappingDefinition, hBaseService ); } @Test( expected = KettleException.class ) public void testGetMapping_columnWithoutColumnName() throws Exception { HBaseService hBaseService = mockHBaseService(); MappingDefinition mappingDefinition = createMappingDefinition(); List columns = new ArrayList(); MappingColumn keyColumn = buildKeyColumn( KEY_STRING, STRING_TYPE ); columns.add( keyColumn ); MappingColumn otherColumn = buildNoKeyColumn( ALIAS_STRING, "family", null, STRING_TYPE ); columns.add( otherColumn ); mappingDefinition.setMappingColumns( columns ); MappingUtils.getMapping( mappingDefinition, hBaseService ); } @Test public void testGetMapping() throws Exception { HBaseService hBaseService = mock( HBaseService.class ); HBaseValueMetaInterfaceFactory valueMetaInterfaceFactory = mock( HBaseValueMetaInterfaceFactory.class ); when( 
hBaseService.getHBaseValueMetaInterfaceFactory() ).thenReturn( valueMetaInterfaceFactory ); HBaseValueMetaInterface keyValueMeta = mock( HBaseValueMetaInterface.class ); when( keyValueMeta.isString() ).thenReturn( true ); when( valueMetaInterfaceFactory.createHBaseValueMetaInterface( nullable( String.class ), nullable( String.class ), same( KEY_STRING ), anyInt(), anyInt(), anyInt() ) ).thenReturn( keyValueMeta ); HBaseValueMetaInterface valueValueMeta = mock( HBaseValueMetaInterface.class ); when( keyValueMeta.isString() ).thenReturn( true ); when( valueMetaInterfaceFactory.createHBaseValueMetaInterface( nullable( String.class ), nullable( String.class ), same( VALUE_STRING ), anyInt(), anyInt(), anyInt() ) ).thenReturn( valueValueMeta ); MappingFactory mappingFactory = mock( MappingFactory.class ); when( hBaseService.getMappingFactory() ).thenReturn( mappingFactory ); Mapping mapping = mock( Mapping.class ); when( mappingFactory.createMapping( TEST_TABLE_NAME, TEST_MAPPING_NAME ) ).thenReturn( mapping ); Mapping result = MappingUtils.getMapping( buildMappingDefinitionForGetMapping(), hBaseService ); assertNotNull( result ); verify( mapping ).setKeyName( KEY_STRING ); verify( mapping ).addMappedColumn( valueValueMeta, false ); } private static HBaseService mockHBaseService() { HBaseService hBaseService = mock( HBaseService.class ); HBaseValueMetaInterfaceFactory valueMetaInterfaceFactory = mock( HBaseValueMetaInterfaceFactory.class ); when( hBaseService.getHBaseValueMetaInterfaceFactory() ).thenReturn( valueMetaInterfaceFactory ); when( valueMetaInterfaceFactory.createHBaseValueMetaInterface( nullable( String.class ), nullable( String.class ), anyString(), anyInt(), anyInt(), anyInt() ) ).thenAnswer( new Answer() { @Override public HBaseValueMetaInterface answer( InvocationOnMock invocation ) throws Throwable { Object[] args = invocation.getArguments(); String columnFamily = (String) args[ FAMILIY_ARG_INDEX ]; String columnName = (String) args[ NAME_ARG_INDEX ]; String alias = (String) args[ ALIAS_ARG_INDEX ]; HBaseValueMetaInterface valueMeta = mock( HBaseValueMetaInterface.class ); when( valueMeta.getAlias() ).thenReturn( alias ); when( valueMeta.getColumnFamily() ).thenReturn( columnFamily ); when( valueMeta.getColumnName() ).thenReturn( columnName ); return valueMeta; } } ); MappingFactory mappingFactory = mock( MappingFactory.class ); when( hBaseService.getMappingFactory() ).thenReturn( mappingFactory ); Mapping mapping = mock( Mapping.class ); when( mappingFactory.createMapping( TEST_TABLE_NAME, TEST_MAPPING_NAME ) ).thenReturn( mapping ); return hBaseService; } private static MappingDefinition buildMappingDefinitionWithoutKey() { MappingDefinition mappingDefinition = createMappingDefinition(); MappingColumn valueColumn = new MappingColumn(); valueColumn.setAlias( VALUE_STRING ); valueColumn.setType( STRING_TYPE ); valueColumn.setColumnFamily( "family" ); valueColumn.setColumnName( "name" ); mappingDefinition.setMappingColumns( Collections.singletonList( valueColumn ) ); return mappingDefinition; } private static MappingDefinition buildMappingDefinitionWithTwoKeys() { MappingDefinition mappingDefinition = createMappingDefinition(); List mappingColumns = new ArrayList(); MappingColumn keyColumn = new MappingColumn(); keyColumn.setAlias( KEY_STRING ); keyColumn.setKey( true ); keyColumn.setType( STRING_TYPE ); mappingColumns.add( keyColumn ); MappingColumn keyColumn2 = new MappingColumn(); keyColumn2.setAlias( "key2" ); keyColumn2.setKey( true ); keyColumn2.setType( STRING_TYPE ); 
mappingColumns.add( keyColumn2 ); mappingDefinition.setMappingColumns( mappingColumns ); return mappingDefinition; } private static MappingDefinition buildMappingDefinitionForGetMapping() { MappingDefinition mappingDefinition = createMappingDefinition(); List mappingColumns = new ArrayList(); MappingColumn keyColumn = buildKeyColumn( KEY_STRING, STRING_TYPE ); mappingColumns.add( keyColumn ); MappingColumn valueColumn = buildNoKeyColumn( VALUE_STRING, "family", "name", STRING_TYPE ); mappingColumns.add( valueColumn ); mappingDefinition.setMappingColumns( mappingColumns ); return mappingDefinition; } private static MappingColumn buildKeyColumn( String alias, String type ) { MappingColumn keyColumn = new MappingColumn(); keyColumn.setAlias( alias ); keyColumn.setKey( true ); keyColumn.setType( type ); return keyColumn; } public static MappingColumn buildNoKeyColumn( String alias, String family, String name, String type ) { MappingColumn valueColumn = new MappingColumn(); valueColumn.setAlias( alias ); valueColumn.setType( STRING_TYPE ); valueColumn.setColumnFamily( family ); valueColumn.setColumnName( name ); return valueColumn; } private static MappingDefinition createMappingDefinition() { MappingDefinition mappingDefinition = new MappingDefinition(); mappingDefinition.setTableName( TEST_TABLE_NAME ); mappingDefinition.setMappingName( TEST_MAPPING_NAME ); return mappingDefinition; } private static List buildTupleMapping() { List mappingColumns = new ArrayList(); MappingColumn keyColumn = new MappingColumn(); keyColumn.setAlias( "KEY" ); mappingColumns.add( keyColumn ); MappingColumn familyColumn = new MappingColumn(); familyColumn.setAlias( "Family" ); mappingColumns.add( familyColumn ); MappingColumn columnColumn = new MappingColumn(); columnColumn.setAlias( "Column" ); mappingColumns.add( columnColumn ); MappingColumn valueColumn = new MappingColumn(); valueColumn.setAlias( "Value" ); mappingColumns.add( valueColumn ); MappingColumn timestampColumn = new MappingColumn(); timestampColumn.setAlias( "Timestamp" ); mappingColumns.add( timestampColumn ); return mappingColumns; } private static List buildNoTupleMapping() { List mappingColumns = new ArrayList(); MappingColumn keyColumn = new MappingColumn(); keyColumn.setAlias( KEY_STRING ); mappingColumns.add( keyColumn ); MappingColumn valueColumn = new MappingColumn(); valueColumn.setAlias( VALUE_STRING ); mappingColumns.add( valueColumn ); return mappingColumns; } } ================================================ FILE: kettle-plugins/hbase/core/src/test/java/org/pentaho/big/data/kettle/plugins/hbase/mapping/MockHBaseByteConverterUsingJavaByteBuffer.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. 
* * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.hbase.mapping; import org.pentaho.hadoop.shim.api.internal.hbase.HBaseBytesUtilShim; import java.nio.ByteBuffer; /** * @author Vasilina Terehova */ public class MockHBaseByteConverterUsingJavaByteBuffer implements HBaseBytesUtilShim { @Override public int getSizeOfFloat() { return Float.SIZE / Byte.SIZE; } @Override public int getSizeOfDouble() { return Double.SIZE / Byte.SIZE; } @Override public int getSizeOfInt() { return Integer.SIZE / Byte.SIZE; } @Override public int getSizeOfLong() { return Long.SIZE / Byte.SIZE; } @Override public int getSizeOfShort() { return Short.SIZE / Byte.SIZE; } @Override public int getSizeOfByte() { return 1; } @Override public byte[] toBytes( String aString ) { return aString.getBytes(); } @Override public byte[] toBytes( int anInt ) { return ByteBuffer.allocate( getSizeOfInt() ).putInt( anInt ).array(); } @Override public byte[] toBytes( long aLong ) { return ByteBuffer.allocate( getSizeOfLong() ).putLong( aLong ).array(); } @Override public byte[] toBytes( float aFloat ) { return ByteBuffer.allocate( getSizeOfFloat() ).putFloat( aFloat ).array(); } @Override public byte[] toBytes( double aDouble ) { return ByteBuffer.allocate( getSizeOfDouble() ).putDouble( aDouble ).array(); } @Override public byte[] toBytesBinary( String value ) { return value.getBytes(); } @Override public String toString( byte[] value ) { return new String( value ); } @Override public long toLong( byte[] value ) { return ByteBuffer.wrap( value ).getLong(); } @Override public int toInt( byte[] value ) { return ByteBuffer.wrap( value ).getInt(); } @Override public float toFloat( byte[] value ) { return ByteBuffer.wrap( value ).getFloat(); } @Override public double toDouble( byte[] value ) { return ByteBuffer.wrap( value ).getDouble(); } @Override public short toShort( byte[] value ) { return ByteBuffer.wrap( value ).getShort(); } } ================================================ FILE: kettle-plugins/hbase/core/src/test/java/org/pentaho/big/data/kettle/plugins/hbase/output/HBaseOutputMetaInjectionTest.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. 
* * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.hbase.output; import org.junit.Before; import org.junit.Test; import org.mockito.Mockito; import org.pentaho.big.data.kettle.plugins.hbase.NamedClusterLoadSaveUtil; import org.pentaho.di.core.injection.BaseMetadataInjectionTest; import org.pentaho.hadoop.shim.api.cluster.NamedClusterService; import org.pentaho.hadoop.shim.api.cluster.NamedClusterServiceLocator; import org.pentaho.metastore.locator.api.MetastoreLocator; import org.pentaho.runtime.test.RuntimeTester; import org.pentaho.runtime.test.action.RuntimeTestActionService; public class HBaseOutputMetaInjectionTest extends BaseMetadataInjectionTest { @Before public void setup() { NamedClusterService namedClusterService = Mockito.mock( NamedClusterService.class ); NamedClusterServiceLocator namedClusterServiceLocator = Mockito.mock( NamedClusterServiceLocator.class ); RuntimeTestActionService runtimeTestActionService = Mockito.mock( RuntimeTestActionService.class ); RuntimeTester runtimeTester = Mockito.mock( RuntimeTester.class ); MetastoreLocator metaStore = Mockito.mock( MetastoreLocator.class ); setup( new HBaseOutputMeta( namedClusterService, namedClusterServiceLocator, runtimeTestActionService, runtimeTester, new NamedClusterLoadSaveUtil(), metaStore ) ); } @Test public void test() throws Exception { check( "HBASE_SITE_XML_URL", new StringGetter() { public String get() { return meta.getCoreConfigURL(); } } ); check( "HBASE_DEFAULT_XML_URL", new StringGetter() { public String get() { return meta.getDefaultConfigURL(); } } ); check( "TARGET_TABLE_NAME", new StringGetter() { public String get() { return meta.getTargetTableName(); } } ); check( "TARGET_MAPPING_NAME", new StringGetter() { public String get() { return meta.getTargetMappingName(); } } ); check( "DELETE_ROW_KEY", new BooleanGetter() { @Override public boolean get() { return meta.getDeleteRowKey(); } } ); check( "DISABLE_WRITE_TO_WAL", new BooleanGetter() { public boolean get() { return meta.getDisableWriteToWAL(); } } ); check( "WRITE_BUFFER_SIZE", new StringGetter() { public String get() { return meta.getWriteBufferSize(); } } ); check( "TABLE_NAME", new StringGetter() { public String get() { return meta.getMappingDefinition().getTableName(); } } ); check( "MAPPING_NAME", new StringGetter() { public String get() { return meta.getMappingDefinition().getMappingName(); } } ); check( "MAPPING_ALIAS", new StringGetter() { public String get() { return meta.getMappingDefinition().getMappingColumns().get( 0 ).getAlias(); } } ); check( "MAPPING_KEY", new BooleanGetter() { public boolean get() { return meta.getMappingDefinition().getMappingColumns().get( 0 ).isKey(); } } ); check( "MAPPING_COLUMN_FAMILY", new StringGetter() { public String get() { return meta.getMappingDefinition().getMappingColumns().get( 0 ).getColumnFamily(); } } ); check( "MAPPING_COLUMN_NAME", new StringGetter() { public String get() { return meta.getMappingDefinition().getMappingColumns().get( 0 ).getColumnName(); } } ); check( "MAPPING_TYPE", new StringGetter() { public String get() { return meta.getMappingDefinition().getMappingColumns().get( 0 ).getType(); } } ); check( "MAPPING_INDEXED_VALUES", new StringGetter() { public String get() { return meta.getMappingDefinition().getMappingColumns().get( 0 ).getIndexedValues(); } } ); } } ================================================ FILE: 
kettle-plugins/hbase/core/src/test/java/org/pentaho/big/data/kettle/plugins/hbase/output/HBaseOutputMetaTest.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.hbase.output; import org.junit.Test; import org.junit.runner.RunWith; import org.mockito.InjectMocks; import org.mockito.Mock; import org.mockito.Mockito; import org.mockito.junit.MockitoJUnitRunner; import org.pentaho.big.data.kettle.plugins.hbase.LogInjector; import org.pentaho.big.data.kettle.plugins.hbase.MappingDefinition; import org.pentaho.big.data.kettle.plugins.hbase.NamedClusterLoadSaveUtil; import org.pentaho.big.data.kettle.plugins.hbase.ServiceStatus; import org.pentaho.di.core.database.DatabaseMeta; import org.pentaho.di.core.exception.KettleException; import org.pentaho.di.core.logging.KettleLogStore; import org.pentaho.di.core.logging.LoggingBuffer; import org.pentaho.di.core.variables.Variables; import org.pentaho.di.repository.ObjectId; import org.pentaho.di.repository.Repository; import org.pentaho.di.trans.TransMeta; import org.pentaho.di.trans.step.StepMeta; import org.pentaho.di.trans.steps.loadsave.MemoryRepository; import org.pentaho.hadoop.shim.api.cluster.ClusterInitializationException; import org.pentaho.hadoop.shim.api.cluster.NamedCluster; import org.pentaho.hadoop.shim.api.cluster.NamedClusterService; import org.pentaho.hadoop.shim.api.cluster.NamedClusterServiceLocator; import org.pentaho.hadoop.shim.api.hbase.HBaseService; import org.pentaho.hadoop.shim.api.hbase.mapping.Mapping; import org.pentaho.hadoop.shim.api.hbase.mapping.MappingFactory; import org.pentaho.metastore.api.IMetaStore; import org.pentaho.metastore.locator.api.MetastoreLocator; import org.pentaho.runtime.test.RuntimeTester; import org.pentaho.runtime.test.action.RuntimeTestActionService; import javax.imageio.metadata.IIOMetadataNode; import java.util.ArrayList; import java.util.List; import static org.hamcrest.core.Is.is; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertThat; import static org.junit.Assert.assertTrue; import static org.mockito.ArgumentMatchers.any; import static org.mockito.Mockito.atLeast; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; @RunWith( MockitoJUnitRunner.class ) public class HBaseOutputMetaTest { @Mock NamedClusterService namedClusterService; @Mock NamedClusterServiceLocator namedClusterServiceLocator; @Mock RuntimeTestActionService runtimeTestActionService; @Mock RuntimeTester runtimeTester; @Mock NamedClusterLoadSaveUtil namedClusterLoadSaveUtil; @Mock NamedCluster namedCluster; @Mock MetastoreLocator metastoreLocatorOsgi; @Mock Repository rep; @Mock IMetaStore metaStore; @Mock ObjectId id_step; @Mock HBaseService hBaseService; @Mock MappingDefinition mappingDefinition; List databases = new ArrayList<>(); @InjectMocks HBaseOutputMeta hBaseOutputMeta; @Test public void testReadRepSetsNamedCluster() throws Exception { when( 
namedClusterLoadSaveUtil.loadClusterConfig( any(), any(), any(), any(), any(), any() ) ) .thenReturn( namedCluster ); when( namedClusterServiceLocator.getService( namedCluster, HBaseService.class, null ) ).thenReturn( hBaseService ); when( hBaseService.getMappingFactory() ) .thenReturn( mock( MappingFactory.class ) ); Mapping mapping = mock( Mapping.class ); when( mapping.readRep( rep, id_step ) ).thenReturn( true ); when( hBaseService.getMappingFactory().createMapping() ).thenReturn( mapping ); hBaseOutputMeta.readRep( rep, metaStore, id_step, databases ); assertThat( hBaseOutputMeta.getNamedCluster(), is( namedCluster ) ); assertThat( hBaseOutputMeta.getMapping(), is( mapping ) ); } /** * relevant to bug BACKLOG-9529 */ @Test public void testLogSuccessfulForGetXml() throws Exception { HBaseOutputMeta hBaseOutputMetaSpy = Mockito.spy( this.hBaseOutputMeta ); Mockito.doThrow( new KettleException( "Unexpected error occurred" ) ).when( hBaseOutputMetaSpy ) .applyInjection( any() ); LoggingBuffer loggingBuffer = LogInjector.setMockForLoggingBuffer(); hBaseOutputMetaSpy.getXML(); verify( loggingBuffer, atLeast( 1 ) ).addLogggingEvent( any() ); } /** * relevant to bug BACKLOG-9629 */ @Test public void testApplyInjectionDefinitionExists() throws Exception { HBaseOutputMeta hBaseOutputMetaSpy = Mockito.spy( this.hBaseOutputMeta ); when( namedClusterServiceLocator.getService( namedCluster, HBaseService.class, null ) ).thenReturn( hBaseService ); hBaseOutputMetaSpy.setMappingDefinition( mappingDefinition ); hBaseOutputMetaSpy.setNamedCluster( namedCluster ); Mockito.doReturn( null ).when( hBaseOutputMetaSpy ).getMapping( any(), any() ); hBaseOutputMetaSpy.getXML(); verify( hBaseOutputMetaSpy, times( 1 ) ).setMapping( any() ); } /** * relevant to bug BACKLOG-9629 */ @Test public void testApplyInjectionDefinitionNull() throws Exception { HBaseOutputMeta hBaseOutputMetaSpy = Mockito.spy( this.hBaseOutputMeta ); hBaseOutputMetaSpy.setMappingDefinition( null ); hBaseOutputMetaSpy.setNamedCluster( namedCluster ); hBaseOutputMetaSpy.getXML(); verify( hBaseOutputMetaSpy, times( 0 ) ).getMapping( any(), any() ); verify( hBaseOutputMetaSpy, times( 0 ) ).setMapping( any() ); } @Test public void testLoadXmlDoesntBubleUpException() throws Exception { KettleLogStore.init(); ClusterInitializationException exception = new ClusterInitializationException( new Exception() ); hBaseOutputMeta.setNamedCluster( namedCluster ); when( namedClusterServiceLocator.getService( namedCluster, HBaseService.class, null ) ).thenThrow( exception ); when( namedClusterLoadSaveUtil.loadClusterConfig( any(), any(), any(), any(), any(), any() ) ) .thenReturn( namedCluster ); IIOMetadataNode node = new IIOMetadataNode(); IIOMetadataNode child = new IIOMetadataNode( "disable_wal" ); IIOMetadataNode grandChild = new IIOMetadataNode(); grandChild.setNodeValue( "N" ); child.appendChild( grandChild ); node.appendChild( child ); hBaseOutputMeta.loadXML( node, new ArrayList<>(), metaStore ); ServiceStatus serviceStatus = hBaseOutputMeta.getServiceStatus(); assertNotNull( serviceStatus ); assertFalse( serviceStatus.isOk() ); assertEquals( exception, serviceStatus.getException() ); } @Test public void testLoadXmlServiceStatusOk() throws Exception { KettleLogStore.init(); hBaseOutputMeta.setNamedCluster( namedCluster ); when( namedClusterServiceLocator.getService( namedCluster, HBaseService.class, null ) ).thenReturn( hBaseService ); when( namedClusterLoadSaveUtil.loadClusterConfig( any(), any(), any(), any(), any(), any() ) ) .thenReturn( namedCluster
); IIOMetadataNode node = new IIOMetadataNode(); IIOMetadataNode child = new IIOMetadataNode( "disable_wal" ); IIOMetadataNode grandChild = new IIOMetadataNode(); grandChild.setNodeValue( "N" ); child.appendChild( grandChild ); node.appendChild( child ); hBaseOutputMeta.loadXML( node, new ArrayList<>(), metaStore ); ServiceStatus serviceStatus = hBaseOutputMeta.getServiceStatus(); assertNotNull( serviceStatus ); assertTrue( serviceStatus.isOk() ); } @Test public void testReadRepDoesntBubleUpException() throws Exception { KettleLogStore.init(); ClusterInitializationException exception = new ClusterInitializationException( new Exception() ); hBaseOutputMeta.setNamedCluster( namedCluster ); when( namedClusterServiceLocator.getService( namedCluster, HBaseService.class, null ) ).thenThrow( exception ); when( namedClusterLoadSaveUtil.loadClusterConfig( any(), any(), any(), any(), any(), any() ) ) .thenReturn( namedCluster ); hBaseOutputMeta.readRep( new MemoryRepository(), metaStore, mock( ObjectId.class ), new ArrayList<>() ); ServiceStatus serviceStatus = hBaseOutputMeta.getServiceStatus(); assertNotNull( serviceStatus ); assertFalse( serviceStatus.isOk() ); assertEquals( exception, serviceStatus.getException() ); } @Test public void testReadRepServiceStatusOk() throws Exception { KettleLogStore.init(); hBaseOutputMeta.setNamedCluster( namedCluster ); when( namedClusterServiceLocator.getService( namedCluster, HBaseService.class, null ) ).thenReturn( hBaseService ); when( namedClusterLoadSaveUtil.loadClusterConfig( any(), any(), any(), any(), any(), any() ) ) .thenReturn( namedCluster ); hBaseOutputMeta.readRep( new MemoryRepository(), metaStore, mock( ObjectId.class ), new ArrayList<>() ); ServiceStatus serviceStatus = hBaseOutputMeta.getServiceStatus(); assertNotNull( serviceStatus ); assertTrue( serviceStatus.isOk() ); } @Test public void testInjectWithEmbeddedMetastoreProviderKey() throws Exception { KettleLogStore.init(); hBaseOutputMeta.setNamedCluster( namedCluster ); when( namedCluster.getName() ).thenReturn( "ClusterName" ); NamedCluster embeddedNamedCluster = mock( NamedCluster.class ); when( embeddedNamedCluster.getShimIdentifier() ).thenReturn( "shim" ); StepMeta mockStepMeta = mock( StepMeta.class ); TransMeta mockTransMeta = mock( TransMeta.class ); when( mockTransMeta.getEmbeddedMetastoreProviderKey() ).thenReturn( "key" ); hBaseOutputMeta.setParentStepMeta( mockStepMeta ); when( mockStepMeta.getParentTransMeta() ).thenReturn( mockTransMeta ); when( metastoreLocatorOsgi.getExplicitMetastore( "key" ) ).thenReturn( metaStore ); when( namedClusterService.getNamedClusterByName( "ClusterName", metaStore ) ).thenReturn( embeddedNamedCluster ); hBaseOutputMeta.applyInjection( new Variables() ); assertEquals( embeddedNamedCluster, hBaseOutputMeta.getNamedCluster() ); } } ================================================ FILE: kettle-plugins/hbase/core/src/test/java/org/pentaho/big/data/kettle/plugins/hbase/output/KettleRowToHBaseTupleTest.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. 
* * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.hbase.output; import org.junit.Assert; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import org.mockito.Mockito; import org.mockito.junit.MockitoJUnitRunner; import org.pentaho.big.data.kettle.plugins.hbase.mapping.MappingUtils; import org.pentaho.big.data.kettle.plugins.hbase.output.KettleRowToHBaseTuple.FieldException; import org.pentaho.di.core.exception.KettleException; import org.pentaho.di.core.row.RowMetaInterface; import org.pentaho.di.core.row.value.ValueMetaString; import org.pentaho.hadoop.shim.api.hbase.ByteConversionUtil; import org.pentaho.hadoop.shim.api.hbase.mapping.Mapping; import org.pentaho.hadoop.shim.api.hbase.mapping.Mapping.KeyType; import org.pentaho.hadoop.shim.api.hbase.mapping.Mapping.TupleMapping; import org.pentaho.hadoop.shim.api.hbase.meta.HBaseValueMetaInterface; import org.pentaho.hadoop.shim.api.hbase.table.HBasePut; import org.pentaho.hadoop.shim.api.hbase.table.HBaseTableWriteOperationManager; import java.util.HashMap; import java.util.Map; import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.eq; import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; @RunWith( MockitoJUnitRunner.class ) public class KettleRowToHBaseTupleTest { private Mapping tupleMapping; @Before public void setup() { tupleMapping = Mockito.mock( Mapping.class ); when( tupleMapping.getKeyName() ).thenReturn( Mapping.TupleMapping.KEY.toString() ); when( tupleMapping.getKeyType() ).thenReturn( KeyType.STRING ); } @Test public void testRowConversion() throws Exception { RowMetaInterface inputRowMeta = Mockito.mock( RowMetaInterface.class ); when( inputRowMeta.indexOfValue( Mapping.TupleMapping.KEY.toString() ) ).thenReturn( 0 ); when( inputRowMeta.indexOfValue( Mapping.TupleMapping.FAMILY.toString() ) ).thenReturn( 1 ); when( inputRowMeta.indexOfValue( Mapping.TupleMapping.COLUMN.toString() ) ).thenReturn( 2 ); when( inputRowMeta.indexOfValue( Mapping.TupleMapping.VALUE.toString() ) ).thenReturn( 3 ); when( inputRowMeta.indexOfValue( MappingUtils.TUPLE_MAPPING_VISIBILITY ) ).thenReturn( 4 ); ValueMetaString keyMeta = new ValueMetaString( Mapping.TupleMapping.KEY.toString() ); ValueMetaString familyMeta = new ValueMetaString( Mapping.TupleMapping.FAMILY.toString() ); ValueMetaString columnMeta = new ValueMetaString( Mapping.TupleMapping.COLUMN.toString() ); ValueMetaString valueMeta = new ValueMetaString( Mapping.TupleMapping.VALUE.toString() ); ValueMetaString visMeta = new ValueMetaString( MappingUtils.TUPLE_MAPPING_VISIBILITY ); when( inputRowMeta.getValueMeta( 0 ) ).thenReturn( keyMeta ); when( inputRowMeta.getValueMeta( 1 ) ).thenReturn( familyMeta ); when( inputRowMeta.getValueMeta( 2 ) ).thenReturn( columnMeta ); when( inputRowMeta.getValueMeta( 3 ) ).thenReturn( valueMeta ); when( inputRowMeta.getValueMeta( 4 ) ).thenReturn( visMeta ); Map columnMap = new HashMap<>(); HBaseValueMetaInterface hvmi = Mockito.mock( HBaseValueMetaInterface.class ); columnMap.put( valueMeta.getName(), hvmi ); HBaseValueMetaInterface hvmiv = Mockito.mock( HBaseValueMetaInterface.class ); columnMap.put( visMeta.getName(), hvmiv ); KettleRowToHBaseTuple rowConverter = new KettleRowToHBaseTuple( inputRowMeta, tupleMapping, columnMap ); ByteConversionUtil byteConversionUtil = Mockito.mock( ByteConversionUtil.class ); 
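/* The test row built below follows the tuple-mapping field order set up above: key, column family, column name (the "@@@binary@@@" prefix marks the column value as binary), value, and visibility label. */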
String[] row = { "key", "family", "@@@binary@@@column", "value", "public" }; HBaseTableWriteOperationManager writeManager = Mockito.mock( HBaseTableWriteOperationManager.class ); HBasePut put = Mockito.mock( HBasePut.class ); when( writeManager.createPut( row[ 0 ].getBytes() ) ).thenReturn( put ); when( byteConversionUtil.encodeKeyValue( row[ 0 ], keyMeta, KeyType.STRING ) ).thenReturn( row[ 0 ].getBytes() ); rowConverter.createTuplePut( writeManager, byteConversionUtil, row, true ); verify( put, times( 1 ) ).addColumn( eq( row[ 1 ] ), eq( "column" ), eq( true ), any() ); verify( put, times( 1 ) ).addColumn( eq( row[ 1 ] ), eq( MappingUtils.TUPLE_MAPPING_VISIBILITY ), eq( false ), any() ); verify( put, times( 1 ) ).setWriteToWAL( true ); try { rowConverter.createTuplePut( null, null, new String[] { null, null, null, null, null }, true ); } catch ( FieldException fe ) { Assert.assertEquals( fe.getFieldString(), TupleMapping.KEY.toString() ); } try { rowConverter.createTuplePut( null, null, new String[] { "key", null, null, null, null }, true ); } catch ( FieldException fe ) { Assert.assertEquals( fe.getFieldString(), TupleMapping.FAMILY.toString() ); } try { rowConverter.createTuplePut( null, null, new String[] { "key", "family", null, null, null }, true ); } catch ( FieldException fe ) { Assert.assertEquals( fe.getFieldString(), TupleMapping.COLUMN.toString() ); } try { rowConverter.createTuplePut( null, null, new String[] { "key", "family", "column", null, null }, true ); } catch ( FieldException fe ) { Assert.assertEquals( fe.getFieldString(), TupleMapping.VALUE.toString() ); } } @Test public void testMissingValues() { try { RowMetaInterface inputRowMeta = Mockito.mock( RowMetaInterface.class ); when( inputRowMeta.indexOfValue( Mapping.TupleMapping.KEY.toString() ) ).thenReturn( -1 ); new KettleRowToHBaseTuple( inputRowMeta, tupleMapping, null ); Assert.fail(); } catch ( KettleException e ) { } try { RowMetaInterface inputRowMeta = Mockito.mock( RowMetaInterface.class ); when( inputRowMeta.indexOfValue( Mapping.TupleMapping.KEY.toString() ) ).thenReturn( 0 ); when( inputRowMeta.indexOfValue( Mapping.TupleMapping.FAMILY.toString() ) ).thenReturn( -1 ); new KettleRowToHBaseTuple( inputRowMeta, tupleMapping, null ); Assert.fail(); } catch ( KettleException e ) { } try { RowMetaInterface inputRowMeta = Mockito.mock( RowMetaInterface.class ); when( inputRowMeta.indexOfValue( Mapping.TupleMapping.KEY.toString() ) ).thenReturn( 0 ); when( inputRowMeta.indexOfValue( Mapping.TupleMapping.FAMILY.toString() ) ).thenReturn( 1 ); when( inputRowMeta.indexOfValue( Mapping.TupleMapping.COLUMN.toString() ) ).thenReturn( -1 ); new KettleRowToHBaseTuple( inputRowMeta, tupleMapping, null ); Assert.fail(); } catch ( KettleException e ) { } try { RowMetaInterface inputRowMeta = Mockito.mock( RowMetaInterface.class ); when( inputRowMeta.indexOfValue( Mapping.TupleMapping.KEY.toString() ) ).thenReturn( 0 ); when( inputRowMeta.indexOfValue( Mapping.TupleMapping.FAMILY.toString() ) ).thenReturn( 1 ); when( inputRowMeta.indexOfValue( Mapping.TupleMapping.COLUMN.toString() ) ).thenReturn( 2 ); when( inputRowMeta.indexOfValue( Mapping.TupleMapping.VALUE.toString() ) ).thenReturn( -1 ); new KettleRowToHBaseTuple( inputRowMeta, tupleMapping, null ); Assert.fail(); } catch ( KettleException e ) { } } @Test public void testException() { FieldException fieldException = new FieldException( TupleMapping.KEY ); Assert.assertEquals( fieldException.getFieldString(), TupleMapping.KEY.toString() ); } } 
================================================ FILE: kettle-plugins/hbase/core/src/test/java/org/pentaho/big/data/kettle/plugins/hbase/rowdecoder/HBaseRowDecoderMetaInjectionTest.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.hbase.rowdecoder; import org.junit.Before; import org.junit.Test; import org.mockito.Mockito; import org.pentaho.di.core.osgi.api.MetastoreLocatorOsgi; import org.pentaho.hadoop.shim.api.cluster.NamedClusterService; import org.pentaho.hadoop.shim.api.cluster.NamedClusterServiceLocator; import org.pentaho.di.core.injection.BaseMetadataInjectionTest; import org.pentaho.metastore.locator.api.MetastoreLocator; import org.pentaho.runtime.test.RuntimeTester; import org.pentaho.runtime.test.action.RuntimeTestActionService; public class HBaseRowDecoderMetaInjectionTest extends BaseMetadataInjectionTest { @Before public void setup() { NamedClusterService namedClusterService = Mockito.mock( NamedClusterService.class ); NamedClusterServiceLocator namedClusterServiceLocator = Mockito.mock( NamedClusterServiceLocator.class ); RuntimeTestActionService runtimeTestActionService = Mockito.mock( RuntimeTestActionService.class ); RuntimeTester runtimeTester = Mockito.mock( RuntimeTester.class ); MetastoreLocator metaStore = Mockito.mock( MetastoreLocator.class ); setup( new HBaseRowDecoderMeta( namedClusterServiceLocator, namedClusterService, runtimeTestActionService, runtimeTester, metaStore ) ); } @Test public void test() throws Exception { check( "KEY_FIELD", new StringGetter() { public String get() { return meta.getIncomingKeyField(); } } ); check( "HBASE_RESULT_FIELD", new StringGetter() { public String get() { return meta.getIncomingResultField(); } } ); check( "TABLE_NAME", new StringGetter() { public String get() { return meta.getMappingDefinition().getTableName(); } } ); check( "MAPPING_NAME", new StringGetter() { public String get() { return meta.getMappingDefinition().getMappingName(); } } ); check( "MAPPING_ALIAS", new StringGetter() { public String get() { return meta.getMappingDefinition().getMappingColumns().get( 0 ).getAlias(); } } ); check( "MAPPING_KEY", new BooleanGetter() { public boolean get() { return meta.getMappingDefinition().getMappingColumns().get( 0 ).isKey(); } } ); check( "MAPPING_COLUMN_FAMILY", new StringGetter() { public String get() { return meta.getMappingDefinition().getMappingColumns().get( 0 ).getColumnFamily(); } } ); check( "MAPPING_COLUMN_NAME", new StringGetter() { public String get() { return meta.getMappingDefinition().getMappingColumns().get( 0 ).getColumnName(); } } ); check( "MAPPING_TYPE", new StringGetter() { public String get() { return meta.getMappingDefinition().getMappingColumns().get( 0 ).getType(); } } ); check( "MAPPING_INDEXED_VALUES", new StringGetter() { public String get() { return meta.getMappingDefinition().getMappingColumns().get( 0 ).getIndexedValues(); } } ); } } ================================================ FILE: kettle-plugins/hbase/core/src/test/java/org/pentaho/big/data/kettle/plugins/hbase/rowdecoder/HBaseRowDecoderMetaTest.java ================================================ /*! 
****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.hbase.rowdecoder; import org.junit.After; import org.junit.Before; import org.junit.Test; import org.pentaho.di.core.bowl.DefaultBowl; import org.pentaho.di.core.row.RowMeta; import org.pentaho.di.core.row.ValueMetaInterface; import org.pentaho.hadoop.shim.api.cluster.NamedClusterService; import org.pentaho.hadoop.shim.api.cluster.NamedClusterServiceLocator; import org.pentaho.hadoop.shim.api.hbase.mapping.Mapping; import org.pentaho.hadoop.shim.api.hbase.meta.HBaseValueMetaInterface; import org.pentaho.metastore.locator.api.MetastoreLocator; import org.pentaho.runtime.test.RuntimeTester; import org.pentaho.runtime.test.action.RuntimeTestActionService; import java.util.HashMap; import java.util.Map; import static org.junit.Assert.assertEquals; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; /** * @author Tatsiana_Kasiankova * */ public class HBaseRowDecoderMetaTest { private static final String MAPPING_NAME = "MappingName"; private static final String TABLE_NAME = "TableName"; private static final String COLUMN_NAME = "ColumnName"; private static final String FAMILY_NAME = "fm"; private static final String ALIAS = "alias"; private static final String MAPPING_KEY_NAME = "mappingKeyName"; private static final String ORIGIN = "HBase Row Decoder"; private HBaseRowDecoderMeta hbRowDecoderMeta; private RowMeta rowMeta; @Before public void setup() { hbRowDecoderMeta = new HBaseRowDecoderMeta( mock( NamedClusterServiceLocator.class ), mock( NamedClusterService.class ), mock( RuntimeTestActionService.class ), mock( RuntimeTester.class ), mock( MetastoreLocator.class ) ); rowMeta = new RowMeta(); } @After public void tearDown() { rowMeta.clear(); } @Test public void testRowMetaIsFilled_WhenMappingHasTableNameAndMappingName() throws Exception { // Mapping from HBase: having both table name and mapping name hbRowDecoderMeta.setMapping( getMapping( TABLE_NAME, MAPPING_NAME ) ); hbRowDecoderMeta.getFields( DefaultBowl.getInstance(), rowMeta, ORIGIN, null, null, null ); assertRowMetaIsFilledWithFields(); } @Test public void testRowMetaIsFilled_WhenMappingHasNoMappingName() throws Exception { // "local" Mapping: no mapping name hbRowDecoderMeta.setMapping( getMapping( null, null ) ); hbRowDecoderMeta.getFields( DefaultBowl.getInstance(), rowMeta, ORIGIN, null, null, null ); assertRowMetaIsFilledWithFields(); } private void assertRowMetaIsFilledWithFields() { assertEquals( 2, rowMeta.getValueMetaList().size() ); ValueMetaInterface vmi = rowMeta.getValueMeta( 0 ); assertEquals( MAPPING_KEY_NAME, vmi.getName() ); vmi = rowMeta.getValueMeta( 1 ); assertEquals( ALIAS, vmi.getName() ); } private Mapping getMapping( String tableName, String mappingName ) throws Exception { Mapping maping = mock( Mapping.class ); when( maping.getKeyName() ).thenReturn( MAPPING_KEY_NAME ); Map map = new HashMap<>(); HBaseValueMetaInterface value = mock( HBaseValueMetaInterface.class ); when( value.getName() ).thenReturn( ALIAS ); map.put( ALIAS, value ); when( maping.getMappedColumns() ).thenReturn( map ); return maping; } } ================================================ FILE: 
kettle-plugins/hbase/core/src/test/resources/StubMapping.xml ================================================ Test HBaseInput N 1 none Local Sandbox sandbox-hdp.hortonworks.com 2181 iemployee iemployee simple input map Rowkey Rowkey Y Integer iemployee simple input map fname personal fname N Float iemployee simple input map lname personal lname N Double iemployee simple input map salary payroll salary N Float N simple input map iemployee Rowkey Integer fname personal fname Integer lname personal lname Long salary payroll salary Float 160 144 Y ================================================ FILE: kettle-plugins/hbase/pom.xml ================================================ 4.0.0 pentaho-big-data-kettle-plugins pentaho 11.1.0.0-SNAPSHOT pentaho-big-data-kettle-plugins-hbase 11.1.0.0-SNAPSHOT pom Pentaho Community Edition Project: ${project.artifactId} a Pentaho open source project http://www.pentaho.com Apache License, Version 2.0 https://www.apache.org/licenses/LICENSE-2.0.txt repo A business-friendly OSS license assemblies core ================================================ FILE: kettle-plugins/hbase-meta/pom.xml ================================================ 4.0.0 1.4.8 pentaho pentaho-big-data-kettle-plugins 11.1.0.0-SNAPSHOT pentaho-big-data-kettle-plugins-hbase-meta 11.1.0.0-SNAPSHOT jar org.apache.hbase hbase-client ${hbase.version} provided com.google.protobuf protobuf-java org.apache.hadoop.thirdparty hadoop-shaded-protobuf_3_25 ${hadoop-shaded-protobuf_3_25.version} org.pentaho shim-api ${pentaho-hadoop-shims.version} pentaho-kettle kettle-core ${pdi.version} pentaho-kettle kettle-engine ${pdi.version} org.mockito mockito-all 1.10.19 test ================================================ FILE: kettle-plugins/hbase-meta/src/main/java/org/pentaho/big/data/kettle/plugins/hbase/meta/AELHBaseMappingImpl.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. 
* * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.hbase.meta; import org.pentaho.di.core.Const; import org.pentaho.di.core.exception.KettleException; import org.pentaho.di.core.exception.KettleXMLException; import org.pentaho.di.core.xml.XMLHandler; import org.pentaho.di.repository.ObjectId; import org.pentaho.di.repository.Repository; import org.pentaho.hadoop.shim.api.hbase.mapping.Mapping; import org.pentaho.hadoop.shim.api.hbase.meta.HBaseValueMetaInterface; import org.w3c.dom.Node; import java.io.Serializable; import java.util.HashMap; import java.util.Map; public class AELHBaseMappingImpl implements Mapping, Serializable { private static final long serialVersionUID = 1L; private String tableName; private String mappingName; private String keyName; private KeyType keyType; private String keyTypeAsString; private int numMappedColumns; private Map mappedColumns; public AELHBaseMappingImpl() { } @Override public String addMappedColumn( HBaseValueMetaInterface hBaseValueMetaInterface, boolean b ) throws Exception { if ( mappedColumns == null ) { mappedColumns = new HashMap<>(); } mappedColumns.put( hBaseValueMetaInterface.getAlias(), hBaseValueMetaInterface ); this.numMappedColumns++; return hBaseValueMetaInterface.getAlias(); } @Override public String getTableName() { return tableName; } @Override public void setTableName( String tableName ) { this.tableName = tableName; } @Override public String getMappingName() { return mappingName; } @Override public void setMappingName( String mappingName ) { this.mappingName = mappingName; } @Override public String getKeyName() { return keyName; } @Override public void setKeyName( String keyName ) { this.keyName = keyName; } @Override public KeyType getKeyType() { return keyType; } @Override public void setKeyType( KeyType keyType ) { this.keyType = keyType; } @Override public Map getMappedColumns() { return mappedColumns; } @Override public void setMappedColumns( Map mappedColumns ) { this.mappedColumns = mappedColumns; } @Override public void setKeyTypeAsString( String s ) throws Exception { this.keyTypeAsString = s; } @Override public boolean isTupleMapping() { return false; } @Override public void setTupleMapping( boolean b ) { } @Override public String getTupleFamilies() { return null; } @Override public String[] getTupleFamiliesSplit() { return new String[0]; } @Override public void setTupleFamilies( String s ) { } @Override public int numMappedColumns() { return this.numMappedColumns; } @Override public void saveRep( Repository repository, ObjectId objectId, ObjectId objectId1 ) throws KettleException { //noop on AEL } @Override public String getXML() { if ( Const.isEmpty( getKeyName() ) ) { return ""; // nothing defined } String retString = ""; retString += XMLHandler.openTag( "mapping" ); retString += XMLHandler.addTagValue( "mapping_name", getMappingName() ); retString += XMLHandler.addTagValue( "table_name", getTableName() ); retString += XMLHandler.addTagValue( "key", getKeyName() ); retString += XMLHandler.addTagValue( "key_type", getKeyType().toString() ); if ( mappedColumns.size() > 0 ) { retString += XMLHandler.openTag( "mapped_columns" ); for ( String alias : mappedColumns.keySet() ) { HBaseValueMetaInterface vm = mappedColumns.get( alias ); retString += XMLHandler.openTag( "mapped_column" ); retString += XMLHandler.addTagValue( "alias", alias ); retString += XMLHandler.addTagValue( "column_family", vm.getColumnFamily() ); retString 
+= XMLHandler.addTagValue( "column_name", vm.getColumnName() ); retString += XMLHandler.addTagValue( "type", vm.getHBaseTypeDesc() ); retString += XMLHandler.closeTag( "mapped_column" ); } retString += XMLHandler.closeTag( "mapped_columns" ); } retString += XMLHandler.closeTag( "mapping" ); return retString; } @Override public boolean loadXML( Node node ) throws KettleXMLException { node = XMLHandler.getSubNode( node, "mapping" ); if ( node == null || Const.isEmpty( XMLHandler.getTagValue( node, "key" ) ) ) { return false; // no mapping info in XML } setMappingName( XMLHandler.getTagValue( node, "mapping_name" ) ); setTableName( XMLHandler.getTagValue( node, "table_name" ) ); String keyName = XMLHandler.getTagValue( node, "key" ); if ( keyName.indexOf( ',' ) > 0 ) { setTupleMapping( true ); setKeyName( keyName.substring( 0, keyName.indexOf( ',' ) ) ); if ( keyName.indexOf( ',' ) != keyName.length() - 1 ) { // specific families have been supplied String familiesList = keyName.substring( keyName.indexOf( ',' ) + 1, keyName.length() ); if ( !Const.isEmpty( familiesList.trim() ) ) { setTupleFamilies( familiesList ); } } } else { setKeyName( keyName ); } String keyTypeS = XMLHandler.getTagValue( node, "key_type" ); for ( KeyType k : KeyType.values() ) { if ( k.toString().equalsIgnoreCase( keyTypeS ) ) { setKeyType( k ); break; } } Node fields = XMLHandler.getSubNode( node, "mapped_columns" ); if ( fields != null && XMLHandler.countNodes( fields, "mapped_column" ) > 0 ) { int nrfields = XMLHandler.countNodes( fields, "mapped_column" ); for ( int i = 0; i < nrfields; i++ ) { Node fieldNode = XMLHandler.getSubNodeByNr( fields, "mapped_column", i ); String alias = XMLHandler.getTagValue( fieldNode, "alias" ); String colFam = XMLHandler.getTagValue( fieldNode, "column_family" ); if ( colFam == null ) { colFam = ""; } String colName = XMLHandler.getTagValue( fieldNode, "column_name" ); if ( colName == null ) { colName = ""; } String type = XMLHandler.getTagValue( fieldNode, "type" ); AELHBaseValueMetaImpl vm = new AELHBaseValueMetaImpl( false, alias, colName, colFam, getMappingName(), getTableName() ); vm.setHBaseTypeFromString( type ); try { addMappedColumn( vm, isTupleMapping() ); } catch ( Exception ex ) { throw new KettleXMLException( ex ); } } } return true; } @Override public boolean readRep( Repository repository, ObjectId objectId ) throws KettleException { return false; } @Override public String getFriendlyName() { return null; } @Override public Object decodeKeyValue( byte[] bytes ) throws KettleException { return null; } } ================================================ FILE: kettle-plugins/hbase-meta/src/main/java/org/pentaho/big/data/kettle/plugins/hbase/meta/AELHBaseValueMetaImpl.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. 
* * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.hbase.meta; import org.apache.hadoop.hbase.util.Bytes; import org.pentaho.di.core.exception.KettleException; import org.pentaho.di.core.row.ValueMeta; import org.pentaho.di.core.row.ValueMetaInterface; import org.pentaho.di.core.row.value.ValueMetaBase; import org.pentaho.di.core.xml.XMLHandler; import org.pentaho.di.i18n.BaseMessages; import org.pentaho.di.repository.ObjectId; import org.pentaho.di.repository.Repository; import org.pentaho.hadoop.shim.api.hbase.meta.HBaseValueMetaInterface; import java.math.BigDecimal; import java.util.Date; public class AELHBaseValueMetaImpl extends ValueMetaBase implements HBaseValueMetaInterface { private boolean isKey; private String alias; private String columnName; private String columnFamily; private String mappingName; private String tableName; private boolean isLongOrDouble = true; public AELHBaseValueMetaImpl( boolean isKey, String alias, String columnName, String columnFamily, String mappingName, String tableName ) { super( alias ); this.isKey = isKey; this.alias = alias; this.columnName = columnName; this.columnFamily = columnFamily; this.mappingName = mappingName; this.tableName = tableName; } @Override public boolean isKey() { return isKey; } @Override public void setKey( boolean key ) { isKey = key; } @Override public String getAlias() { return getName(); } @Override public void setAlias( String alias ) { this.alias = alias; setName( alias ); } @Override public String getColumnName() { return columnName; } @Override public void setColumnName( String columnName ) { this.columnName = columnName; } @Override public String getColumnFamily() { return columnFamily; } @Override public void setColumnFamily( String columnFamily ) { this.columnFamily = columnFamily; } @Override public void setHBaseTypeFromString( String hbaseType ) throws IllegalArgumentException { if ( hbaseType.equalsIgnoreCase( "Integer" ) ) { setType( ValueMeta.getType( hbaseType ) ); setIsLongOrDouble( false ); return; } if ( hbaseType.equalsIgnoreCase( "Long" ) ) { setType( ValueMeta.getType( "Integer" ) ); setIsLongOrDouble( true ); return; } if ( hbaseType.equals( "Float" ) ) { setType( ValueMeta.getType( "Number" ) ); setIsLongOrDouble( false ); return; } if ( hbaseType.equals( "Double" ) ) { setType( ValueMeta.getType( "Number" ) ); setIsLongOrDouble( true ); return; } // default int type = ValueMeta.getType( hbaseType ); if ( type == ValueMetaInterface.TYPE_NONE ) { throw new IllegalArgumentException( BaseMessages.getString( PKG, "HBaseValueMeta.Error.UnknownType", hbaseType ) ); } setType( type ); } @Override public String getHBaseTypeDesc() { if ( isInteger() ) { return ( getIsLongOrDouble() ? "Long" : "Integer" ); } if ( isNumber() ) { return ( getIsLongOrDouble() ? 
"Double" : "Float" ); } return ValueMeta.getTypeDesc( getType() ); } @Override public Object decodeColumnValue( byte[] rawColValue ) throws KettleException { // just return null if this column doesn't have a value for the row if ( rawColValue == null ) { return null; } if ( isString() ) { String convertedString = Bytes.toString( rawColValue ); if ( getStorageType() == ValueMetaInterface.STORAGE_TYPE_INDEXED ) { // need to return the integer index of this value Object[] legalVals = getIndex(); int foundIndex = -1; for ( int i = 0; i < legalVals.length; i++ ) { if ( legalVals[ i ].toString().trim().equals( convertedString.trim() ) ) { foundIndex = i; break; } } if ( foundIndex >= 0 ) { return new Integer( foundIndex ); } throw new KettleException( BaseMessages.getString( PKG, "HBaseValueMeta.Error.IllegalIndexedColumnValue", convertedString, getAlias() ) ); } else { return convertedString; } } if ( isNumber() ) { if ( rawColValue.length == Bytes.SIZEOF_FLOAT ) { float floatResult = Bytes.toFloat( rawColValue ); return new Double( floatResult ); } if ( rawColValue.length == Bytes.SIZEOF_DOUBLE ) { return new Double( Bytes.toDouble( rawColValue ) ); } } if ( isInteger() ) { if ( rawColValue.length == Bytes.SIZEOF_INT ) { int intResult = Bytes.toInt( rawColValue ); return new Long( intResult ); } if ( rawColValue.length == Bytes.SIZEOF_LONG ) { return new Long( Bytes.toLong( rawColValue ) ); } if ( rawColValue.length == Bytes.SIZEOF_SHORT ) { // be lenient on reading from HBase - accept and convert shorts // even though our mapping defines only longs and integers // TODO add short to the types that can be mapped? short tempShort = Bytes.toShort( rawColValue ); return new Long( tempShort ); } throw new KettleException( BaseMessages.getString( PKG, "HBaseValueMeta.Error.IllegalIntegerLength" ) ); } if ( isBigNumber() ) { String temp = Bytes.toString( rawColValue ); BigDecimal result = new BigDecimal( temp ); if ( result == null ) { throw new KettleException( BaseMessages.getString( PKG, "HBaseValueMeta.Error.UnableToDecodeBigDecimal" ) ); } return result; } if ( isBinary() ) { // just return the raw array of bytes return rawColValue; } if ( isBoolean() ) { // try as a string first Boolean result = decodeBoolFromString( rawColValue ); if ( result == null ) { // try as a number result = decodeBoolFromNumber( rawColValue ); } if ( result != null ) { return result; } throw new KettleException( BaseMessages.getString( PKG, "HBaseValueMeta.Error.UnableToDecodeBoolean" ) ); } if ( isDate() ) { if ( rawColValue.length != Bytes.SIZEOF_LONG ) { throw new KettleException( BaseMessages.getString( PKG, "HBaseValueMeta.Error.DateValueLengthNotEqualToLong" ) ); } long millis = Bytes.toLong( rawColValue ); Date d = new Date( millis ); return d; } throw new KettleException( BaseMessages.getString( PKG, "HBaseValueMeta.Error.UnknownTypeForColumn" ) ); } @Override public byte[] encodeColumnValue( Object columnValue, ValueMetaInterface colMeta ) throws KettleException { if ( columnValue == null ) { return null; } byte[] encoded = null; /** * BACKLOG-26151 - * When doing type conversions, the type of this HBase value * is given by outputType, the colMeta then converts based on * the type of the incoming value */ int outputType = this.getType(); switch ( outputType ) { case TYPE_STRING: String toEncode = colMeta.getString( columnValue ); encoded = Bytes.toBytes( toEncode ); break; case TYPE_INTEGER: Long l = colMeta.getInteger( columnValue ); if ( getIsLongOrDouble() ) { encoded = Bytes.toBytes( l.longValue() ); } 
else { encoded = Bytes.toBytes( l.intValue() ); } break; case TYPE_NUMBER: Double d = colMeta.getNumber( columnValue ); if ( getIsLongOrDouble() ) { encoded = Bytes.toBytes( d.doubleValue() ); } else { encoded = Bytes.toBytes( d.floatValue() ); } break; case TYPE_DATE: Date date = colMeta.getDate( columnValue ); encoded = Bytes.toBytes( date.getTime() ); break; case TYPE_BOOLEAN: Boolean b = colMeta.getBoolean( columnValue ); String boolString = ( b.booleanValue() ) ? "Y" : "N"; encoded = Bytes.toBytes( boolString ); break; case TYPE_BIGNUMBER: BigDecimal bd = colMeta.getBigNumber( columnValue ); String bds = bd.toString(); encoded = Bytes.toBytes( bds ); break; case TYPE_BINARY: encoded = colMeta.getBinary( columnValue ); break; } return encoded; } @Override public String getMappingName() { return mappingName; } @Override public void setMappingName( String mappingName ) { this.mappingName = mappingName; } @Override public String getTableName() { return tableName; } @Override public void setTableName( String tableName ) { this.tableName = tableName; } @Override public boolean getIsLongOrDouble() { return isLongOrDouble; } @Override public void setIsLongOrDouble( boolean ld ) { this.isLongOrDouble = ld; } @Override public void getXml( StringBuilder retval ) { String format = getConversionMask(); retval.append( "\n " ).append( XMLHandler.openTag( "field" ) ); retval.append( "\n " ).append( XMLHandler.addTagValue( "table_name", getTableName() ) ); retval.append( "\n " ).append( XMLHandler.addTagValue( "mapping_name", getMappingName() ) ); retval.append( "\n " ).append( XMLHandler.addTagValue( "alias", getAlias() ) ); retval.append( "\n " ).append( XMLHandler.addTagValue( "family", getColumnFamily() ) ); retval.append( "\n " ).append( XMLHandler.addTagValue( "column", getColumnName() ) ); retval.append( "\n " ).append( XMLHandler.addTagValue( "key", isKey() ) ); retval.append( "\n " ).append( XMLHandler.addTagValue( "type", ValueMetaBase.getTypeDesc( getType() ) ) ); retval.append( "\n " ).append( XMLHandler.addTagValue( "format", format ) ); retval.append( "\n " ).append( XMLHandler.closeTag( "field" ) ); } @Override public void saveRep( Repository rep, ObjectId id_transformation, ObjectId id_step, int count ) throws KettleException { //noop in AEL } /** * Decodes a boolean value from an array of bytes that is assumed to hold a string.] * Lifted from Shim to support AEL conversions * * @param rawEncoded an array of bytes holding the string representation of a boolean value * @return a Boolean object or null if it can't be decoded from the supplied array of bytes. */ public static Boolean decodeBoolFromString( byte[] rawEncoded ) { String tempString = Bytes.toString( rawEncoded ); if ( tempString.equalsIgnoreCase( "Y" ) || tempString.equalsIgnoreCase( "N" ) || tempString.equalsIgnoreCase( "YES" ) || tempString.equalsIgnoreCase( "NO" ) || tempString.equalsIgnoreCase( "TRUE" ) || tempString.equalsIgnoreCase( "FALSE" ) || tempString.equalsIgnoreCase( "T" ) || tempString.equalsIgnoreCase( "F" ) || tempString.equalsIgnoreCase( "1" ) || tempString.equalsIgnoreCase( "0" ) ) { return Boolean.valueOf( tempString.equalsIgnoreCase( "Y" ) || tempString.equalsIgnoreCase( "YES" ) || tempString.equalsIgnoreCase( "TRUE" ) || tempString.equalsIgnoreCase( "T" ) || tempString.equalsIgnoreCase( "1" ) ); } return null; } /** * Decodes a boolean value from an array of bytes that is assumed to hold a number. 
* Lifted from Shim to support AEL conversions * * @param rawEncoded an array of bytes holding the numerical representation of a boolean value * @return a Boolean object or null if it can't be decoded from the supplied array of bytes. */ public static Boolean decodeBoolFromNumber( byte[] rawEncoded ) { if ( rawEncoded == null ) { return null; } if ( rawEncoded.length == Bytes.SIZEOF_BYTE ) { byte val = rawEncoded[ 0 ]; if ( val == 0 || val == 1 ) { return new Boolean( val == 1 ); } } if ( rawEncoded.length == Bytes.SIZEOF_SHORT ) { short tempShort = Bytes.toShort( rawEncoded ); if ( tempShort == 0 || tempShort == 1 ) { return new Boolean( tempShort == 1 ); } } if ( rawEncoded.length == Bytes.SIZEOF_INT || rawEncoded.length == Bytes.SIZEOF_FLOAT ) { int tempInt = Bytes.toInt( rawEncoded ); if ( tempInt == 1 || tempInt == 0 ) { return new Boolean( tempInt == 1 ); } float tempFloat = Bytes.toFloat( rawEncoded ); if ( tempFloat == 0.0f || tempFloat == 1.0f ) { return new Boolean( tempFloat == 1.0f ); } } if ( rawEncoded.length == Bytes.SIZEOF_LONG || rawEncoded.length == Bytes.SIZEOF_DOUBLE ) { long tempLong = Bytes.toLong( rawEncoded ); if ( tempLong == 0L || tempLong == 1L ) { return new Boolean( tempLong == 1L ); } double tempDouble = Bytes.toDouble( rawEncoded ); if ( tempDouble == 0.0 || tempDouble == 1.0 ) { return new Boolean( tempDouble == 1.0 ); } } // not identifiable from a number return null; } } ================================================ FILE: kettle-plugins/hbase-meta/src/test/java/org/pentaho/big/data/kettle/plugins/hbase/meta/AELHBaseMappingTest.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. 
* * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.hbase.meta; import org.apache.commons.io.IOUtils; import org.junit.Assert; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import org.mockito.runners.MockitoJUnitRunner; import org.w3c.dom.Document; import org.w3c.dom.Node; import org.xml.sax.InputSource; import org.xml.sax.SAXException; import javax.xml.parsers.DocumentBuilder; import javax.xml.parsers.DocumentBuilderFactory; import javax.xml.parsers.ParserConfigurationException; import java.io.IOException; import java.io.StringReader; import static org.junit.Assert.fail; @RunWith( MockitoJUnitRunner.class ) public class AELHBaseMappingTest { private AELHBaseMappingImpl stubMapping; @Before public void setup() throws Exception { stubMapping = new AELHBaseMappingImpl(); Node mappingNode = null; try { mappingNode = getMappingNode(); } catch( Exception ex ) { fail(); } stubMapping.loadXML( mappingNode ); } @Test public void inflateFromXmlTest() { Assert.assertEquals( stubMapping.getTableName(), "iemployee" ); Assert.assertEquals( stubMapping.getMappingName(), "simple input map" ); Assert.assertEquals( stubMapping.getMappedColumns().size(), 3 ); } @Test public void serializeToXmlTest() throws IOException { String serializedStub = stubMapping.getXML(); Assert.assertTrue( serializedStub.contains( "iemployee" ) ); Assert.assertTrue( serializedStub.contains( "simple input map" ) ); } private Node getMappingNode() throws IOException, ParserConfigurationException, SAXException { String xml = IOUtils.toString( getClass().getClassLoader().getResourceAsStream( "StubMapping.xml" ) ); DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance(); DocumentBuilder builder = factory.newDocumentBuilder(); Document doc = builder.parse( new InputSource( new StringReader( xml ) ) ); return doc.getDocumentElement(); } } ================================================ FILE: kettle-plugins/hbase-meta/src/test/java/org/pentaho/big/data/kettle/plugins/hbase/meta/AELHBaseValueMetaTest.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. 
* * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.hbase.meta; import org.apache.hadoop.hbase.util.Bytes; import org.junit.Assert; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import org.mockito.runners.MockitoJUnitRunner; import org.pentaho.di.core.exception.KettleException; import java.math.BigDecimal; import java.util.Date; @RunWith( MockitoJUnitRunner.class ) public class AELHBaseValueMetaTest { private AELHBaseValueMetaImpl stubValueMeta; @Before public void setup() throws Exception { stubValueMeta = new AELHBaseValueMetaImpl( true, "testAlias", "testColumnName", "testColumnFamily", "testMappingName", "testTableName" ); stubValueMeta.setMappingName( "testMappingName" ); stubValueMeta.setTableName( "testTableName" ); stubValueMeta.setType( 5 ); stubValueMeta.setIsLongOrDouble( false ); } @Test public void getXmlSerializationTest() { StringBuilder sb = new StringBuilder( ); stubValueMeta.getXml( sb ); Assert.assertTrue( sb.toString().contains( "Y" ) ); Assert.assertTrue( sb.toString().contains( "testAlias" ) ); Assert.assertTrue( sb.toString().contains( "testColumnName" ) ); } @Test public void getHBaseTypeDescTest() { String stubType = stubValueMeta.getHBaseTypeDesc(); Assert.assertEquals( "Integer", stubType ); } @Test public void getHBaseTypeDescNumberTest() { stubValueMeta.setType( 1 ); String stubType = stubValueMeta.getHBaseTypeDesc(); Assert.assertEquals( "Float", stubType ); } @Test public void decodeNullBytesTest() throws KettleException { Object shouldBeNull = stubValueMeta.decodeColumnValue( null ); Assert.assertNull( shouldBeNull ); } @Test public void decodeStringIntoObject() throws KettleException { stubValueMeta.setType( 2 ); Object str = stubValueMeta.decodeColumnValue( Bytes.toBytes( "stubString" ) ); Assert.assertNotNull( str ); } @Test public void decodeNumberIntoObject() throws KettleException { stubValueMeta.setType( 1 ); Object str = stubValueMeta.decodeColumnValue( Bytes.toBytes( 2.2 ) ); Assert.assertNotNull( str ); } @Test public void decodeFloadIntoObject() throws KettleException { stubValueMeta.setType( 1 ); Object str = stubValueMeta.decodeColumnValue( Bytes.toBytes( 2.2f ) ); Assert.assertNotNull( str ); } @Test public void decodeIntegerIntoObject() throws KettleException { stubValueMeta.setType( 5 ); Object str = stubValueMeta.decodeColumnValue( Bytes.toBytes( 1 ) ); Assert.assertNotNull( str ); } @Test public void decodeLongIntoObject() throws KettleException { stubValueMeta.setType( 5 ); Object str = stubValueMeta.decodeColumnValue( Bytes.toBytes( 1L ) ); Assert.assertNotNull( str ); } @Test public void decodeShortIntoObject() throws KettleException { stubValueMeta.setType( 5 ); short i = 1; Object str = stubValueMeta.decodeColumnValue( Bytes.toBytes( i ) ); Assert.assertNotNull( str ); } @Test public void decodeBigNumberIntoObject() throws KettleException { stubValueMeta.setType( 6 ); Object str = stubValueMeta.decodeColumnValue( Bytes.toBytes( "9.9999999" ) ); Assert.assertNotNull( str ); } @Test public void decodeBooleanStringIntoObject() throws KettleException { stubValueMeta.setType( 4 ); Object str = stubValueMeta.decodeColumnValue( Bytes.toBytes( "1" ) ); Assert.assertNotNull( str ); } @Test public void decodeBooleanFloatIntoObject() throws KettleException { stubValueMeta.setType( 4 ); Object str = stubValueMeta.decodeColumnValue( Bytes.toBytes( 1.0f ) ); Assert.assertNotNull( str ); } @Test public void 
decodeBooleanLongIntoObject() throws KettleException { stubValueMeta.setType( 4 ); Object str = stubValueMeta.decodeColumnValue( Bytes.toBytes( 1L ) ); Assert.assertNotNull( str ); } @Test public void decodeBooleanDoubleIntoObject() throws KettleException { stubValueMeta.setType( 4 ); Object str = stubValueMeta.decodeColumnValue( Bytes.toBytes( 1.0 ) ); Assert.assertNotNull( str ); } @Test public void decodeBooleanBytesIntoObject() throws KettleException { stubValueMeta.setType( 4 ); byte i = 1; Object str = stubValueMeta.decodeColumnValue( Bytes.toBytes( i ) ); Assert.assertNotNull( str ); } public void decodeBooleanShortIntoObject() throws KettleException { stubValueMeta.setType( 4 ); short i = 1; Object str = stubValueMeta.decodeColumnValue( Bytes.toBytes( i ) ); Assert.assertNotNull( str ); } @Test public void decodeBooleanIntoObject() throws KettleException { stubValueMeta.setType( 4 ); Object str = stubValueMeta.decodeColumnValue( Bytes.toBytes( 1 ) ); Assert.assertNotNull( str ); } @Test public void decodeBytesIntoObject() throws KettleException { stubValueMeta.setType( 8 ); Object str = stubValueMeta.decodeColumnValue( Bytes.toBytes( 1010 ) ); Assert.assertNotNull( str ); } @Test public void decodeDateIntoObject() throws KettleException { stubValueMeta.setType( 3 ); Object str = stubValueMeta.decodeColumnValue( Bytes.toBytes( 1539717565559l ) ); Assert.assertNotNull( str ); } @Test public void encodeNullBytesTest() throws KettleException { Object shouldBeNull = stubValueMeta.encodeColumnValue( null, stubValueMeta ); Assert.assertNull( shouldBeNull ); } @Test public void encodeStringIntoBytes() throws KettleException { stubValueMeta.setType( 2 ); Object str = stubValueMeta.encodeColumnValue( "stubString", stubValueMeta ); Assert.assertNotNull( str ); } @Test public void encodeNumberIntoBytes() throws KettleException { stubValueMeta.setType( 1 ); Object str = stubValueMeta.encodeColumnValue(2.2, stubValueMeta ); Assert.assertNotNull( str ); } @Test public void encodeIntegerIntoBytes() throws KettleException { stubValueMeta.setType( 5 ); Object str = stubValueMeta.encodeColumnValue(1L, stubValueMeta ); Assert.assertNotNull( str ); } @Test public void encodeBigNumberIntoBytes() throws KettleException { stubValueMeta.setType( 6 ); Object str = stubValueMeta.encodeColumnValue( new BigDecimal( 9.9999999 ), stubValueMeta ); Assert.assertNotNull( str ); } @Test public void encodeDateIntoBytes() throws KettleException { stubValueMeta.setType( 3 ); Object str = stubValueMeta.encodeColumnValue( new Date(), stubValueMeta ); Assert.assertNotNull( str ); } @Test public void encodeBooleanIntoBytes() throws KettleException { stubValueMeta.setType( 4 ); Object str = stubValueMeta.encodeColumnValue( Boolean.TRUE, stubValueMeta ); Assert.assertNotNull( str ); } @Test public void encodeBinaryIntoBytes() throws KettleException { stubValueMeta.setType( 8 ); Object str = stubValueMeta.encodeColumnValue( new byte[]{ 1, 0, 1 }, stubValueMeta ); Assert.assertNotNull( str ); } @Test public void integerIsNotLongOrDoubleTest() { stubValueMeta.setHBaseTypeFromString( "Integer" ); Assert.assertFalse( stubValueMeta.getIsLongOrDouble() ); } @Test public void longIsLongOrDouble() { stubValueMeta.setHBaseTypeFromString( "Long" ); Assert.assertTrue( stubValueMeta.getIsLongOrDouble() ); } @Test public void floatIsNotLongOrDouble() { stubValueMeta.setHBaseTypeFromString( "Float" ); Assert.assertFalse( stubValueMeta.getIsLongOrDouble() ); } @Test public void doubleIsLongOrDouble() { stubValueMeta.setHBaseTypeFromString( 
"Double" ); Assert.assertTrue( stubValueMeta.getIsLongOrDouble() ); } } ================================================ FILE: kettle-plugins/hbase-meta/src/test/resources/StubMapping.xml ================================================ Test HBaseInput N 1 none Local Sandbox sandbox-hdp.hortonworks.com 2181 iemployee iemployee simple input map Rowkey Rowkey Y Integer iemployee simple input map fname personal fname N Float iemployee simple input map lname personal lname N Double iemployee simple input map salary payroll salary N Float N simple input map iemployee Rowkey Integer fname personal fname Integer lname personal lname Long salary payroll salary Float 160 144 Y ================================================ FILE: kettle-plugins/hdfs/assemblies/plugin/pom.xml ================================================ 4.0.0 hdfs-assemblies pentaho 11.1.0.0-SNAPSHOT pdi-hdfs-plugin pom PDI Hdfs Plugin Distribution ${project.basedir}/src/main/resources ${project.build.directory}/assembly pentaho pdi-hdfs-core ${project.version} ================================================ FILE: kettle-plugins/hdfs/assemblies/plugin/src/assembly/assembly.xml ================================================ zip zip ${resources.directory} . true ${assembly.dir} . . pentaho:pdi-hdfs-core:jar false runtime . false false pentaho:pdi-hdfs-core:jar ================================================ FILE: kettle-plugins/hdfs/assemblies/plugin/src/main/resources/version.xml ================================================ ${project.version} ================================================ FILE: kettle-plugins/hdfs/assemblies/pom.xml ================================================ 4.0.0 pentaho pentaho-big-data-kettle-plugins-hdfs 11.1.0.0-SNAPSHOT hdfs-assemblies pom PDI Hdfs Plugin Assemblies plugin ================================================ FILE: kettle-plugins/hdfs/core/pom.xml ================================================ 4.0.0 pentaho pentaho-big-data-kettle-plugins-hdfs 11.1.0.0-SNAPSHOT pdi-hdfs-core PDI Hdfs Core site 11.1.0.0-SNAPSHOT 1.1.3 2.6.0 src/main/resources false src/main/resources-filtered true org.pentaho shim-api ${pentaho-hadoop-shims.version} provided pentaho pentaho-big-data-kettle-plugins-common-ui ${project.version} provided org.mockito mockito-all pentaho-kettle kettle-core ${pdi.version} provided pentaho-kettle kettle-engine ${pdi.version} provided pentaho-kettle kettle-ui-swt ${pdi.version} provided com.tinkerpop.blueprints blueprints-core ${dependency.com.tinkerpop.blueprints.version} test pentaho pentaho-platform-core ${platform.version} provided junit junit ${dependency.junit.revision} test org.mockito mockito-core ${mockito.version} test org.mockito mockito-inline ${mockito-inline.version} test pentaho-kettle kettle-engine ${pdi.version} tests test org.mockito mockito-all pentaho pentaho-big-data-legacy ${project.version} org.mockito mockito-all pentaho pentaho-big-data-impl-cluster ${project.version} provided org.mockito mockito-all pentaho pentaho-big-data-api-runtimeTest ${project.version} provided pentaho pentaho-metaverse-api ${pentaho-metaverse.version} provided org.jdom jdom ${jdom.version} test org.apache.logging.log4j log4j-core ${log4j.version} test commons-httpclient commons-httpclient ${dependency.commons-httpclient.revision} test pentaho-kettle kettle-core ${pdi.version} tests test org.mockito mockito-all ================================================ FILE: kettle-plugins/hdfs/core/src/main/java/org/pentaho/big/data/kettle/plugins/hdfs/HdfsLifecycleListener.java 
================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.hdfs; import org.pentaho.hadoop.shim.api.cluster.NamedClusterService; import org.pentaho.big.data.kettle.plugins.hdfs.vfs.HadoopVfsFileChooserDialog; import org.pentaho.big.data.kettle.plugins.hdfs.vfs.MapRFSFileChooserDialog; import org.pentaho.big.data.kettle.plugins.hdfs.vfs.NamedClusterVfsFileChooserDialog; import org.pentaho.big.data.kettle.plugins.hdfs.vfs.Schemes; import org.pentaho.di.core.annotations.LifecyclePlugin; import org.pentaho.di.core.lifecycle.LifeEventHandler; import org.pentaho.di.core.lifecycle.LifecycleException; import org.pentaho.di.core.lifecycle.LifecycleListener; import org.pentaho.di.ui.spoon.Spoon; import org.pentaho.runtime.test.RuntimeTester; import org.pentaho.runtime.test.action.RuntimeTestActionService; import org.pentaho.runtime.test.action.impl.RuntimeTestActionServiceImpl; import org.pentaho.runtime.test.impl.RuntimeTesterImpl; import org.pentaho.vfs.ui.VfsFileChooserDialog; import org.pentaho.big.data.impl.cluster.NamedClusterManager; /** * Created by bryan on 11/23/15. */ @LifecyclePlugin( id = "HdfsLifecycleListener", name = "HdfsLifecycleListener" ) public class HdfsLifecycleListener implements LifecycleListener { private final int hdfsPriority = 150; private final int maprPriority = 160; private final int ncPriority = 110; private final NamedClusterService ncService; private final RuntimeTestActionService rtTestActServ; private final RuntimeTester rtTester; private HadoopVfsFileChooserDialog hdfsFileChooserDialog; private MapRFSFileChooserDialog mapRFSFileChooserDialog; private NamedClusterVfsFileChooserDialog ncFileChooserDialog; public HdfsLifecycleListener() { this.ncService = NamedClusterManager.getInstance(); this.rtTestActServ = RuntimeTestActionServiceImpl.getInstance(); this.rtTester = RuntimeTesterImpl.getInstance(); } @Deprecated // This OSGI constructor should be removed public HdfsLifecycleListener( NamedClusterService namedClusterService, RuntimeTestActionService runtimeTestActionService, RuntimeTester runtimeTester ) { this.ncService = namedClusterService; this.rtTestActServ = runtimeTestActionService; this.rtTester = runtimeTester; } @Override public void onStart( LifeEventHandler lifeEventHandler ) throws LifecycleException { final Spoon spoon = Spoon.getInstance(); // Add dialogs on display thread spoon.getDisplay().asyncExec( new Runnable() { @Override public void run() { VfsFileChooserDialog dialog = spoon.getVfsFileChooserDialog( null, null ); hdfsFileChooserDialog = new HadoopVfsFileChooserDialog( Schemes.HDFS_SCHEME, Schemes.HDFS_SCHEME_DISPLAY_NAME, dialog, null, null, ncService, rtTestActServ, rtTester ); dialog.addVFSUIPanel( hdfsPriority, hdfsFileChooserDialog ); mapRFSFileChooserDialog = new MapRFSFileChooserDialog( Schemes.MAPRFS_SCHEME, Schemes.MAPRFS_SCHEME_DISPLAY_NAME, dialog ); dialog.addVFSUIPanel( maprPriority, mapRFSFileChooserDialog ); ncFileChooserDialog = new NamedClusterVfsFileChooserDialog( Schemes.NAMED_CLUSTER_SCHEME, Schemes.NAMED_CLUSTER_SCHEME_DISPLAY_NAME, dialog, null, null, ncService, rtTestActServ, rtTester ); dialog.addVFSUIPanel( 
ncPriority, ncFileChooserDialog ); } } ); } @Override public void onExit( LifeEventHandler lifeEventHandler ) throws LifecycleException { } } ================================================ FILE: kettle-plugins/hdfs/core/src/main/java/org/pentaho/big/data/kettle/plugins/hdfs/job/JobEntryHadoopCopyFiles.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.hdfs.job; import com.google.common.annotations.VisibleForTesting; import org.pentaho.big.data.impl.cluster.NamedClusterManager; import org.pentaho.di.core.annotations.JobEntry; import org.pentaho.di.core.variables.VariableSpace; import org.pentaho.di.job.entries.copyfiles.JobEntryCopyFiles; import org.pentaho.hadoop.shim.api.cluster.NamedCluster; import org.pentaho.hadoop.shim.api.cluster.NamedClusterService; import org.pentaho.metastore.api.IMetaStore; import org.pentaho.runtime.test.RuntimeTester; import org.pentaho.runtime.test.action.RuntimeTestActionService; import org.pentaho.runtime.test.action.impl.RuntimeTestActionServiceImpl; import org.pentaho.runtime.test.impl.RuntimeTesterImpl; import java.util.HashMap; import java.util.Map; import java.util.Objects; @JobEntry( id = "HadoopCopyFilesPlugin", image = "HDM.svg", name = "HadoopCopyFilesPlugin.Name", description = "HadoopCopyFilesPlugin.Description", categoryDescription = "i18n:org.pentaho.di.job:JobCategory.Category.BigData", i18nPackageName = "org.pentaho.di.job.entries.hadoopcopyfiles" ) public class JobEntryHadoopCopyFiles extends JobEntryCopyFiles { public static final String S3_SOURCE_FILE = "S3-SOURCE-FILE-"; public static final String S3_DEST_FILE = "S3-DEST-FILE-"; private final NamedClusterService namedClusterService; private final RuntimeTestActionService runtimeTestActionService; private final RuntimeTester runtimeTester; public JobEntryHadoopCopyFiles() { this.namedClusterService = NamedClusterManager.getInstance(); this.runtimeTestActionService = RuntimeTestActionServiceImpl.getInstance(); this.runtimeTester = RuntimeTesterImpl.getInstance(); this.fileFolderUrlMappings = new HashMap<>(); } /** * Hold mapping to go back to unresolved or original URL stored in the xml. *

* Mapping legend: *

    *
    *   <ul>
    *     <li>Key: return value from {@link #loadURL(String, String, IMetaStore, Map)}</li>
    *     <li>Value: stored URL from fields ( {@link #SOURCE_FILE_FOLDER } and {@link #DESTINATION_FILE_FOLDER} )
    *     or first parameter of {@link #loadURL(String, String, IMetaStore, Map)}</li>
    *   </ul>
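    *
    * For example: when {@link #loadURL(String, String, IMetaStore, Map)} resolves a stored, prefixed URL
    * against the selected named cluster and the value changes, the resolved URL is put into this map as the
    * key with the original stored URL as the value, so {@link #saveURL(String, String, IMetaStore, Map)} can
    * return the unresolved form when the entry is written back to XML.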
*/ protected final Map fileFolderUrlMappings; public JobEntryHadoopCopyFiles( NamedClusterService namedClusterService, RuntimeTestActionService runtimeTestActionService, RuntimeTester runtimeTester ) { this.namedClusterService = namedClusterService; this.runtimeTestActionService = runtimeTestActionService; this.runtimeTester = runtimeTester; this.fileFolderUrlMappings = new HashMap<>(); } @Override public String loadURL( String url, String ncName, IMetaStore metastore, Map mappings ) { NamedCluster c = namedClusterService.getNamedClusterByName( ncName, metastore ); String origUrl = url; boolean saveArgumentUrl = false; String pref = null; if ( url != null && url.indexOf( SOURCE_URL ) > -1 ) { origUrl = url; url = origUrl.substring( origUrl.indexOf( "-", origUrl.indexOf( SOURCE_URL ) + SOURCE_URL.length() ) + 1 ); pref = origUrl.substring( 0, origUrl.indexOf( "-", origUrl.indexOf( SOURCE_URL ) + SOURCE_URL.length() ) + 1 ); } else if ( url != null && url.indexOf( DEST_URL ) > -1 ) { origUrl = url; url = origUrl.substring( origUrl.indexOf( "-", origUrl.indexOf( DEST_URL ) + DEST_URL.length() ) + 1 ); pref = origUrl.substring( 0, origUrl.indexOf( "-", origUrl.indexOf( DEST_URL ) + DEST_URL.length() ) + 1 ); } if ( c != null ) { String valueBeforeCall = url; url = c.processURLsubstitution( url, metastore, getVariables() ); saveArgumentUrl = !Objects.equals( valueBeforeCall, url ); } if ( pref != null ) { url = pref + url; } if ( saveArgumentUrl ) { fileFolderUrlMappings.put( url, origUrl ); } return super.loadURL( url, ncName, metastore, mappings ); } /** * Preserve the original URL input argument from {@link #loadURL(String, String, IMetaStore, Map)} and don't save the * "resolved" URL, otherwise call normal logic from super class. * @see JobEntryCopyFiles#loadURL(String, String, IMetaStore, Map) * @param url * @param ncName * @param metastore * @param mappings * @return original URL if it has changed otherwise, the result from super class */ @Override public String saveURL( String url, String ncName, IMetaStore metastore, Map mappings ) { return !Objects.isNull( url ) && fileFolderUrlMappings.containsKey( url ) ? fileFolderUrlMappings.get( url ) : super.saveURL( url, ncName, metastore, mappings ); } @VisibleForTesting @Override protected VariableSpace getVariables() { return super.getVariables(); } public NamedClusterService getNamedClusterService() { return namedClusterService; } public RuntimeTestActionService getRuntimeTestActionService() { return runtimeTestActionService; } public RuntimeTester getRuntimeTester() { return runtimeTester; } } ================================================ FILE: kettle-plugins/hdfs/core/src/main/java/org/pentaho/big/data/kettle/plugins/hdfs/job/JobEntryHadoopCopyFilesDialog.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. 
* * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.hdfs.job; import org.apache.commons.lang.ArrayUtils; import org.apache.commons.vfs2.FileObject; import org.apache.commons.vfs2.FileSystemException; import org.eclipse.swt.SWT; import org.eclipse.swt.events.SelectionAdapter; import org.eclipse.swt.events.SelectionEvent; import org.eclipse.swt.graphics.Image; import org.eclipse.swt.widgets.MessageBox; import org.eclipse.swt.widgets.Shell; import org.eclipse.swt.widgets.TableItem; import org.pentaho.di.core.annotations.PluginDialog; import org.pentaho.di.core.plugins.ParentFirst; import org.pentaho.hadoop.shim.api.cluster.NamedCluster; import org.pentaho.hadoop.shim.api.cluster.NamedClusterService; import org.pentaho.big.data.kettle.plugins.hdfs.vfs.HadoopVfsFileChooserDialog; import org.pentaho.big.data.kettle.plugins.hdfs.vfs.Schemes; import org.pentaho.big.data.plugins.common.ui.NamedClusterWidgetImpl; import org.pentaho.di.core.util.Utils; import org.pentaho.di.core.exception.KettleFileException; import org.pentaho.di.core.logging.LogChannel; import org.pentaho.di.core.vfs.KettleVFS; import org.pentaho.di.i18n.BaseMessages; import org.pentaho.di.job.JobMeta; import org.pentaho.di.job.entries.copyfiles.JobEntryCopyFiles; import org.pentaho.di.job.entry.JobEntryInterface; import org.pentaho.di.repository.Repository; import org.pentaho.di.ui.core.ConstUI; import org.pentaho.di.ui.core.gui.GUIResource; import org.pentaho.di.ui.core.widget.ColumnInfo; import org.pentaho.di.ui.job.entries.copyfiles.JobEntryCopyFilesDialog; import org.pentaho.di.ui.spoon.Spoon; import org.pentaho.metastore.api.exceptions.MetaStoreException; import org.pentaho.runtime.test.RuntimeTester; import org.pentaho.runtime.test.action.RuntimeTestActionService; import org.pentaho.vfs.ui.CustomVfsUiPanel; import org.pentaho.vfs.ui.VfsFileChooserDialog; import java.io.File; import java.util.HashMap; import java.util.List; import java.util.Map; @PluginDialog( id = "HadoopCopyFilesPlugin", image = "HDM.svg", pluginType = PluginDialog.PluginType.JOBENTRY, documentationUrl = "pdi-job-entries-reference-overview/hadoop-copy-files" ) //@ParentFirst( patterns = { "../../lib" } ) public class JobEntryHadoopCopyFilesDialog extends JobEntryCopyFilesDialog { private static Class BASE_PKG = JobEntryCopyFiles.class; // for i18n purposes, needed by Translator2!! $NON-NLS-1$ private static Class PKG = JobEntryHadoopCopyFiles.class; // for i18n purposes, needed by Translator2!! 
$NON-NLS-1$ private LogChannel log = new LogChannel( this ); private JobEntryHadoopCopyFiles jobEntryHadoopCopyFiles; private final NamedClusterService namedClusterService; private final RuntimeTestActionService runtimeTestActionService; private final RuntimeTester runtimeTester; public static final String S3_ENVIRONMENT = "S3"; public JobEntryHadoopCopyFilesDialog( Shell parent, JobEntryInterface jobEntryInt, Repository rep, JobMeta jobMeta ) { super( parent, jobEntryInt, rep, jobMeta ); jobEntry = (JobEntryCopyFiles) jobEntryInt; jobEntryHadoopCopyFiles = (JobEntryHadoopCopyFiles) jobEntry; namedClusterService = jobEntryHadoopCopyFiles.getNamedClusterService(); runtimeTestActionService = jobEntryHadoopCopyFiles.getRuntimeTestActionService(); runtimeTester = jobEntryHadoopCopyFiles.getRuntimeTester(); if ( this.jobEntry.getName() == null ) { this.jobEntry.setName( BaseMessages.getString( BASE_PKG, "JobCopyFiles.Name.Default" ) ); } } protected void initUI() { super.initUI(); shell.setText( BaseMessages.getString( PKG, "JobHadoopCopyFiles.Title" ) ); } protected SelectionAdapter getFileSelectionAdapter() { return new SelectionAdapter() { public void widgetSelected( SelectionEvent e ) { String path = wFields.getActiveTableItem().getText( wFields.getActiveTableColumn() ); String clusterName = wFields.getActiveTableItem().getText( wFields.getActiveTableColumn() - 1 ); setSelectedFile( path, clusterName ); } }; } /** * Copy information from the meta-data input to the dialog fields. */ public void getData() { if ( jobEntry.getName() != null ) { wName.setText( jobEntry.getName() ); } wName.selectAll(); wCopyEmptyFolders.setSelection( jobEntry.copy_empty_folders ); if ( jobEntry.source_filefolder != null ) { for ( int i = 0; i < jobEntry.source_filefolder.length; i++ ) { TableItem ti = wFields.table.getItem( i ); if ( jobEntry.source_filefolder[i] != null ) { String sourceUrl = jobEntry.source_filefolder[i]; String clusterName = jobEntry.getConfigurationBy( sourceUrl ); if ( clusterName != null ) { clusterName = clusterName.startsWith( JobEntryCopyFiles.LOCAL_SOURCE_FILE ) ? LOCAL_ENVIRONMENT : clusterName; clusterName = clusterName.startsWith( JobEntryCopyFiles.STATIC_SOURCE_FILE ) ? STATIC_ENVIRONMENT : clusterName; clusterName = clusterName.startsWith( JobEntryHadoopCopyFiles.S3_SOURCE_FILE ) ? S3_ENVIRONMENT : clusterName; ti.setText( 1, clusterName ); sourceUrl = clusterName.equals( LOCAL_ENVIRONMENT ) || clusterName.equals( STATIC_ENVIRONMENT ) || clusterName.equals( S3_ENVIRONMENT ) ? sourceUrl : jobEntry.getUrlPath( sourceUrl.replace( JobEntryCopyFiles.SOURCE_URL + i + "-", "" ) ); } if ( sourceUrl != null ) { sourceUrl = sourceUrl.replace( JobEntryCopyFiles.SOURCE_URL + i + "-", "" ); } else { sourceUrl = ""; } ti.setText( 2, sourceUrl ); } if ( jobEntry.wildcard[i] != null ) { ti.setText( 3, jobEntry.wildcard[i] ); } if ( jobEntry.destination_filefolder[i] != null ) { String destinationURL = jobEntry.destination_filefolder[i]; String clusterName = jobEntry.getConfigurationBy( destinationURL ); if ( clusterName != null ) { clusterName = clusterName.startsWith( JobEntryCopyFiles.LOCAL_DEST_FILE ) ? LOCAL_ENVIRONMENT : clusterName; clusterName = clusterName.startsWith( JobEntryCopyFiles.STATIC_DEST_FILE ) ? STATIC_ENVIRONMENT : clusterName; clusterName = clusterName.startsWith( JobEntryHadoopCopyFiles.S3_DEST_FILE ) ? 
S3_ENVIRONMENT : clusterName; ti.setText( 4, clusterName ); destinationURL = clusterName.equals( LOCAL_ENVIRONMENT ) || clusterName.equals( STATIC_ENVIRONMENT ) || clusterName.equals( S3_ENVIRONMENT ) ? destinationURL : jobEntry.getUrlPath( destinationURL.replace( JobEntryCopyFiles.DEST_URL + i + "-", "" ) ); } if ( destinationURL != null ) { destinationURL = destinationURL.replace( JobEntryCopyFiles.DEST_URL + i + "-", "" ); } else { destinationURL = ""; } ti.setText( 5, destinationURL != null ? destinationURL : "" ); } } wFields.setRowNums(); wFields.optWidth( true ); } wPrevious.setSelection( jobEntry.arg_from_previous ); wOverwriteFiles.setSelection( jobEntry.overwrite_files ); wIncludeSubfolders.setSelection( jobEntry.include_subfolders ); wRemoveSourceFiles.setSelection( jobEntry.remove_source_files ); wDestinationIsAFile.setSelection( jobEntry.destination_is_a_file ); wCreateDestinationFolder.setSelection( jobEntry.create_destination_folder ); wAddFileToResult.setSelection( jobEntry.add_result_filesname ); } protected void ok() { if ( Utils.isEmpty( wName.getText() ) ) { MessageBox mb = new MessageBox( shell, SWT.OK | SWT.ICON_ERROR ); mb.setText( BaseMessages.getString( BASE_PKG, "System.StepJobEntryNameMissing.Title" ) ); mb.setMessage( BaseMessages.getString( BASE_PKG, "System.JobEntryNameMissing.Msg" ) ); mb.open(); return; } jobEntry.setName( wName.getText() ); jobEntry.setCopyEmptyFolders( wCopyEmptyFolders.getSelection() ); jobEntry.setoverwrite_files( wOverwriteFiles.getSelection() ); jobEntry.setIncludeSubfolders( wIncludeSubfolders.getSelection() ); jobEntry.setArgFromPrevious( wPrevious.getSelection() ); jobEntry.setRemoveSourceFiles( wRemoveSourceFiles.getSelection() ); jobEntry.setAddresultfilesname( wAddFileToResult.getSelection() ); jobEntry.setDestinationIsAFile( wDestinationIsAFile.getSelection() ); jobEntry.setCreateDestinationFolder( wCreateDestinationFolder.getSelection() ); int nritems = wFields.nrNonEmpty(); Map namedClusterURLMappings = new HashMap(); jobEntry.source_filefolder = new String[nritems]; jobEntry.destination_filefolder = new String[nritems]; jobEntry.wildcard = new String[nritems]; for ( int i = 0; i < nritems; i++ ) { String sourceNc = wFields.getNonEmpty( i ).getText( 1 ); sourceNc = sourceNc.equals( LOCAL_ENVIRONMENT ) ? JobEntryCopyFiles.LOCAL_SOURCE_FILE + i : sourceNc; sourceNc = sourceNc.equals( STATIC_ENVIRONMENT ) ? JobEntryCopyFiles.STATIC_SOURCE_FILE + i : sourceNc; sourceNc = sourceNc.equals( S3_ENVIRONMENT ) ? JobEntryHadoopCopyFiles.S3_SOURCE_FILE + i : sourceNc; String source = wFields.getNonEmpty( i ).getText( 2 ); String wild = wFields.getNonEmpty( i ).getText( 3 ); String destNc = wFields.getNonEmpty( i ).getText( 4 ); destNc = destNc.equals( LOCAL_ENVIRONMENT ) ? JobEntryCopyFiles.LOCAL_DEST_FILE + i : destNc; destNc = destNc.equals( STATIC_ENVIRONMENT ) ? JobEntryCopyFiles.STATIC_DEST_FILE + i : destNc; destNc = destNc.equals( S3_ENVIRONMENT ) ? 
JobEntryHadoopCopyFiles.S3_DEST_FILE + i : destNc; String dest = wFields.getNonEmpty( i ).getText( 5 ); source = JobEntryCopyFiles.SOURCE_URL + i + "-" + source; dest = JobEntryCopyFiles.DEST_URL + i + "-" + dest; jobEntry.source_filefolder[i] = jobEntry.loadURL( source, sourceNc, getMetaStore(), namedClusterURLMappings ); jobEntry.destination_filefolder[i] = jobEntry.loadURL( dest, destNc, getMetaStore(), namedClusterURLMappings ); jobEntry.wildcard[i] = wild; } jobEntry.setConfigurationMappings( namedClusterURLMappings ); dispose(); } private FileObject setSelectedFile( String path, String clusterName ) { FileObject selectedFile = null; try { // Get current file FileObject rootFile = null; FileObject initialFile = null; FileObject defaultInitialFile = null; if ( !clusterName.equals( LOCAL_ENVIRONMENT ) && !clusterName.equals( S3_ENVIRONMENT ) ) { NamedCluster namedCluster = namedClusterService.getNamedClusterByName( clusterName, getMetaStore() ); if ( Utils.isEmpty( path ) ) { path = "/"; } if ( namedCluster == null ) { return null; } path = namedCluster.processURLsubstitution( path, getMetaStore(), jobMeta ); } boolean resolvedInitialFile = false; if ( clusterName.equals( S3_ENVIRONMENT ) && !path.startsWith( Schemes.S3_SCHEME + "://" ) ) { path = Schemes.S3_SCHEME + "://"; } if ( path != null ) { String fileName = jobMeta.environmentSubstitute( path ); if ( fileName != null && !fileName.equals( "" ) ) { try { initialFile = KettleVFS.getInstance( jobMeta.getBowl() ).getFileObject( fileName ); resolvedInitialFile = true; } catch ( Exception e ) { showMessageAndLog( BaseMessages.getString( PKG, "JobHadoopCopyFiles.Connection.Error.title" ), BaseMessages.getString( PKG, "JobHadoopCopyFiles.Connection.error" ), e.getMessage() ); return null; } File startFile = new File( System.getProperty( "user.home" ) ); defaultInitialFile = KettleVFS.getInstance( jobMeta.getBowl() ).getFileObject( startFile.getAbsolutePath() ); rootFile = initialFile.getFileSystem().getRoot(); } else { defaultInitialFile = KettleVFS.getInstance( jobMeta.getBowl() ) .getFileObject( Spoon.getInstance().getLastFileOpened() ); } } if ( rootFile == null ) { if ( defaultInitialFile == null ) { return null; } rootFile = defaultInitialFile.getFileSystem().getRoot(); initialFile = defaultInitialFile; } VfsFileChooserDialog fileChooserDialog = Spoon.getInstance().getVfsFileChooserDialog( rootFile, initialFile ); fileChooserDialog.defaultInitialFile = defaultInitialFile; NamedClusterWidgetImpl namedClusterWidget = null; if ( clusterName.equals( LOCAL_ENVIRONMENT ) ) { selectedFile = fileChooserDialog.open( shell, new String[] { "file" }, "file", true, path, new String[] { "*.*" }, FILETYPES, false, VfsFileChooserDialog.VFS_DIALOG_OPEN_FILE_OR_DIRECTORY, false, false ); } else if ( clusterName.equals( S3_ENVIRONMENT ) ) { selectedFile = fileChooserDialog.open( shell, new String[] { Schemes.S3_SCHEME, Schemes.S3N_SCHEME }, Schemes.S3_SCHEME, true, path, new String[] { "*.*" }, FILETYPES, false, VfsFileChooserDialog.VFS_DIALOG_OPEN_FILE_OR_DIRECTORY, false, true ); } else { NamedCluster namedCluster = namedClusterService.getNamedClusterByName( clusterName, getMetaStore() ); if ( namedCluster != null ) { if ( namedCluster.isMapr() ) { selectedFile = fileChooserDialog.open( shell, new String[] { Schemes.MAPRFS_SCHEME }, Schemes.MAPRFS_SCHEME, false, path, new String[] { "*.*" }, FILETYPES, true, VfsFileChooserDialog.VFS_DIALOG_OPEN_FILE_OR_DIRECTORY, false, false ); } else { List customPanels = fileChooserDialog.getCustomVfsUiPanels(); 
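// Walk the chooser's registered custom VFS panels to find the Hadoop panel, point its named-cluster
// widget at the selected cluster and initialize its connection panel before opening the HDFS browser.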
for ( CustomVfsUiPanel panel : customPanels ) { if ( panel instanceof HadoopVfsFileChooserDialog ) { HadoopVfsFileChooserDialog hadoopDialog = ( (HadoopVfsFileChooserDialog) panel ); namedClusterWidget = hadoopDialog.getNamedClusterWidget(); namedClusterWidget.initiate(); hadoopDialog.setNamedCluster( clusterName ); hadoopDialog.initializeConnectionPanel( initialFile ); } } if ( resolvedInitialFile ) { fileChooserDialog.initialFile = initialFile; } selectedFile = fileChooserDialog.open( shell, new String[] { Schemes.HDFS_SCHEME }, Schemes.HDFS_SCHEME, false, path, new String[] { "*.*" }, FILETYPES, true, VfsFileChooserDialog.VFS_DIALOG_OPEN_FILE_OR_DIRECTORY, false, false ); } } } CustomVfsUiPanel currentPanel = fileChooserDialog.getCurrentPanel(); if ( currentPanel instanceof HadoopVfsFileChooserDialog ) { namedClusterWidget = ( (HadoopVfsFileChooserDialog) currentPanel ).getNamedClusterWidget(); } if ( selectedFile != null ) { String url = selectedFile.getURL().toString(); if ( currentPanel != null ) { if ( currentPanel.getVfsSchemeDisplayText().equals( LOCAL_ENVIRONMENT ) ) { wFields.getActiveTableItem().setText( wFields.getActiveTableColumn() - 1, LOCAL_ENVIRONMENT ); } else if ( currentPanel.getVfsSchemeDisplayText().equals( S3_ENVIRONMENT ) ) { wFields.getActiveTableItem().setText( wFields.getActiveTableColumn() - 1, S3_ENVIRONMENT ); } else if ( namedClusterWidget != null && namedClusterWidget.getSelectedNamedCluster() != null ) { url = jobEntry.getUrlPath( url ); wFields.getActiveTableItem().setText( wFields.getActiveTableColumn() - 1, namedClusterWidget.getSelectedNamedCluster().getName() ); } } wFields.getActiveTableItem().setText( wFields.getActiveTableColumn(), url ); } return selectedFile; } catch ( KettleFileException ex ) { log.logError( BaseMessages.getString( PKG, "HadoopFileInputDialog.FileBrowser.KettleFileException" ) ); return selectedFile; } catch ( FileSystemException ex ) { log.logError( BaseMessages.getString( PKG, "HadoopFileInputDialog.FileBrowser.FileSystemException" ) ); return selectedFile; } } private void showMessageAndLog( String title, String message, String messageToLog ) { MessageBox box = new MessageBox( shell ); box.setText( title ); //$NON-NLS-1$ box.setMessage( message ); log.logError( messageToLog ); box.open(); } protected Image getImage() { return GUIResource.getInstance().getImage( "HDM.svg", getClass().getClassLoader(), ConstUI.ICON_SIZE, ConstUI.ICON_SIZE ); } public boolean showFileButtons() { return false; } protected void setComboValues( ColumnInfo colInfo ) { try { super.setComboValues( colInfo ); String[] superValues = colInfo.getComboValues(); String[] s3value = { S3_ENVIRONMENT }; String[] comboValues = (String[]) ArrayUtils.addAll( superValues, s3value ); String[] namedClusters = namedClusterService.listNames( getMetaStore() ).toArray( new String[0] ); String[] values = (String[]) ArrayUtils.addAll( comboValues, namedClusters ); colInfo.setComboValues( values ); } catch ( MetaStoreException e ) { log.logError( e.getMessage() ); } } } ================================================ FILE: kettle-plugins/hdfs/core/src/main/java/org/pentaho/big/data/kettle/plugins/hdfs/trans/HadoopFileInputDialog.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. 
* * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.hdfs.trans; import org.apache.commons.lang.ArrayUtils; import org.apache.commons.vfs2.FileObject; import org.apache.commons.vfs2.FileSystemException; import org.apache.commons.vfs2.FileType; import org.eclipse.jface.window.Window; import org.eclipse.jface.wizard.Wizard; import org.eclipse.jface.wizard.WizardDialog; import org.eclipse.swt.SWT; import org.eclipse.swt.custom.CCombo; import org.eclipse.swt.custom.CTabFolder; import org.eclipse.swt.custom.CTabItem; import org.eclipse.swt.custom.ScrolledComposite; import org.eclipse.swt.events.FocusListener; import org.eclipse.swt.events.ModifyListener; import org.eclipse.swt.events.SelectionAdapter; import org.eclipse.swt.events.SelectionEvent; import org.eclipse.swt.events.ShellAdapter; import org.eclipse.swt.events.ShellEvent; import org.eclipse.swt.graphics.Cursor; import org.eclipse.swt.graphics.Rectangle; import org.eclipse.swt.layout.FillLayout; import org.eclipse.swt.layout.FormAttachment; import org.eclipse.swt.layout.FormData; import org.eclipse.swt.layout.FormLayout; import org.eclipse.swt.widgets.Button; import org.eclipse.swt.widgets.Composite; import org.eclipse.swt.widgets.Control; import org.eclipse.swt.widgets.Display; import org.eclipse.swt.widgets.Group; import org.eclipse.swt.widgets.Label; import org.eclipse.swt.widgets.Listener; import org.eclipse.swt.widgets.MessageBox; import org.eclipse.swt.widgets.Shell; import org.eclipse.swt.widgets.Table; import org.eclipse.swt.widgets.TableItem; import org.eclipse.swt.widgets.Text; import org.eclipse.swt.widgets.ToolBar; import org.eclipse.swt.widgets.ToolItem; import org.pentaho.di.core.annotations.PluginDialog; import org.pentaho.big.data.kettle.plugins.hdfs.vfs.HadoopVfsFileChooserDialog; import org.pentaho.big.data.kettle.plugins.hdfs.vfs.Schemes; import org.pentaho.big.data.plugins.common.ui.NamedClusterWidgetImpl; import org.pentaho.di.core.Const; import org.pentaho.di.core.Props; import org.pentaho.di.core.compress.CompressionProvider; import org.pentaho.di.core.compress.CompressionProviderFactory; import org.pentaho.di.core.exception.KettleException; import org.pentaho.di.core.exception.KettleFileException; import org.pentaho.di.core.fileinput.FileInputList; import org.pentaho.di.core.gui.TextFileInputFieldInterface; import org.pentaho.di.core.logging.LogChannel; import org.pentaho.di.core.row.value.ValueMetaBase; import org.pentaho.di.core.util.EnvUtil; import org.pentaho.di.core.util.StringUtil; import org.pentaho.di.core.util.Utils; import org.pentaho.di.core.variables.Variables; import org.pentaho.di.core.vfs.KettleVFS; import org.pentaho.di.i18n.BaseMessages; import org.pentaho.di.job.entries.copyfiles.JobEntryCopyFiles; import org.pentaho.di.trans.Trans; import org.pentaho.di.trans.TransMeta; import org.pentaho.di.trans.TransPreviewFactory; import org.pentaho.di.trans.step.BaseStepMeta; import org.pentaho.di.trans.step.StepDialogInterface; import org.pentaho.di.trans.step.StepMeta; import org.pentaho.di.trans.steps.file.BaseFileField; import org.pentaho.di.trans.steps.fileinput.text.BufferedInputStreamReader; import org.pentaho.di.trans.steps.fileinput.text.EncodingType; import org.pentaho.di.trans.steps.fileinput.text.TextFileFilter; import org.pentaho.di.trans.steps.fileinput.text.TextFileInputMeta; import org.pentaho.di.trans.steps.fileinput.text.TextFileInputUtils; import 
org.pentaho.di.ui.core.dialog.EnterNumberDialog; import org.pentaho.di.ui.core.dialog.EnterSelectionDialog; import org.pentaho.di.ui.core.dialog.EnterTextDialog; import org.pentaho.di.ui.core.dialog.ErrorDialog; import org.pentaho.di.ui.core.dialog.PreviewRowsDialog; import org.pentaho.di.ui.core.gui.GUIResource; import org.pentaho.di.ui.core.widget.ColumnInfo; import org.pentaho.di.ui.core.widget.TableView; import org.pentaho.di.ui.core.widget.TextVar; import org.pentaho.di.ui.core.widget.VariableButtonListenerFactory; import org.pentaho.di.ui.spoon.Spoon; import org.pentaho.di.ui.trans.dialog.TransPreviewProgressDialog; import org.pentaho.di.ui.trans.step.BaseStepDialog; import org.pentaho.di.ui.trans.steps.fileinput.text.TextFileCSVImportProgressDialog; import org.pentaho.di.ui.trans.steps.fileinput.text.TextFileImportWizardPage1; import org.pentaho.di.ui.trans.steps.fileinput.text.TextFileImportWizardPage2; import org.pentaho.hadoop.shim.api.cluster.NamedCluster; import org.pentaho.hadoop.shim.api.cluster.NamedClusterService; import org.pentaho.metastore.api.exceptions.MetaStoreException; import org.pentaho.vfs.ui.CustomVfsUiPanel; import org.pentaho.vfs.ui.VfsFileChooserDialog; import java.io.File; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; import java.net.URI; import java.nio.charset.Charset; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Locale; import java.util.Map; import java.util.Vector; @PluginDialog( id = "HadoopFileInputPlugin", image = "HDI.svg", pluginType = PluginDialog.PluginType.STEP, documentationUrl = "pdi-transformation-steps-reference-overview/hadoop-file-input-cp-main-page" ) public class HadoopFileInputDialog extends BaseStepDialog implements StepDialogInterface { private static final Class BASE_PKG = TextFileInputMeta.class; // for i18n purposes, needed by Translator2!! private static final Class PKG = HadoopFileInputMeta.class; // for i18n purposes, needed by Translator2!! 
private static final String[] ALL_FILES_TYPE = new String[] { BaseMessages.getString( PKG, "System.FileType.AllFiles" ) }; private LogChannel log = new LogChannel( this ); private static final String COMBO_NO = BaseMessages.getString( BASE_PKG, "System.Combo.No" ); private static final String COMBO_YES = BaseMessages.getString( BASE_PKG, "System.Combo.Yes" ); private static final String[] YES_NO_COMBO = new String[] { COMBO_NO, COMBO_YES }; public static final String LOCAL_ENVIRONMENT = "Local"; public static final String STATIC_ENVIRONMENT = ""; public static final String S3_ENVIRONMENT = "S3"; public static final String BUTTON_BROWSE = BaseMessages.getString( BASE_PKG, "System.Button.Browse" ); public static final String BUTTON_VARIABLE = BaseMessages.getString( BASE_PKG, "System.Button.Variable" ); public static final String ERROR_TITLE = BaseMessages.getString( BASE_PKG, "System.Dialog.Error.Title" ); public static final String TOOLTIP_VARIABLE = BaseMessages.getString( BASE_PKG, "System.Tooltip.VariableToDir" ); public static final String LABEL_EXTENSION = BaseMessages.getString( BASE_PKG, "System.Label.Extension" ); private CTabFolder wTabFolder; private Button wAccFilenames; private Label wlPassThruFields; private Button wPassThruFields; private Label wlAccField; private Text wAccField; private Label wlAccStep; private CCombo wAccStep; private Label wlFilenameList; private TableView wFilenameList; private Button wbShowFiles; private Button wFirst; private Button wFirstHeader; private CCombo wFiletype; private Button wbSeparator; private TextVar wSeparator; private Text wEnclosure; private Text wEscape; private Button wHeader; private Label wlNrHeader; private Text wNrHeader; private Button wFooter; private Label wlNrFooter; private Text wNrFooter; private Button wWraps; private Label wlNrWraps; private Text wNrWraps; private Button wLayoutPaged; private Label wlNrLinesPerPage; private Text wNrLinesPerPage; private Label wlNrLinesDocHeader; private Text wNrLinesDocHeader; private CCombo wCompression; private Button wNoempty; private Button wInclFilename; private Label wlInclFilenameField; private Text wInclFilenameField; private Button wInclRownum; private Label wlRownumByFileField; private Button wRownumByFile; private Label wlInclRownumField; private Text wInclRownumField; private CCombo wFormat; private CCombo wEncoding; private Text wLimit; private Button wDateLenient; private CCombo wDateLocale; // ERROR HANDLING... 
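// Error-handling ("replay") widgets: ignore errors, skip error lines, error count/fields/text field names,
// and the destination directories plus extensions for the warning, error and line-number files.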
private Button wErrorIgnored; private Label wlSkipErrorLines; private Button wSkipErrorLines; private Label wlErrorCount; private Text wErrorCount; private Label wlErrorFields; private Text wErrorFields; private Label wlErrorText; private Text wErrorText; // New entries for intelligent error handling AKA replay functionality // Bad files destination directory private Label wlWarnDestDir; private Button wbbWarnDestDir; // Browse: add file or directory private Button wbvWarnDestDir; // Variable private Text wWarnDestDir; private Label wlWarnExt; private Text wWarnExt; // Error messages files destination directory private Label wlErrorDestDir; private Button wbbErrorDestDir; // Browse: add file or directory private Button wbvErrorDestDir; // Variable private Text wErrorDestDir; private Label wlErrorExt; private Text wErrorExt; // Line numbers files destination directory private Label wlLineNrDestDir; private Button wbbLineNrDestDir; // Browse: add file or directory private Button wbvLineNrDestDir; // Variable private Text wLineNrDestDir; private Label wlLineNrExt; private Text wLineNrExt; private TableView wFilter; private TableView wFields; private Button wAddResult; private HadoopFileInputMeta input; // Wizard info... private Vector fields; private int middle; private int margin; private ModifyListener lsMod; public static final int[] dateLengths = new int[] { 23, 19, 14, 10, 10, 10, 10, 8, 8, 8, 8, 6, 6 }; private boolean gotEncodings = false; protected boolean firstClickOnDateLocale; private final NamedClusterService namedClusterService; public HadoopFileInputDialog( Shell parent, Object in, TransMeta transMeta, String sname ) { super( parent, (BaseStepMeta) in, transMeta, sname ); input = (HadoopFileInputMeta) in; namedClusterService = input.getNamedClusterService(); input.setVariableSpace( variables ); firstClickOnDateLocale = true; } @Override public String open() { Shell parent = getParent(); Display display = parent.getDisplay(); shell = new Shell( parent, SWT.DIALOG_TRIM | SWT.RESIZE | SWT.MAX | SWT.MIN ); props.setLook( shell ); setShellImage( shell, input ); lsMod = e -> input.setChanged(); changed = input.hasChanged(); FormLayout formLayout = new FormLayout(); formLayout.marginWidth = Const.FORM_MARGIN; formLayout.marginHeight = Const.FORM_MARGIN; shell.setLayout( formLayout ); shell.setText( BaseMessages.getString( PKG, "HadoopFileInputDialog.DialogTitle" ) ); middle = props.getMiddlePct(); margin = Const.MARGIN; // Stepname line wlStepname = new Label( shell, SWT.RIGHT ); wlStepname.setText( BaseMessages.getString( BASE_PKG, "System.Label.StepName" ) ); props.setLook( wlStepname ); fdlStepname = new FormData(); fdlStepname.left = new FormAttachment( 0, 0 ); fdlStepname.top = new FormAttachment( 0, margin ); fdlStepname.right = new FormAttachment( middle, -margin ); wlStepname.setLayoutData( fdlStepname ); wStepname = new Text( shell, SWT.SINGLE | SWT.LEFT | SWT.BORDER ); wStepname.setText( stepname ); props.setLook( wStepname ); wStepname.addModifyListener( lsMod ); fdStepname = new FormData(); fdStepname.left = new FormAttachment( middle, 0 ); fdStepname.top = new FormAttachment( 0, margin ); fdStepname.right = new FormAttachment( 100, 0 ); wStepname.setLayoutData( fdStepname ); wTabFolder = new CTabFolder( shell, SWT.BORDER ); props.setLook( wTabFolder, Props.WIDGET_STYLE_TAB ); wTabFolder.setSimple( false ); addFilesTab(); addContentTab(); addErrorTab(); addFiltersTabs(); addFieldsTabs(); FormData fdTabFolder = new FormData(); fdTabFolder.left = new FormAttachment( 0, 0 ); 
fdTabFolder.top = new FormAttachment( wStepname, margin ); fdTabFolder.right = new FormAttachment( 100, 0 ); fdTabFolder.bottom = new FormAttachment( 100, -50 ); wTabFolder.setLayoutData( fdTabFolder ); wOK = new Button( shell, SWT.PUSH ); wOK.setText( BaseMessages.getString( BASE_PKG, "System.Button.OK" ) ); wPreview = new Button( shell, SWT.PUSH ); wPreview.setText( BaseMessages.getString( BASE_PKG, "TextFileInputDialog.Preview.Button" ) ); wCancel = new Button( shell, SWT.PUSH ); wCancel.setText( BaseMessages.getString( BASE_PKG, "System.Button.Cancel" ) ); positionBottomRightButtons( shell, new Button[] { wOK, wPreview, wCancel }, margin, wTabFolder ); // Add listeners lsOK = e -> ok(); Listener lsFirst = e -> first( false ); Listener lsFirstHeader = e -> first( true ); lsGet = e -> get(); lsPreview = e -> preview(); lsCancel = e -> cancel(); wOK.addListener( SWT.Selection, lsOK ); wFirst.addListener( SWT.Selection, lsFirst ); wFirstHeader.addListener( SWT.Selection, lsFirstHeader ); wGet.addListener( SWT.Selection, lsGet ); wPreview.addListener( SWT.Selection, lsPreview ); wCancel.addListener( SWT.Selection, lsCancel ); lsDef = new SelectionAdapter() { @Override public void widgetDefaultSelected( SelectionEvent e ) { ok(); } }; wAccFilenames.addSelectionListener( lsDef ); wStepname.addSelectionListener( lsDef ); wSeparator.addSelectionListener( lsDef ); wLimit.addSelectionListener( lsDef ); wInclRownumField.addSelectionListener( lsDef ); wInclFilenameField.addSelectionListener( lsDef ); wNrHeader.addSelectionListener( lsDef ); wNrFooter.addSelectionListener( lsDef ); wNrWraps.addSelectionListener( lsDef ); wWarnDestDir.addSelectionListener( lsDef ); wWarnExt.addSelectionListener( lsDef ); wErrorDestDir.addSelectionListener( lsDef ); wErrorExt.addSelectionListener( lsDef ); wLineNrDestDir.addSelectionListener( lsDef ); wLineNrExt.addSelectionListener( lsDef ); wAccField.addSelectionListener( lsDef ); // Show the files that are selected at this time... wbShowFiles.addSelectionListener( new SelectionAdapter() { @Override public void widgetSelected( SelectionEvent e ) { showFiles(); } } ); // Allow the insertion of tabs as separator... wbSeparator.addSelectionListener( new SelectionAdapter() { @Override public void widgetSelected( SelectionEvent se ) { wSeparator.getTextWidget().insert( "\t" ); } } ); SelectionAdapter lsFlags = new SelectionAdapter() { @Override public void widgetSelected( SelectionEvent e ) { setFlags(); } }; // Enable/disable the right fields... wInclFilename.addSelectionListener( lsFlags ); wInclRownum.addSelectionListener( lsFlags ); wRownumByFile.addSelectionListener( lsFlags ); wErrorIgnored.addSelectionListener( lsFlags ); wHeader.addSelectionListener( lsFlags ); wFooter.addSelectionListener( lsFlags ); wWraps.addSelectionListener( lsFlags ); wLayoutPaged.addSelectionListener( lsFlags ); wAccFilenames.addSelectionListener( lsFlags ); // Detect X or ALT-F4 or something that kills this window... shell.addShellListener( new ShellAdapter() { @Override public void shellClosed( ShellEvent e ) { cancel(); } } ); wTabFolder.setSelection( 0 ); // Set the shell size, based upon previous time... getData( input ); setSize(); shell.open(); while ( !shell.isDisposed() ) { if ( !display.readAndDispatch() ) { display.sleep(); } } return stepname; } /** * Replaces the password present in each file URI with '***' before displaying it in the UI. 
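* Example (illustrative URI, not taken from the repository): "hdfs://devuser:secret@namenode:8020/weblogs"
* is shown as "hdfs://devuser:***@namenode:8020/weblogs". Only the part of the user-info after the first ':'
* is masked; the user name itself is left as-is.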
* * @param files List of files to be processed * @return The list of files to be processed with the password replaced with '***' */ protected String[] getFriendlyURIs( String[] files ) { for ( int i = 0; i < files.length; i++ ) { String userinfo = URI.create( files[ i ] ).getUserInfo(); if ( userinfo != null ) { String[] credentials = userinfo.split( ":", 2 ); if ( credentials.length == 2 ) { credentials[ 1 ] = "***"; files[ i ] = files[ i ].replaceFirst( userinfo, String.join( ":", credentials ) ); } } } return files; } private void showFiles() { HadoopFileInputMeta tfii = new HadoopFileInputMeta(); getInfo( tfii ); String[] files = tfii.getFilePaths( transMeta.getBowl(), transMeta ); if ( files != null && files.length > 0 ) { EnterSelectionDialog esd = new EnterSelectionDialog( shell, getFriendlyURIs( files ), "Files read", "Files read:" ); esd.setViewOnly(); esd.open(); } else { MessageBox mb = new MessageBox( shell, SWT.OK | SWT.ICON_ERROR ); mb.setMessage( BaseMessages.getString( BASE_PKG, "TextFileInputDialog.NoFilesFound.DialogMessage" ) ); mb.setText( ERROR_TITLE ); mb.open(); } } private void addFilesTab() { // //////////////////////// // START OF FILE TAB /// // //////////////////////// CTabItem wFileTab = new CTabItem( wTabFolder, SWT.NONE ); wFileTab.setText( BaseMessages.getString( BASE_PKG, "TextFileInputDialog.FileTab.TabTitle" ) ); ScrolledComposite wFileSComp = new ScrolledComposite( wTabFolder, SWT.V_SCROLL | SWT.H_SCROLL ); wFileSComp.setLayout( new FillLayout() ); Composite wFileComp = new Composite( wFileSComp, SWT.NONE ); props.setLook( wFileComp ); FormLayout fileLayout = new FormLayout(); fileLayout.marginWidth = 3; fileLayout.marginHeight = 3; wFileComp.setLayout( fileLayout ); // Filename list line wlFilenameList = new Label( wFileComp, SWT.RIGHT ); wlFilenameList.setText( BaseMessages.getString( BASE_PKG, "TextFileInputDialog.FilenameList.Label" ) ); props.setLook( wlFilenameList ); FormData fdlFilenameList = new FormData(); fdlFilenameList.left = new FormAttachment( 0, 0 ); fdlFilenameList.top = new FormAttachment( wFileComp, 15 ); wlFilenameList.setLayoutData( fdlFilenameList ); ToolBar tb = new ToolBar( wFileComp, SWT.HORIZONTAL | SWT.FLAT ); props.setLook( tb ); FormData fdTb = new FormData(); fdTb.right = new FormAttachment( 100, 0 ); fdTb.top = new FormAttachment( wFileComp, margin ); tb.setLayoutData( fdTb ); ToolItem deleteToolItem = new ToolItem( tb, SWT.PUSH ); deleteToolItem.setImage( GUIResource.getInstance().getImageDelete() ); deleteToolItem .setToolTipText( BaseMessages.getString( JobEntryCopyFiles.class, "JobCopyFiles.FilenameDelete.Tooltip" ) ); deleteToolItem.addSelectionListener( new SelectionAdapter() { @Override public void widgetSelected( SelectionEvent arg0 ) { int[] idx = wFilenameList.getSelectionIndices(); wFilenameList.remove( idx ); wFilenameList.removeEmptyRows(); wFilenameList.setRowNums(); } } ); wbShowFiles = new Button( wFileComp, SWT.PUSH | SWT.CENTER ); props.setLook( wbShowFiles ); wbShowFiles.setText( BaseMessages.getString( BASE_PKG, "TextFileInputDialog.ShowFiles.Button" ) ); FormData fdbShowFiles = new FormData(); fdbShowFiles.left = new FormAttachment( middle, 0 ); fdbShowFiles.bottom = new FormAttachment( 100, 0 ); wbShowFiles.setLayoutData( fdbShowFiles ); wFirst = new Button( wFileComp, SWT.PUSH ); wFirst.setText( BaseMessages.getString( BASE_PKG, "TextFileInputDialog.First.Button" ) ); FormData fdFirst = new FormData(); fdFirst.left = new FormAttachment( wbShowFiles, margin * 2 ); fdFirst.bottom = new FormAttachment( 
100, 0 ); wFirst.setLayoutData( fdFirst ); wFirstHeader = new Button( wFileComp, SWT.PUSH ); wFirstHeader.setText( BaseMessages.getString( BASE_PKG, "TextFileInputDialog.FirstHeader.Button" ) ); FormData fdFirstHeader = new FormData(); fdFirstHeader.left = new FormAttachment( wFirst, margin * 2 ); fdFirstHeader.bottom = new FormAttachment( 100, 0 ); wFirstHeader.setLayoutData( fdFirstHeader ); // Accepting filenames group // Group gAccepting = new Group( wFileComp, SWT.SHADOW_ETCHED_IN ); gAccepting.setText( BaseMessages.getString( BASE_PKG, "TextFileInputDialog.AcceptingGroup.Label" ) ); FormLayout acceptingLayout = new FormLayout(); acceptingLayout.marginWidth = 3; acceptingLayout.marginHeight = 3; gAccepting.setLayout( acceptingLayout ); props.setLook( gAccepting ); // Accept filenames from previous steps? // Label wlAccFilenames = new Label( gAccepting, SWT.RIGHT ); wlAccFilenames.setText( BaseMessages.getString( BASE_PKG, "TextFileInputDialog.AcceptFilenames.Label" ) ); props.setLook( wlAccFilenames ); FormData fdlAccFilenames = new FormData(); fdlAccFilenames.top = new FormAttachment( 0, margin ); fdlAccFilenames.left = new FormAttachment( 0, 0 ); fdlAccFilenames.right = new FormAttachment( middle, -margin ); wlAccFilenames.setLayoutData( fdlAccFilenames ); wAccFilenames = new Button( gAccepting, SWT.CHECK ); wAccFilenames.setToolTipText( BaseMessages.getString( BASE_PKG, "TextFileInputDialog.AcceptFilenames.Tooltip" ) ); props.setLook( wAccFilenames ); FormData fdAccFilenames = new FormData(); fdAccFilenames.top = new FormAttachment( 0, margin ); fdAccFilenames.left = new FormAttachment( middle, 0 ); fdAccFilenames.right = new FormAttachment( 100, 0 ); wAccFilenames.setLayoutData( fdAccFilenames ); // Pass through fields from the previous step? // wlPassThruFields = new Label( gAccepting, SWT.RIGHT ); wlPassThruFields.setText( BaseMessages.getString( BASE_PKG, "TextFileInputDialog.PassThruFields.Label" ) ); props.setLook( wlPassThruFields ); FormData fdlPassThruFields = new FormData(); fdlPassThruFields.top = new FormAttachment( wAccFilenames, margin ); fdlPassThruFields.left = new FormAttachment( 0, 0 ); fdlPassThruFields.right = new FormAttachment( middle, -margin ); wlPassThruFields.setLayoutData( fdlPassThruFields ); wPassThruFields = new Button( gAccepting, SWT.CHECK ); wPassThruFields.setToolTipText( BaseMessages.getString( BASE_PKG, "TextFileInputDialog.PassThruFields.Tooltip" ) ); props.setLook( wPassThruFields ); FormData fdPassThruFields = new FormData(); fdPassThruFields.top = new FormAttachment( wAccFilenames, margin ); fdPassThruFields.left = new FormAttachment( middle, 0 ); fdPassThruFields.right = new FormAttachment( 100, 0 ); wPassThruFields.setLayoutData( fdPassThruFields ); // Which step to read from?
wlAccStep = new Label( gAccepting, SWT.RIGHT ); wlAccStep.setText( BaseMessages.getString( BASE_PKG, "TextFileInputDialog.AcceptStep.Label" ) ); props.setLook( wlAccStep ); FormData fdlAccStep = new FormData(); fdlAccStep.top = new FormAttachment( wPassThruFields, margin ); fdlAccStep.left = new FormAttachment( 0, 0 ); fdlAccStep.right = new FormAttachment( middle, -margin ); wlAccStep.setLayoutData( fdlAccStep ); wAccStep = new CCombo( gAccepting, SWT.SINGLE | SWT.LEFT | SWT.BORDER ); wAccStep.setToolTipText( BaseMessages.getString( BASE_PKG, "TextFileInputDialog.AcceptStep.Tooltip" ) ); props.setLook( wAccStep ); FormData fdAccStep = new FormData(); fdAccStep.top = new FormAttachment( wPassThruFields, margin ); fdAccStep.left = new FormAttachment( middle, 0 ); fdAccStep.right = new FormAttachment( 100, 0 ); wAccStep.setLayoutData( fdAccStep ); // Which field? // wlAccField = new Label( gAccepting, SWT.RIGHT ); wlAccField.setText( BaseMessages.getString( BASE_PKG, "TextFileInputDialog.AcceptField.Label" ) ); props.setLook( wlAccField ); FormData fdlAccField = new FormData(); fdlAccField.top = new FormAttachment( wAccStep, margin ); fdlAccField.left = new FormAttachment( 0, 0 ); fdlAccField.right = new FormAttachment( middle, -margin ); wlAccField.setLayoutData( fdlAccField ); wAccField = new Text( gAccepting, SWT.SINGLE | SWT.LEFT | SWT.BORDER ); wAccField.setToolTipText( BaseMessages.getString( BASE_PKG, "TextFileInputDialog.AcceptField.Tooltip" ) ); props.setLook( wAccField ); FormData fdAccField = new FormData(); fdAccField.top = new FormAttachment( wAccStep, margin ); fdAccField.left = new FormAttachment( middle, 0 ); fdAccField.right = new FormAttachment( 100, 0 ); wAccField.setLayoutData( fdAccField ); // Fill in the source steps... List prevSteps = transMeta.findPreviousSteps( transMeta.findStep( stepname ) ); for ( StepMeta prevStep : prevSteps ) { wAccStep.add( prevStep.getName() ); } FormData fdAccepting = new FormData(); fdAccepting.left = new FormAttachment( 0, 0 ); fdAccepting.right = new FormAttachment( 100, 0 ); fdAccepting.bottom = new FormAttachment( wFirstHeader, -margin * 2 ); gAccepting.setLayoutData( fdAccepting ); ColumnInfo[] colinfo = new ColumnInfo[] { new ColumnInfo( BaseMessages.getString( PKG, "HadoopFileInputDialog.Environment" ), ColumnInfo.COLUMN_TYPE_CCOMBO, false, true ), new ColumnInfo( BaseMessages.getString( PKG, "HadoopFileInputDialog.FileFolderColumn.Column" ), ColumnInfo.COLUMN_TYPE_TEXT_BUTTON, false ), new ColumnInfo( BaseMessages.getString( BASE_PKG, "TextFileInputDialog.WildcardColumn.Column" ), ColumnInfo.COLUMN_TYPE_TEXT, false ), new ColumnInfo( BaseMessages.getString( BASE_PKG, "TextFileInputDialog.RequiredColumn.Column" ), ColumnInfo.COLUMN_TYPE_CCOMBO, YES_NO_COMBO ), new ColumnInfo( BaseMessages.getString( BASE_PKG, "TextFileInputDialog.IncludeSubDirs.Column" ), ColumnInfo.COLUMN_TYPE_CCOMBO, YES_NO_COMBO ) }; setComboValues( colinfo[ 0 ] ); colinfo[ 1 ].setUsingVariables( true ); colinfo[ 1 ].setTextVarButtonSelectionListener( getFileDirectoryListener() ); colinfo[ 2 ].setToolTip( BaseMessages.getString( BASE_PKG, "TextFileInputDialog.RegExpColumn.Column" ) ); colinfo[ 3 ].setToolTip( BaseMessages.getString( BASE_PKG, "TextFileInputDialog.RequiredColumn.Tooltip" ) ); colinfo[ 4 ].setToolTip( BaseMessages.getString( BASE_PKG, "TextFileInputDialog.IncludeSubDirs.Tooltip" ) ); wFilenameList = new TableView( transMeta, wFileComp, SWT.FULL_SELECTION | SWT.SINGLE | SWT.BORDER, colinfo, 4, lsMod, props ); props.setLook( wFilenameList ); FormData 
fdFilenameList = new FormData(); fdFilenameList.bottom = new FormAttachment( gAccepting, 0 ); fdFilenameList.right = new FormAttachment( 100, 0 ); fdFilenameList.left = new FormAttachment( 0, 0 ); fdFilenameList.top = new FormAttachment( tb, margin ); wFilenameList.setLayoutData( fdFilenameList ); FormData fdFileComp = new FormData(); fdFileComp.left = new FormAttachment( 0, 0 ); fdFileComp.top = new FormAttachment( 0, 0 ); fdFileComp.right = new FormAttachment( 100, 0 ); fdFileComp.bottom = new FormAttachment( 100, 0 ); wFileComp.setLayoutData( fdFileComp ); wFileComp.pack(); Rectangle bounds = wFileComp.getBounds(); wFileSComp.setContent( wFileComp ); wFileSComp.setExpandHorizontal( true ); wFileSComp.setExpandVertical( true ); wFileSComp.setMinWidth( bounds.width ); wFileSComp.setMinHeight( bounds.height ); wFileTab.setControl( wFileSComp ); // /////////////////////////////////////////////////////////// // / END OF FILE TAB // /////////////////////////////////////////////////////////// } private void addContentTab() { // //////////////////////// // START OF CONTENT TAB/// // / CTabItem wContentTab = new CTabItem( wTabFolder, SWT.NONE ); wContentTab.setText( BaseMessages.getString( BASE_PKG, "TextFileInputDialog.ContentTab.TabTitle" ) ); FormLayout contentLayout = new FormLayout(); contentLayout.marginWidth = 3; contentLayout.marginHeight = 3; ScrolledComposite wContentSComp = new ScrolledComposite( wTabFolder, SWT.V_SCROLL | SWT.H_SCROLL ); wContentSComp.setLayout( new FillLayout() ); Composite wContentComp = new Composite( wContentSComp, SWT.NONE ); props.setLook( wContentComp ); wContentComp.setLayout( contentLayout ); // Filetype line Label wlFiletype = new Label( wContentComp, SWT.RIGHT ); wlFiletype.setText( BaseMessages.getString( BASE_PKG, "TextFileInputDialog.Filetype.Label" ) ); props.setLook( wlFiletype ); FormData fdlFiletype = new FormData(); fdlFiletype.left = new FormAttachment( 0, 0 ); fdlFiletype.top = new FormAttachment( 0, 0 ); fdlFiletype.right = new FormAttachment( middle, -margin ); wlFiletype.setLayoutData( fdlFiletype ); wFiletype = new CCombo( wContentComp, SWT.BORDER | SWT.READ_ONLY ); wFiletype.setText( BaseMessages.getString( BASE_PKG, "TextFileInputDialog.Filetype.Label" ) ); props.setLook( wFiletype ); wFiletype.add( "CSV" ); wFiletype.add( "Fixed" ); wFiletype.select( 0 ); wFiletype.addModifyListener( lsMod ); FormData fdFiletype = new FormData(); fdFiletype.left = new FormAttachment( middle, 0 ); fdFiletype.top = new FormAttachment( 0, 0 ); fdFiletype.right = new FormAttachment( 100, 0 ); wFiletype.setLayoutData( fdFiletype ); Label wlSeparator = new Label( wContentComp, SWT.RIGHT ); wlSeparator.setText( BaseMessages.getString( BASE_PKG, "TextFileInputDialog.Separator.Label" ) ); props.setLook( wlSeparator ); FormData fdlSeparator = new FormData(); fdlSeparator.left = new FormAttachment( 0, 0 ); fdlSeparator.top = new FormAttachment( wFiletype, margin ); fdlSeparator.right = new FormAttachment( middle, -margin ); wlSeparator.setLayoutData( fdlSeparator ); wbSeparator = new Button( wContentComp, SWT.PUSH | SWT.CENTER ); wbSeparator.setText( BaseMessages.getString( BASE_PKG, "TextFileInputDialog.Delimiter.Button" ) ); props.setLook( wbSeparator ); FormData fdbSeparator = new FormData(); fdbSeparator.right = new FormAttachment( 100, 0 ); fdbSeparator.top = new FormAttachment( wFiletype, 0 ); wbSeparator.setLayoutData( fdbSeparator ); wSeparator = new TextVar( transMeta, wContentComp, SWT.SINGLE | SWT.LEFT | SWT.BORDER ); props.setLook( wSeparator ); 
wSeparator.addModifyListener( lsMod ); FormData fdSeparator = new FormData(); fdSeparator.top = new FormAttachment( wFiletype, margin ); fdSeparator.left = new FormAttachment( middle, 0 ); fdSeparator.right = new FormAttachment( wbSeparator, -margin ); wSeparator.setLayoutData( fdSeparator ); // Enclosure Label wlEnclosure = new Label( wContentComp, SWT.RIGHT ); wlEnclosure.setText( BaseMessages.getString( BASE_PKG, "TextFileInputDialog.Enclosure.Label" ) ); props.setLook( wlEnclosure ); FormData fdlEnclosure = new FormData(); fdlEnclosure.left = new FormAttachment( 0, 0 ); fdlEnclosure.top = new FormAttachment( wSeparator, margin ); fdlEnclosure.right = new FormAttachment( middle, -margin ); wlEnclosure.setLayoutData( fdlEnclosure ); wEnclosure = new Text( wContentComp, SWT.SINGLE | SWT.LEFT | SWT.BORDER ); props.setLook( wEnclosure ); wEnclosure.addModifyListener( lsMod ); FormData fdEnclosure = new FormData(); fdEnclosure.left = new FormAttachment( middle, 0 ); fdEnclosure.top = new FormAttachment( wSeparator, margin ); fdEnclosure.right = new FormAttachment( 100, 0 ); wEnclosure.setLayoutData( fdEnclosure ); // Allow Enclosure breaks checkbox Label wlEnclBreaks = new Label( wContentComp, SWT.RIGHT ); wlEnclBreaks.setText( BaseMessages.getString( BASE_PKG, "TextFileInputDialog.EnclBreaks.Label" ) ); props.setLook( wlEnclBreaks ); FormData fdlEnclBreaks = new FormData(); fdlEnclBreaks.left = new FormAttachment( 0, 0 ); fdlEnclBreaks.top = new FormAttachment( wEnclosure, margin ); fdlEnclBreaks.right = new FormAttachment( middle, -margin ); wlEnclBreaks.setLayoutData( fdlEnclBreaks ); Button wEnclBreaks = new Button( wContentComp, SWT.CHECK ); props.setLook( wEnclBreaks ); FormData fdEnclBreaks = new FormData(); fdEnclBreaks.left = new FormAttachment( middle, 0 ); fdEnclBreaks.top = new FormAttachment( wEnclosure, margin ); wEnclBreaks.setLayoutData( fdEnclBreaks ); // Disable until the logic works... 
wlEnclBreaks.setEnabled( false ); wEnclBreaks.setEnabled( false ); // Escape Label wlEscape = new Label( wContentComp, SWT.RIGHT ); wlEscape.setText( BaseMessages.getString( BASE_PKG, "TextFileInputDialog.Escape.Label" ) ); props.setLook( wlEscape ); FormData fdlEscape = new FormData(); fdlEscape.left = new FormAttachment( 0, 0 ); fdlEscape.top = new FormAttachment( wEnclBreaks, margin ); fdlEscape.right = new FormAttachment( middle, -margin ); wlEscape.setLayoutData( fdlEscape ); wEscape = new Text( wContentComp, SWT.SINGLE | SWT.LEFT | SWT.BORDER ); props.setLook( wEscape ); wEscape.addModifyListener( lsMod ); FormData fdEscape = new FormData(); fdEscape.left = new FormAttachment( middle, 0 ); fdEscape.top = new FormAttachment( wEnclBreaks, margin ); fdEscape.right = new FormAttachment( 100, 0 ); wEscape.setLayoutData( fdEscape ); // Header checkbox Label wlHeader = new Label( wContentComp, SWT.RIGHT ); wlHeader.setText( BaseMessages.getString( BASE_PKG, "TextFileInputDialog.Header.Label" ) ); props.setLook( wlHeader ); FormData fdlHeader = new FormData(); fdlHeader.left = new FormAttachment( 0, 0 ); fdlHeader.top = new FormAttachment( wEscape, margin ); fdlHeader.right = new FormAttachment( middle, -margin ); wlHeader.setLayoutData( fdlHeader ); wHeader = new Button( wContentComp, SWT.CHECK ); props.setLook( wHeader ); FormData fdHeader = new FormData(); fdHeader.left = new FormAttachment( middle, 0 ); fdHeader.top = new FormAttachment( wEscape, margin ); wHeader.setLayoutData( fdHeader ); // NrHeader wlNrHeader = new Label( wContentComp, SWT.RIGHT ); wlNrHeader.setText( BaseMessages.getString( BASE_PKG, "TextFileInputDialog.NrHeader.Label" ) ); props.setLook( wlNrHeader ); FormData fdlNrHeader = new FormData(); fdlNrHeader.left = new FormAttachment( wHeader, margin ); fdlNrHeader.top = new FormAttachment( wEscape, margin ); wlNrHeader.setLayoutData( fdlNrHeader ); wNrHeader = new Text( wContentComp, SWT.SINGLE | SWT.LEFT | SWT.BORDER ); wNrHeader.setTextLimit( 3 ); props.setLook( wNrHeader ); wNrHeader.addModifyListener( lsMod ); FormData fdNrHeader = new FormData(); fdNrHeader.left = new FormAttachment( wlNrHeader, margin ); fdNrHeader.top = new FormAttachment( wEscape, margin ); fdNrHeader.right = new FormAttachment( 100, 0 ); wNrHeader.setLayoutData( fdNrHeader ); Label wlFooter = new Label( wContentComp, SWT.RIGHT ); wlFooter.setText( BaseMessages.getString( BASE_PKG, "TextFileInputDialog.Footer.Label" ) ); props.setLook( wlFooter ); FormData fdlFooter = new FormData(); fdlFooter.left = new FormAttachment( 0, 0 ); fdlFooter.top = new FormAttachment( wHeader, margin ); fdlFooter.right = new FormAttachment( middle, -margin ); wlFooter.setLayoutData( fdlFooter ); wFooter = new Button( wContentComp, SWT.CHECK ); props.setLook( wFooter ); FormData fdFooter = new FormData(); fdFooter.left = new FormAttachment( middle, 0 ); fdFooter.top = new FormAttachment( wHeader, margin ); wFooter.setLayoutData( fdFooter ); // NrFooter wlNrFooter = new Label( wContentComp, SWT.RIGHT ); wlNrFooter.setText( BaseMessages.getString( BASE_PKG, "TextFileInputDialog.NrFooter.Label" ) ); props.setLook( wlNrFooter ); FormData fdlNrFooter = new FormData(); fdlNrFooter.left = new FormAttachment( wFooter, margin ); fdlNrFooter.top = new FormAttachment( wHeader, margin ); wlNrFooter.setLayoutData( fdlNrFooter ); wNrFooter = new Text( wContentComp, SWT.SINGLE | SWT.LEFT | SWT.BORDER ); wNrFooter.setTextLimit( 3 ); props.setLook( wNrFooter ); wNrFooter.addModifyListener( lsMod ); FormData fdNrFooter = new 
FormData(); fdNrFooter.left = new FormAttachment( wlNrFooter, margin ); fdNrFooter.top = new FormAttachment( wHeader, margin ); fdNrFooter.right = new FormAttachment( 100, 0 ); wNrFooter.setLayoutData( fdNrFooter ); // Wraps Label wlWraps = new Label( wContentComp, SWT.RIGHT ); wlWraps.setText( BaseMessages.getString( BASE_PKG, "TextFileInputDialog.Wraps.Label" ) ); props.setLook( wlWraps ); FormData fdlWraps = new FormData(); fdlWraps.left = new FormAttachment( 0, 0 ); fdlWraps.top = new FormAttachment( wFooter, margin ); fdlWraps.right = new FormAttachment( middle, -margin ); wlWraps.setLayoutData( fdlWraps ); wWraps = new Button( wContentComp, SWT.CHECK ); props.setLook( wWraps ); FormData fdWraps = new FormData(); fdWraps.left = new FormAttachment( middle, 0 ); fdWraps.top = new FormAttachment( wFooter, margin ); wWraps.setLayoutData( fdWraps ); // NrWraps wlNrWraps = new Label( wContentComp, SWT.RIGHT ); wlNrWraps.setText( BaseMessages.getString( BASE_PKG, "TextFileInputDialog.NrWraps.Label" ) ); props.setLook( wlNrWraps ); FormData fdlNrWraps = new FormData(); fdlNrWraps.left = new FormAttachment( wWraps, margin ); fdlNrWraps.top = new FormAttachment( wFooter, margin ); wlNrWraps.setLayoutData( fdlNrWraps ); wNrWraps = new Text( wContentComp, SWT.SINGLE | SWT.LEFT | SWT.BORDER ); wNrWraps.setTextLimit( 3 ); props.setLook( wNrWraps ); wNrWraps.addModifyListener( lsMod ); FormData fdNrWraps = new FormData(); fdNrWraps.left = new FormAttachment( wlNrWraps, margin ); fdNrWraps.top = new FormAttachment( wFooter, margin ); fdNrWraps.right = new FormAttachment( 100, 0 ); wNrWraps.setLayoutData( fdNrWraps ); // Pages Label wlLayoutPaged = new Label( wContentComp, SWT.RIGHT ); wlLayoutPaged.setText( BaseMessages.getString( BASE_PKG, "TextFileInputDialog.LayoutPaged.Label" ) ); props.setLook( wlLayoutPaged ); FormData fdlLayoutPaged = new FormData(); fdlLayoutPaged.left = new FormAttachment( 0, 0 ); fdlLayoutPaged.top = new FormAttachment( wWraps, margin ); fdlLayoutPaged.right = new FormAttachment( middle, -margin ); wlLayoutPaged.setLayoutData( fdlLayoutPaged ); wLayoutPaged = new Button( wContentComp, SWT.CHECK ); props.setLook( wLayoutPaged ); FormData fdLayoutPaged = new FormData(); fdLayoutPaged.left = new FormAttachment( middle, 0 ); fdLayoutPaged.top = new FormAttachment( wWraps, margin ); wLayoutPaged.setLayoutData( fdLayoutPaged ); // Nr of lines per page wlNrLinesPerPage = new Label( wContentComp, SWT.RIGHT ); wlNrLinesPerPage.setText( BaseMessages.getString( BASE_PKG, "TextFileInputDialog.NrLinesPerPage.Label" ) ); props.setLook( wlNrLinesPerPage ); FormData fdlNrLinesPerPage = new FormData(); fdlNrLinesPerPage.left = new FormAttachment( wLayoutPaged, margin ); fdlNrLinesPerPage.top = new FormAttachment( wWraps, margin ); wlNrLinesPerPage.setLayoutData( fdlNrLinesPerPage ); wNrLinesPerPage = new Text( wContentComp, SWT.SINGLE | SWT.LEFT | SWT.BORDER ); wNrLinesPerPage.setTextLimit( 3 ); props.setLook( wNrLinesPerPage ); wNrLinesPerPage.addModifyListener( lsMod ); FormData fdNrLinesPerPage = new FormData(); fdNrLinesPerPage.left = new FormAttachment( wlNrLinesPerPage, margin ); fdNrLinesPerPage.top = new FormAttachment( wWraps, margin ); fdNrLinesPerPage.right = new FormAttachment( 100, 0 ); wNrLinesPerPage.setLayoutData( fdNrLinesPerPage ); // NrPages wlNrLinesDocHeader = new Label( wContentComp, SWT.RIGHT ); wlNrLinesDocHeader.setText( BaseMessages.getString( BASE_PKG, "TextFileInputDialog.NrLinesDocHeader.Label" ) ); props.setLook( wlNrLinesDocHeader ); FormData 
fdlNrLinesDocHeader = new FormData(); fdlNrLinesDocHeader.left = new FormAttachment( wLayoutPaged, margin ); fdlNrLinesDocHeader.top = new FormAttachment( wNrLinesPerPage, margin ); wlNrLinesDocHeader.setLayoutData( fdlNrLinesDocHeader ); wNrLinesDocHeader = new Text( wContentComp, SWT.SINGLE | SWT.LEFT | SWT.BORDER ); wNrLinesDocHeader.setTextLimit( 3 ); props.setLook( wNrLinesDocHeader ); wNrLinesDocHeader.addModifyListener( lsMod ); FormData fdNrLinesDocHeader = new FormData(); fdNrLinesDocHeader.left = new FormAttachment( wlNrLinesPerPage, margin ); fdNrLinesDocHeader.top = new FormAttachment( wNrLinesPerPage, margin ); fdNrLinesDocHeader.right = new FormAttachment( 100, 0 ); wNrLinesDocHeader.setLayoutData( fdNrLinesDocHeader ); // Compression type (None, Zip or GZip Label wlCompression = new Label( wContentComp, SWT.RIGHT ); wlCompression.setText( BaseMessages.getString( BASE_PKG, "TextFileInputDialog.Compression.Label" ) ); props.setLook( wlCompression ); FormData fdlCompression = new FormData(); fdlCompression.left = new FormAttachment( 0, 0 ); fdlCompression.top = new FormAttachment( wNrLinesDocHeader, margin ); fdlCompression.right = new FormAttachment( middle, -margin ); wlCompression.setLayoutData( fdlCompression ); wCompression = new CCombo( wContentComp, SWT.BORDER | SWT.READ_ONLY ); wCompression.setText( BaseMessages.getString( BASE_PKG, "TextFileInputDialog.Compression.Label" ) ); wCompression.setToolTipText( BaseMessages.getString( BASE_PKG, "TextFileInputDialog.Compression.Tooltip" ) ); props.setLook( wCompression ); wCompression.setItems( CompressionProviderFactory.getInstance().getCompressionProviderNames() ); wCompression.addModifyListener( lsMod ); FormData fdCompression = new FormData(); fdCompression.left = new FormAttachment( middle, 0 ); fdCompression.top = new FormAttachment( wNrLinesDocHeader, margin ); fdCompression.right = new FormAttachment( 100, 0 ); wCompression.setLayoutData( fdCompression ); Label wlNoempty = new Label( wContentComp, SWT.RIGHT ); wlNoempty.setText( BaseMessages.getString( BASE_PKG, "TextFileInputDialog.NoEmpty.Label" ) ); props.setLook( wlNoempty ); FormData fdlNoempty = new FormData(); fdlNoempty.left = new FormAttachment( 0, 0 ); fdlNoempty.top = new FormAttachment( wCompression, margin ); fdlNoempty.right = new FormAttachment( middle, -margin ); wlNoempty.setLayoutData( fdlNoempty ); wNoempty = new Button( wContentComp, SWT.CHECK ); props.setLook( wNoempty ); wNoempty.setToolTipText( BaseMessages.getString( BASE_PKG, "TextFileInputDialog.NoEmpty.Tooltip" ) ); FormData fdNoempty = new FormData(); fdNoempty.left = new FormAttachment( middle, 0 ); fdNoempty.top = new FormAttachment( wCompression, margin ); fdNoempty.right = new FormAttachment( 100, 0 ); wNoempty.setLayoutData( fdNoempty ); Label wlInclFilename = new Label( wContentComp, SWT.RIGHT ); wlInclFilename.setText( BaseMessages.getString( BASE_PKG, "TextFileInputDialog.InclFilename.Label" ) ); props.setLook( wlInclFilename ); FormData fdlInclFilename = new FormData(); fdlInclFilename.left = new FormAttachment( 0, 0 ); fdlInclFilename.top = new FormAttachment( wNoempty, margin ); fdlInclFilename.right = new FormAttachment( middle, -margin ); wlInclFilename.setLayoutData( fdlInclFilename ); wInclFilename = new Button( wContentComp, SWT.CHECK ); props.setLook( wInclFilename ); wInclFilename.setToolTipText( BaseMessages.getString( BASE_PKG, "TextFileInputDialog.InclFilename.Tooltip" ) ); FormData fdInclFilename = new FormData(); fdInclFilename.left = new FormAttachment( middle, 0 ); 
fdInclFilename.top = new FormAttachment( wNoempty, margin ); wInclFilename.setLayoutData( fdInclFilename ); wlInclFilenameField = new Label( wContentComp, SWT.LEFT ); wlInclFilenameField.setText( BaseMessages.getString( BASE_PKG, "TextFileInputDialog.InclFilenameField.Label" ) ); props.setLook( wlInclFilenameField ); FormData fdlInclFilenameField = new FormData(); fdlInclFilenameField.left = new FormAttachment( wInclFilename, margin ); fdlInclFilenameField.top = new FormAttachment( wNoempty, margin ); wlInclFilenameField.setLayoutData( fdlInclFilenameField ); wInclFilenameField = new Text( wContentComp, SWT.SINGLE | SWT.LEFT | SWT.BORDER ); props.setLook( wInclFilenameField ); wInclFilenameField.addModifyListener( lsMod ); FormData fdInclFilenameField = new FormData(); fdInclFilenameField.left = new FormAttachment( wlInclFilenameField, margin ); fdInclFilenameField.top = new FormAttachment( wNoempty, margin ); fdInclFilenameField.right = new FormAttachment( 100, 0 ); wInclFilenameField.setLayoutData( fdInclFilenameField ); Label wlInclRownum = new Label( wContentComp, SWT.RIGHT ); wlInclRownum.setText( BaseMessages.getString( BASE_PKG, "TextFileInputDialog.InclRownum.Label" ) ); props.setLook( wlInclRownum ); FormData fdlInclRownum = new FormData(); fdlInclRownum.left = new FormAttachment( 0, 0 ); fdlInclRownum.top = new FormAttachment( wInclFilenameField, margin ); fdlInclRownum.right = new FormAttachment( middle, -margin ); wlInclRownum.setLayoutData( fdlInclRownum ); wInclRownum = new Button( wContentComp, SWT.CHECK ); props.setLook( wInclRownum ); wInclRownum.setToolTipText( BaseMessages.getString( BASE_PKG, "TextFileInputDialog.InclRownum.Tooltip" ) ); FormData fdRownum = new FormData(); fdRownum.left = new FormAttachment( middle, 0 ); fdRownum.top = new FormAttachment( wInclFilenameField, margin ); wInclRownum.setLayoutData( fdRownum ); wlInclRownumField = new Label( wContentComp, SWT.RIGHT ); wlInclRownumField.setText( BaseMessages.getString( BASE_PKG, "TextFileInputDialog.InclRownumField.Label" ) ); props.setLook( wlInclRownumField ); FormData fdlInclRownumField = new FormData(); fdlInclRownumField.left = new FormAttachment( wInclRownum, margin ); fdlInclRownumField.top = new FormAttachment( wInclFilenameField, margin ); wlInclRownumField.setLayoutData( fdlInclRownumField ); wInclRownumField = new Text( wContentComp, SWT.SINGLE | SWT.LEFT | SWT.BORDER ); props.setLook( wInclRownumField ); wInclRownumField.addModifyListener( lsMod ); FormData fdInclRownumField = new FormData(); fdInclRownumField.left = new FormAttachment( wlInclRownumField, margin ); fdInclRownumField.top = new FormAttachment( wInclFilenameField, margin ); fdInclRownumField.right = new FormAttachment( 100, 0 ); wInclRownumField.setLayoutData( fdInclRownumField ); wlRownumByFileField = new Label( wContentComp, SWT.RIGHT ); wlRownumByFileField.setText( BaseMessages.getString( BASE_PKG, "TextFileInputDialog.RownumByFile.Label" ) ); props.setLook( wlRownumByFileField ); FormData fdlRownumByFile = new FormData(); fdlRownumByFile.left = new FormAttachment( wInclRownum, margin ); fdlRownumByFile.top = new FormAttachment( wInclRownumField, margin ); wlRownumByFileField.setLayoutData( fdlRownumByFile ); wRownumByFile = new Button( wContentComp, SWT.CHECK ); props.setLook( wRownumByFile ); wRownumByFile.setToolTipText( BaseMessages.getString( BASE_PKG, "TextFileInputDialog.RownumByFile.Tooltip" ) ); FormData fdRownumByFile = new FormData(); fdRownumByFile.left = new FormAttachment( wlRownumByFileField, margin ); 
fdRownumByFile.top = new FormAttachment( wInclRownumField, margin ); wRownumByFile.setLayoutData( fdRownumByFile ); Label wlFormat = new Label( wContentComp, SWT.RIGHT ); wlFormat.setText( BaseMessages.getString( BASE_PKG, "TextFileInputDialog.Format.Label" ) ); props.setLook( wlFormat ); FormData fdlFormat = new FormData(); fdlFormat.left = new FormAttachment( 0, 0 ); fdlFormat.top = new FormAttachment( wRownumByFile, margin * 2 ); fdlFormat.right = new FormAttachment( middle, -margin ); wlFormat.setLayoutData( fdlFormat ); wFormat = new CCombo( wContentComp, SWT.BORDER | SWT.READ_ONLY ); wFormat.setText( BaseMessages.getString( BASE_PKG, "TextFileInputDialog.Format.Label" ) ); props.setLook( wFormat ); wFormat.add( "DOS" ); wFormat.add( "Unix" ); wFormat.add( "mixed" ); wFormat.select( 0 ); wFormat.addModifyListener( lsMod ); FormData fdFormat = new FormData(); fdFormat.left = new FormAttachment( middle, 0 ); fdFormat.top = new FormAttachment( wRownumByFile, margin * 2 ); fdFormat.right = new FormAttachment( 100, 0 ); wFormat.setLayoutData( fdFormat ); Label wlEncoding = new Label( wContentComp, SWT.RIGHT ); wlEncoding.setText( BaseMessages.getString( BASE_PKG, "TextFileInputDialog.Encoding.Label" ) ); props.setLook( wlEncoding ); FormData fdlEncoding = new FormData(); fdlEncoding.left = new FormAttachment( 0, 0 ); fdlEncoding.top = new FormAttachment( wFormat, margin ); fdlEncoding.right = new FormAttachment( middle, -margin ); wlEncoding.setLayoutData( fdlEncoding ); wEncoding = new CCombo( wContentComp, SWT.BORDER | SWT.READ_ONLY ); wEncoding.setEditable( true ); props.setLook( wEncoding ); wEncoding.addModifyListener( lsMod ); FormData fdEncoding = new FormData(); fdEncoding.left = new FormAttachment( middle, 0 ); fdEncoding.top = new FormAttachment( wFormat, margin ); fdEncoding.right = new FormAttachment( 100, 0 ); wEncoding.setLayoutData( fdEncoding ); wEncoding.addFocusListener( new FocusListener() { @Override public void focusLost( org.eclipse.swt.events.FocusEvent e ) { // No-Op Necessary } @Override public void focusGained( org.eclipse.swt.events.FocusEvent e ) { Cursor busy = new Cursor( shell.getDisplay(), SWT.CURSOR_WAIT ); shell.setCursor( busy ); setEncodings(); shell.setCursor( null ); busy.dispose(); } } ); Label wlLimit = new Label( wContentComp, SWT.RIGHT ); wlLimit.setText( BaseMessages.getString( BASE_PKG, "TextFileInputDialog.Limit.Label" ) ); props.setLook( wlLimit ); FormData fdlLimit = new FormData(); fdlLimit.left = new FormAttachment( 0, 0 ); fdlLimit.top = new FormAttachment( wEncoding, margin ); fdlLimit.right = new FormAttachment( middle, -margin ); wlLimit.setLayoutData( fdlLimit ); wLimit = new Text( wContentComp, SWT.SINGLE | SWT.LEFT | SWT.BORDER ); props.setLook( wLimit ); wLimit.addModifyListener( lsMod ); FormData fdLimit = new FormData(); fdLimit.left = new FormAttachment( middle, 0 ); fdLimit.top = new FormAttachment( wEncoding, margin ); fdLimit.right = new FormAttachment( 100, 0 ); wLimit.setLayoutData( fdLimit ); // Date Lenient checkbox Label wlDateLenient = new Label( wContentComp, SWT.RIGHT ); wlDateLenient.setText( BaseMessages.getString( BASE_PKG, "TextFileInputDialog.DateLenient.Label" ) ); props.setLook( wlDateLenient ); FormData fdlDateLenient = new FormData(); fdlDateLenient.left = new FormAttachment( 0, 0 ); fdlDateLenient.top = new FormAttachment( wLimit, margin ); fdlDateLenient.right = new FormAttachment( middle, -margin ); wlDateLenient.setLayoutData( fdlDateLenient ); wDateLenient = new Button( wContentComp, SWT.CHECK ); 
wDateLenient.setToolTipText( BaseMessages.getString( BASE_PKG, "TextFileInputDialog.DateLenient.Tooltip" ) ); props.setLook( wDateLenient ); FormData fdDateLenient = new FormData(); fdDateLenient.left = new FormAttachment( middle, 0 ); fdDateLenient.top = new FormAttachment( wLimit, margin ); wDateLenient.setLayoutData( fdDateLenient ); Label wlDateLocale = new Label( wContentComp, SWT.RIGHT ); wlDateLocale.setText( BaseMessages.getString( BASE_PKG, "TextFileInputDialog.DateLocale.Label" ) ); props.setLook( wlDateLocale ); FormData fdlDateLocale = new FormData(); fdlDateLocale.left = new FormAttachment( 0, 0 ); fdlDateLocale.top = new FormAttachment( wDateLenient, margin ); fdlDateLocale.right = new FormAttachment( middle, -margin ); wlDateLocale.setLayoutData( fdlDateLocale ); wDateLocale = new CCombo( wContentComp, SWT.SINGLE | SWT.LEFT | SWT.BORDER ); wDateLocale.setToolTipText( BaseMessages.getString( BASE_PKG, "TextFileInputDialog.DateLocale.Tooltip" ) ); props.setLook( wDateLocale ); wDateLocale.addModifyListener( lsMod ); FormData fdDateLocale = new FormData(); fdDateLocale.left = new FormAttachment( middle, 0 ); fdDateLocale.top = new FormAttachment( wDateLenient, margin ); fdDateLocale.right = new FormAttachment( 100, 0 ); wDateLocale.setLayoutData( fdDateLocale ); wDateLocale.addFocusListener( new FocusListener() { @Override public void focusLost( org.eclipse.swt.events.FocusEvent e ) { // No-Op Necessary } @Override public void focusGained( org.eclipse.swt.events.FocusEvent e ) { Cursor busy = new Cursor( shell.getDisplay(), SWT.CURSOR_WAIT ); shell.setCursor( busy ); setLocales(); shell.setCursor( null ); busy.dispose(); } } ); // /////////////////////////////// // START OF AddFileResult GROUP // // /////////////////////////////// Group wAddFileResult = new Group( wContentComp, SWT.SHADOW_NONE ); props.setLook( wAddFileResult ); wAddFileResult.setText( BaseMessages.getString( BASE_PKG, "TextFileInputDialog.wAddFileResult.Label" ) ); FormLayout addFileResultgroupLayout = new FormLayout(); addFileResultgroupLayout.marginWidth = 10; addFileResultgroupLayout.marginHeight = 10; wAddFileResult.setLayout( addFileResultgroupLayout ); Label wlAddResult = new Label( wAddFileResult, SWT.RIGHT ); wlAddResult.setText( BaseMessages.getString( BASE_PKG, "TextFileInputDialog.AddResult.Label" ) ); props.setLook( wlAddResult ); FormData fdlAddResult = new FormData(); fdlAddResult.left = new FormAttachment( 0, 0 ); fdlAddResult.top = new FormAttachment( wDateLocale, margin ); fdlAddResult.right = new FormAttachment( middle, -margin ); wlAddResult.setLayoutData( fdlAddResult ); wAddResult = new Button( wAddFileResult, SWT.CHECK ); props.setLook( wAddResult ); wAddResult.setToolTipText( BaseMessages.getString( BASE_PKG, "TextFileInputDialog.AddResult.Tooltip" ) ); FormData fdAddResult = new FormData(); fdAddResult.left = new FormAttachment( middle, 0 ); fdAddResult.top = new FormAttachment( wDateLocale, margin ); wAddResult.setLayoutData( fdAddResult ); FormData fdAddFileResult = new FormData(); fdAddFileResult.left = new FormAttachment( 0, margin ); fdAddFileResult.top = new FormAttachment( wDateLocale, margin ); fdAddFileResult.right = new FormAttachment( 100, -margin ); wAddFileResult.setLayoutData( fdAddFileResult ); // /////////////////////////////////////////////////////////// // / END OF AddFileResult GROUP // /////////////////////////////////////////////////////////// wContentComp.pack(); // What's the size: Rectangle bounds = wContentComp.getBounds(); wContentSComp.setContent( wContentComp 
); wContentSComp.setExpandHorizontal( true ); wContentSComp.setExpandVertical( true ); wContentSComp.setMinWidth( bounds.width ); wContentSComp.setMinHeight( bounds.height ); FormData fdContentComp = new FormData(); fdContentComp.left = new FormAttachment( 0, 0 ); fdContentComp.top = new FormAttachment( 0, 0 ); fdContentComp.right = new FormAttachment( 100, 0 ); fdContentComp.bottom = new FormAttachment( 100, 0 ); wContentComp.setLayoutData( fdContentComp ); wContentTab.setControl( wContentSComp ); // /////////////////////////////////////////////////////////// // / END OF CONTENT TAB // /////////////////////////////////////////////////////////// } protected void setLocales() { Locale[] locale = Locale.getAvailableLocales(); String[] dateLocale = new String[ locale.length ]; for ( int i = 0; i < locale.length; i++ ) { dateLocale[ i ] = locale[ i ].toString(); } wDateLocale.setItems( dateLocale ); } private void addErrorTab() { // //////////////////////// // START OF ERROR TAB /// // / CTabItem wErrorTab = new CTabItem( wTabFolder, SWT.NONE ); wErrorTab.setText( BaseMessages.getString( BASE_PKG, "TextFileInputDialog.ErrorTab.TabTitle" ) ); ScrolledComposite wErrorSComp = new ScrolledComposite( wTabFolder, SWT.V_SCROLL | SWT.H_SCROLL ); wErrorSComp.setLayout( new FillLayout() ); FormLayout errorLayout = new FormLayout(); errorLayout.marginWidth = 3; errorLayout.marginHeight = 3; Composite wErrorComp = new Composite( wErrorSComp, SWT.NONE ); props.setLook( wErrorComp ); wErrorComp.setLayout( errorLayout ); // ERROR HANDLING... // ErrorIgnored? Label wlErrorIgnored = new Label( wErrorComp, SWT.RIGHT ); wlErrorIgnored.setText( BaseMessages.getString( BASE_PKG, "TextFileInputDialog.ErrorIgnored.Label" ) ); props.setLook( wlErrorIgnored ); FormData fdlErrorIgnored = new FormData(); fdlErrorIgnored.left = new FormAttachment( 0, 0 ); fdlErrorIgnored.top = new FormAttachment( 0, margin ); fdlErrorIgnored.right = new FormAttachment( middle, -margin ); wlErrorIgnored.setLayoutData( fdlErrorIgnored ); wErrorIgnored = new Button( wErrorComp, SWT.CHECK ); props.setLook( wErrorIgnored ); wErrorIgnored.setToolTipText( BaseMessages.getString( BASE_PKG, "TextFileInputDialog.ErrorIgnored.Tooltip" ) ); FormData fdErrorIgnored = new FormData(); fdErrorIgnored.left = new FormAttachment( middle, 0 ); fdErrorIgnored.top = new FormAttachment( 0, margin ); wErrorIgnored.setLayoutData( fdErrorIgnored ); // Skip error lines? 
wlSkipErrorLines = new Label( wErrorComp, SWT.RIGHT ); wlSkipErrorLines.setText( BaseMessages.getString( BASE_PKG, "TextFileInputDialog.SkipErrorLines.Label" ) ); props.setLook( wlSkipErrorLines ); FormData fdlSkipErrorLines = new FormData(); fdlSkipErrorLines.left = new FormAttachment( 0, 0 ); fdlSkipErrorLines.top = new FormAttachment( wErrorIgnored, margin ); fdlSkipErrorLines.right = new FormAttachment( middle, -margin ); wlSkipErrorLines.setLayoutData( fdlSkipErrorLines ); wSkipErrorLines = new Button( wErrorComp, SWT.CHECK ); props.setLook( wSkipErrorLines ); wSkipErrorLines.setToolTipText( BaseMessages.getString( BASE_PKG, "TextFileInputDialog.SkipErrorLines.Tooltip" ) ); FormData fdSkipErrorLines = new FormData(); fdSkipErrorLines.left = new FormAttachment( middle, 0 ); fdSkipErrorLines.top = new FormAttachment( wErrorIgnored, margin ); wSkipErrorLines.setLayoutData( fdSkipErrorLines ); wlErrorCount = new Label( wErrorComp, SWT.RIGHT ); wlErrorCount.setText( BaseMessages.getString( BASE_PKG, "TextFileInputDialog.ErrorCount.Label" ) ); props.setLook( wlErrorCount ); FormData fdlErrorCount = new FormData(); fdlErrorCount.left = new FormAttachment( 0, 0 ); fdlErrorCount.top = new FormAttachment( wSkipErrorLines, margin ); fdlErrorCount.right = new FormAttachment( middle, -margin ); wlErrorCount.setLayoutData( fdlErrorCount ); wErrorCount = new Text( wErrorComp, SWT.SINGLE | SWT.LEFT | SWT.BORDER ); props.setLook( wErrorCount ); wErrorCount.addModifyListener( lsMod ); FormData fdErrorCount = new FormData(); fdErrorCount.left = new FormAttachment( middle, 0 ); fdErrorCount.top = new FormAttachment( wSkipErrorLines, margin ); fdErrorCount.right = new FormAttachment( 100, 0 ); wErrorCount.setLayoutData( fdErrorCount ); wlErrorFields = new Label( wErrorComp, SWT.RIGHT ); wlErrorFields.setText( BaseMessages.getString( BASE_PKG, "TextFileInputDialog.ErrorFields.Label" ) ); props.setLook( wlErrorFields ); FormData fdlErrorFields = new FormData(); fdlErrorFields.left = new FormAttachment( 0, 0 ); fdlErrorFields.top = new FormAttachment( wErrorCount, margin ); fdlErrorFields.right = new FormAttachment( middle, -margin ); wlErrorFields.setLayoutData( fdlErrorFields ); wErrorFields = new Text( wErrorComp, SWT.SINGLE | SWT.LEFT | SWT.BORDER ); props.setLook( wErrorFields ); wErrorFields.addModifyListener( lsMod ); FormData fdErrorFields = new FormData(); fdErrorFields.left = new FormAttachment( middle, 0 ); fdErrorFields.top = new FormAttachment( wErrorCount, margin ); fdErrorFields.right = new FormAttachment( 100, 0 ); wErrorFields.setLayoutData( fdErrorFields ); wlErrorText = new Label( wErrorComp, SWT.RIGHT ); wlErrorText.setText( BaseMessages.getString( BASE_PKG, "TextFileInputDialog.ErrorText.Label" ) ); props.setLook( wlErrorText ); FormData fdlErrorText = new FormData(); fdlErrorText.left = new FormAttachment( 0, 0 ); fdlErrorText.top = new FormAttachment( wErrorFields, margin ); fdlErrorText.right = new FormAttachment( middle, -margin ); wlErrorText.setLayoutData( fdlErrorText ); wErrorText = new Text( wErrorComp, SWT.SINGLE | SWT.LEFT | SWT.BORDER ); props.setLook( wErrorText ); wErrorText.addModifyListener( lsMod ); FormData fdErrorText = new FormData(); fdErrorText.left = new FormAttachment( middle, 0 ); fdErrorText.top = new FormAttachment( wErrorFields, margin ); fdErrorText.right = new FormAttachment( 100, 0 ); wErrorText.setLayoutData( fdErrorText ); // Bad lines files directory + extension Control previous = wErrorText; // BadDestDir line wlWarnDestDir = new Label( wErrorComp, 
SWT.RIGHT ); wlWarnDestDir.setText( BaseMessages.getString( BASE_PKG, "TextFileInputDialog.WarnDestDir.Label" ) ); props.setLook( wlWarnDestDir ); FormData fdlWarnDestDir = new FormData(); fdlWarnDestDir.left = new FormAttachment( 0, 0 ); fdlWarnDestDir.top = new FormAttachment( previous, margin * 4 ); fdlWarnDestDir.right = new FormAttachment( middle, -margin ); wlWarnDestDir.setLayoutData( fdlWarnDestDir ); wbbWarnDestDir = new Button( wErrorComp, SWT.PUSH | SWT.CENTER ); props.setLook( wbbWarnDestDir ); wbbWarnDestDir.setText( BUTTON_BROWSE ); wbbWarnDestDir.setToolTipText( BaseMessages.getString( BASE_PKG, "System.Tooltip.BrowseForDir" ) ); FormData fdbBadDestDir = new FormData(); fdbBadDestDir.right = new FormAttachment( 100, 0 ); fdbBadDestDir.top = new FormAttachment( previous, margin * 4 ); wbbWarnDestDir.setLayoutData( fdbBadDestDir ); wbvWarnDestDir = new Button( wErrorComp, SWT.PUSH | SWT.CENTER ); props.setLook( wbvWarnDestDir ); wbvWarnDestDir.setText( BUTTON_VARIABLE ); wbvWarnDestDir.setToolTipText( TOOLTIP_VARIABLE ); FormData fdbvWarnDestDir = new FormData(); fdbvWarnDestDir.right = new FormAttachment( wbbWarnDestDir, -margin ); fdbvWarnDestDir.top = new FormAttachment( previous, margin * 4 ); wbvWarnDestDir.setLayoutData( fdbvWarnDestDir ); wWarnExt = new Text( wErrorComp, SWT.SINGLE | SWT.LEFT | SWT.BORDER ); props.setLook( wWarnExt ); wWarnExt.addModifyListener( lsMod ); FormData fdWarnDestExt = new FormData(); fdWarnDestExt.left = new FormAttachment( wbvWarnDestDir, -150 ); fdWarnDestExt.right = new FormAttachment( wbvWarnDestDir, -margin ); fdWarnDestExt.top = new FormAttachment( previous, margin * 4 ); wWarnExt.setLayoutData( fdWarnDestExt ); wlWarnExt = new Label( wErrorComp, SWT.RIGHT ); wlWarnExt.setText( LABEL_EXTENSION ); props.setLook( wlWarnExt ); FormData fdlWarnDestExt = new FormData(); fdlWarnDestExt.top = new FormAttachment( previous, margin * 4 ); fdlWarnDestExt.right = new FormAttachment( wWarnExt, -margin ); wlWarnExt.setLayoutData( fdlWarnDestExt ); wWarnDestDir = new Text( wErrorComp, SWT.SINGLE | SWT.LEFT | SWT.BORDER ); props.setLook( wWarnDestDir ); wWarnDestDir.addModifyListener( lsMod ); FormData fdBadDestDir = new FormData(); fdBadDestDir.left = new FormAttachment( middle, 0 ); fdBadDestDir.right = new FormAttachment( wlWarnExt, -margin ); fdBadDestDir.top = new FormAttachment( previous, margin * 4 ); wWarnDestDir.setLayoutData( fdBadDestDir ); // Listen to the Browse... button wbbWarnDestDir.addSelectionListener( new DirectoryBrowserAdapter( wWarnDestDir ) ); // Listen to the Variable... 
button wbvWarnDestDir.addSelectionListener( VariableButtonListenerFactory.getSelectionAdapter( shell, wWarnDestDir, transMeta ) ); // Whenever something changes, set the tooltip to the expanded version of the directory: wWarnDestDir.addModifyListener( getModifyListenerTooltipText( wWarnDestDir ) ); // Error lines files directory + extension previous = wWarnDestDir; // ErrorDestDir line wlErrorDestDir = new Label( wErrorComp, SWT.RIGHT ); wlErrorDestDir.setText( BaseMessages.getString( BASE_PKG, "TextFileInputDialog.ErrorDestDir.Label" ) ); props.setLook( wlErrorDestDir ); FormData fdlErrorDestDir = new FormData(); fdlErrorDestDir.left = new FormAttachment( 0, 0 ); fdlErrorDestDir.top = new FormAttachment( previous, margin ); fdlErrorDestDir.right = new FormAttachment( middle, -margin ); wlErrorDestDir.setLayoutData( fdlErrorDestDir ); wbbErrorDestDir = new Button( wErrorComp, SWT.PUSH | SWT.CENTER ); props.setLook( wbbErrorDestDir ); wbbErrorDestDir.setText( BUTTON_BROWSE ); wbbErrorDestDir.setToolTipText( BaseMessages.getString( BASE_PKG, "System.Tooltip.BrowseForDir" ) ); FormData fdbErrorDestDir = new FormData(); fdbErrorDestDir.right = new FormAttachment( 100, 0 ); fdbErrorDestDir.top = new FormAttachment( previous, margin ); wbbErrorDestDir.setLayoutData( fdbErrorDestDir ); wbvErrorDestDir = new Button( wErrorComp, SWT.PUSH | SWT.CENTER ); props.setLook( wbvErrorDestDir ); wbvErrorDestDir.setText( BUTTON_VARIABLE ); wbvErrorDestDir.setToolTipText( TOOLTIP_VARIABLE ); FormData fdbvErrorDestDir = new FormData(); fdbvErrorDestDir.right = new FormAttachment( wbbErrorDestDir, -margin ); fdbvErrorDestDir.left = new FormAttachment( wbvWarnDestDir, 0, SWT.LEFT ); fdbvErrorDestDir.top = new FormAttachment( previous, margin ); wbvErrorDestDir.setLayoutData( fdbvErrorDestDir ); wErrorExt = new Text( wErrorComp, SWT.SINGLE | SWT.LEFT | SWT.BORDER ); props.setLook( wErrorExt ); wErrorExt.addModifyListener( lsMod ); FormData fdErrorDestExt = new FormData(); fdErrorDestExt.left = new FormAttachment( wWarnExt, 0, SWT.LEFT ); fdErrorDestExt.right = new FormAttachment( wWarnExt, 0, SWT.RIGHT ); fdErrorDestExt.top = new FormAttachment( previous, margin ); wErrorExt.setLayoutData( fdErrorDestExt ); wlErrorExt = new Label( wErrorComp, SWT.RIGHT ); wlErrorExt.setText( LABEL_EXTENSION ); props.setLook( wlErrorExt ); FormData fdlErrorDestExt = new FormData(); fdlErrorDestExt.top = new FormAttachment( previous, margin ); fdlErrorDestExt.right = new FormAttachment( wErrorExt, -margin ); wlErrorExt.setLayoutData( fdlErrorDestExt ); wErrorDestDir = new Text( wErrorComp, SWT.SINGLE | SWT.LEFT | SWT.BORDER ); props.setLook( wErrorDestDir ); wErrorDestDir.addModifyListener( lsMod ); FormData fdErrorDestDir = new FormData(); fdErrorDestDir.left = new FormAttachment( middle, 0 ); fdErrorDestDir.right = new FormAttachment( wlErrorExt, -margin ); fdErrorDestDir.top = new FormAttachment( previous, margin ); wErrorDestDir.setLayoutData( fdErrorDestDir ); // Listen to the Browse... button wbbErrorDestDir.addSelectionListener( new DirectoryBrowserAdapter( wErrorDestDir ) ); // Listen to the Variable... 
button wbvErrorDestDir.addSelectionListener( VariableButtonListenerFactory.getSelectionAdapter( shell, wErrorDestDir, transMeta ) ); // Whenever something changes, set the tooltip to the expanded version of the directory: wErrorDestDir.addModifyListener( getModifyListenerTooltipText( wErrorDestDir ) ); // Data Error lines files directory + extension previous = wErrorDestDir; // LineNrDestDir line wlLineNrDestDir = new Label( wErrorComp, SWT.RIGHT ); wlLineNrDestDir.setText( BaseMessages.getString( BASE_PKG, "TextFileInputDialog.LineNrDestDir.Label" ) ); props.setLook( wlLineNrDestDir ); FormData fdlLineNrDestDir = new FormData(); fdlLineNrDestDir.left = new FormAttachment( 0, 0 ); fdlLineNrDestDir.top = new FormAttachment( previous, margin ); fdlLineNrDestDir.right = new FormAttachment( middle, -margin ); wlLineNrDestDir.setLayoutData( fdlLineNrDestDir ); wbbLineNrDestDir = new Button( wErrorComp, SWT.PUSH | SWT.CENTER ); props.setLook( wbbLineNrDestDir ); wbbLineNrDestDir.setText( BUTTON_BROWSE ); wbbLineNrDestDir.setToolTipText( BaseMessages.getString( BASE_PKG, "System.Tooltip.Browse" ) ); FormData fdbLineNrDestDir = new FormData(); fdbLineNrDestDir.right = new FormAttachment( 100, 0 ); fdbLineNrDestDir.top = new FormAttachment( previous, margin ); wbbLineNrDestDir.setLayoutData( fdbLineNrDestDir ); wbvLineNrDestDir = new Button( wErrorComp, SWT.PUSH | SWT.CENTER ); props.setLook( wbvLineNrDestDir ); wbvLineNrDestDir.setText( BUTTON_VARIABLE ); wbvLineNrDestDir.setToolTipText( TOOLTIP_VARIABLE ); FormData fdbvLineNrDestDir = new FormData(); fdbvLineNrDestDir.right = new FormAttachment( wbbLineNrDestDir, -margin ); fdbvLineNrDestDir.left = new FormAttachment( wbvErrorDestDir, 0, SWT.LEFT ); fdbvLineNrDestDir.top = new FormAttachment( previous, margin ); wbvLineNrDestDir.setLayoutData( fdbvLineNrDestDir ); wLineNrExt = new Text( wErrorComp, SWT.SINGLE | SWT.LEFT | SWT.BORDER ); props.setLook( wLineNrExt ); wLineNrExt.addModifyListener( lsMod ); FormData fdLineNrDestExt = new FormData(); fdLineNrDestExt.left = new FormAttachment( wErrorExt, 0, SWT.LEFT ); fdLineNrDestExt.right = new FormAttachment( wErrorExt, 0, SWT.RIGHT ); fdLineNrDestExt.top = new FormAttachment( previous, margin ); wLineNrExt.setLayoutData( fdLineNrDestExt ); wlLineNrExt = new Label( wErrorComp, SWT.RIGHT ); wlLineNrExt.setText( LABEL_EXTENSION ); props.setLook( wlLineNrExt ); FormData fdlLineNrDestExt = new FormData(); fdlLineNrDestExt.top = new FormAttachment( previous, margin ); fdlLineNrDestExt.right = new FormAttachment( wLineNrExt, -margin ); wlLineNrExt.setLayoutData( fdlLineNrDestExt ); wLineNrDestDir = new Text( wErrorComp, SWT.SINGLE | SWT.LEFT | SWT.BORDER ); props.setLook( wLineNrDestDir ); wLineNrDestDir.addModifyListener( lsMod ); FormData fdLineNrDestDir = new FormData(); fdLineNrDestDir.left = new FormAttachment( middle, 0 ); fdLineNrDestDir.right = new FormAttachment( wlLineNrExt, -margin ); fdLineNrDestDir.top = new FormAttachment( previous, margin ); wLineNrDestDir.setLayoutData( fdLineNrDestDir ); // Listen to the Browse... button wbbLineNrDestDir.addSelectionListener( new DirectoryBrowserAdapter( wLineNrDestDir ) ); // Listen to the Variable...
button wbvLineNrDestDir.addSelectionListener( VariableButtonListenerFactory.getSelectionAdapter( shell, wLineNrDestDir, transMeta ) ); // Whenever something changes, set the tooltip to the expanded version of the directory: wLineNrDestDir.addModifyListener( getModifyListenerTooltipText( wLineNrDestDir ) ); FormData fdErrorComp = new FormData(); fdErrorComp.left = new FormAttachment( 0, 0 ); fdErrorComp.top = new FormAttachment( 0, 0 ); fdErrorComp.right = new FormAttachment( 100, 0 ); fdErrorComp.bottom = new FormAttachment( 100, 0 ); wErrorComp.setLayoutData( fdErrorComp ); wErrorComp.pack(); // What's the size: Rectangle bounds = wErrorComp.getBounds(); wErrorSComp.setContent( wErrorComp ); wErrorSComp.setExpandHorizontal( true ); wErrorSComp.setExpandVertical( true ); wErrorSComp.setMinWidth( bounds.width ); wErrorSComp.setMinHeight( bounds.height ); wErrorTab.setControl( wErrorSComp ); // /////////////////////////////////////////////////////////// // / END OF ERROR TAB // /////////////////////////////////////////////////////////// } private void addFiltersTabs() { // Filters tab... CTabItem wFilterTab = new CTabItem( wTabFolder, SWT.NONE ); wFilterTab.setText( BaseMessages.getString( BASE_PKG, "TextFileInputDialog.FilterTab.TabTitle" ) ); FormLayout filterLayout = new FormLayout(); filterLayout.marginWidth = Const.FORM_MARGIN; filterLayout.marginHeight = Const.FORM_MARGIN; Composite wFilterComp = new Composite( wTabFolder, SWT.NONE ); wFilterComp.setLayout( filterLayout ); props.setLook( wFilterComp ); final int FilterRows = input.getFilter().length; ColumnInfo[] colinf = new ColumnInfo[] { new ColumnInfo( BaseMessages.getString( BASE_PKG, "TextFileInputDialog.FilterStringColumn.Column" ), ColumnInfo.COLUMN_TYPE_TEXT, false ), new ColumnInfo( BaseMessages.getString( BASE_PKG, "TextFileInputDialog.FilterPositionColumn.Column" ), ColumnInfo.COLUMN_TYPE_TEXT, false ), new ColumnInfo( BaseMessages.getString( BASE_PKG, "TextFileInputDialog.StopOnFilterColumn.Column" ), ColumnInfo.COLUMN_TYPE_CCOMBO, YES_NO_COMBO ), new ColumnInfo( BaseMessages.getString( BASE_PKG, "TextFileInputDialog.FilterPositiveColumn.Column" ), ColumnInfo.COLUMN_TYPE_CCOMBO, YES_NO_COMBO ) }; colinf[ 2 ].setToolTip( BaseMessages.getString( BASE_PKG, "TextFileInputDialog.StopOnFilterColumn.Tooltip" ) ); colinf[ 3 ].setToolTip( BaseMessages.getString( BASE_PKG, "TextFileInputDialog.FilterPositiveColumn.Tooltip" ) ); wFilter = new TableView( transMeta, wFilterComp, SWT.FULL_SELECTION | SWT.MULTI, colinf, FilterRows, lsMod, props ); FormData fdFilter = new FormData(); fdFilter.left = new FormAttachment( 0, 0 ); fdFilter.top = new FormAttachment( 0, 0 ); fdFilter.right = new FormAttachment( 100, 0 ); fdFilter.bottom = new FormAttachment( 100, 0 ); wFilter.setLayoutData( fdFilter ); FormData fdFilterComp = new FormData(); fdFilterComp.left = new FormAttachment( 0, 0 ); fdFilterComp.top = new FormAttachment( 0, 0 ); fdFilterComp.right = new FormAttachment( 100, 0 ); fdFilterComp.bottom = new FormAttachment( 100, 0 ); wFilterComp.setLayoutData( fdFilterComp ); wFilterComp.layout(); wFilterTab.setControl( wFilterComp ); } private void addFieldsTabs() { // Fields tab...
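// The grid on this tab mirrors the standard Text File Input field definition: name, type, format, position,
// length, precision, currency, decimal, group, null-if, if-null, trim type and repeat. The "Get fields"
// button (wGet) created here is wired to get() in open() to populate the grid.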
CTabItem wFieldsTab = new CTabItem( wTabFolder, SWT.NONE ); wFieldsTab.setText( BaseMessages.getString( BASE_PKG, "TextFileInputDialog.FieldsTab.TabTitle" ) ); FormLayout fieldsLayout = new FormLayout(); fieldsLayout.marginWidth = Const.FORM_MARGIN; fieldsLayout.marginHeight = Const.FORM_MARGIN; Composite wFieldsComp = new Composite( wTabFolder, SWT.NONE ); wFieldsComp.setLayout( fieldsLayout ); props.setLook( wFieldsComp ); wGet = new Button( wFieldsComp, SWT.PUSH ); wGet.setText( BaseMessages.getString( BASE_PKG, "System.Button.GetFields" ) ); fdGet = new FormData(); fdGet.left = new FormAttachment( 50, 0 ); fdGet.bottom = new FormAttachment( 100, 0 ); wGet.setLayoutData( fdGet ); final int FieldsRows = input.inputFields.length; ColumnInfo[] colinf = new ColumnInfo[] { new ColumnInfo( BaseMessages.getString( BASE_PKG, "TextFileInputDialog.NameColumn.Column" ), ColumnInfo.COLUMN_TYPE_TEXT, false ), new ColumnInfo( BaseMessages.getString( BASE_PKG, "TextFileInputDialog.TypeColumn.Column" ), ColumnInfo.COLUMN_TYPE_CCOMBO, ValueMetaBase.getTypes(), true ), new ColumnInfo( BaseMessages.getString( BASE_PKG, "TextFileInputDialog.FormatColumn.Column" ), ColumnInfo.COLUMN_TYPE_FORMAT, 2 ), new ColumnInfo( BaseMessages.getString( BASE_PKG, "TextFileInputDialog.PositionColumn.Column" ), ColumnInfo.COLUMN_TYPE_TEXT, false ), new ColumnInfo( BaseMessages.getString( BASE_PKG, "TextFileInputDialog.LengthColumn.Column" ), ColumnInfo.COLUMN_TYPE_TEXT, false ), new ColumnInfo( BaseMessages.getString( BASE_PKG, "TextFileInputDialog.PrecisionColumn.Column" ), ColumnInfo.COLUMN_TYPE_TEXT, false ), new ColumnInfo( BaseMessages.getString( BASE_PKG, "TextFileInputDialog.CurrencyColumn.Column" ), ColumnInfo.COLUMN_TYPE_TEXT, false ), new ColumnInfo( BaseMessages.getString( BASE_PKG, "TextFileInputDialog.DecimalColumn.Column" ), ColumnInfo.COLUMN_TYPE_TEXT, false ), new ColumnInfo( BaseMessages.getString( BASE_PKG, "TextFileInputDialog.GroupColumn.Column" ), ColumnInfo.COLUMN_TYPE_TEXT, false ), new ColumnInfo( BaseMessages.getString( BASE_PKG, "TextFileInputDialog.NullIfColumn.Column" ), ColumnInfo.COLUMN_TYPE_TEXT, false ), new ColumnInfo( BaseMessages.getString( BASE_PKG, "TextFileInputDialog.IfNullColumn.Column" ), ColumnInfo.COLUMN_TYPE_TEXT, false ), new ColumnInfo( BaseMessages.getString( BASE_PKG, "TextFileInputDialog.TrimTypeColumn.Column" ), ColumnInfo.COLUMN_TYPE_CCOMBO, ValueMetaBase.trimTypeDesc, true ), new ColumnInfo( BaseMessages.getString( BASE_PKG, "TextFileInputDialog.RepeatColumn.Column" ), ColumnInfo.COLUMN_TYPE_CCOMBO, new String[] { COMBO_YES, COMBO_NO }, true ) }; colinf[ 12 ].setToolTip( BaseMessages.getString( BASE_PKG, "TextFileInputDialog.RepeatColumn.Tooltip" ) ); wFields = new TableView( transMeta, wFieldsComp, SWT.FULL_SELECTION | SWT.MULTI, colinf, FieldsRows, lsMod, props ); FormData fdFields = new FormData(); fdFields.left = new FormAttachment( 0, 0 ); fdFields.top = new FormAttachment( 0, 0 ); fdFields.right = new FormAttachment( 100, 0 ); fdFields.bottom = new FormAttachment( wGet, -margin ); wFields.setLayoutData( fdFields ); FormData fdFieldsComp = new FormData(); fdFieldsComp.left = new FormAttachment( 0, 0 ); fdFieldsComp.top = new FormAttachment( 0, 0 ); fdFieldsComp.right = new FormAttachment( 100, 0 ); fdFieldsComp.bottom = new FormAttachment( 100, 0 ); wFieldsComp.setLayoutData( fdFieldsComp ); wFieldsComp.layout(); wFieldsTab.setControl( wFieldsComp ); } public void setFlags() { boolean accept = wAccFilenames.getSelection(); wlPassThruFields.setEnabled( accept ); 
wPassThruFields.setEnabled( accept ); if ( !wAccFilenames.getSelection() ) { wPassThruFields.setSelection( false ); } wlAccField.setEnabled( accept ); wAccField.setEnabled( accept ); wlAccStep.setEnabled( accept ); wAccStep.setEnabled( accept ); wlFilenameList.setEnabled( !accept ); wFilenameList.setEnabled( !accept ); wbShowFiles.setEnabled( !accept ); wFirst.setEnabled( !accept ); wFirstHeader.setEnabled( !accept ); wlInclFilenameField.setEnabled( wInclFilename.getSelection() ); wInclFilenameField.setEnabled( wInclFilename.getSelection() ); wlInclRownumField.setEnabled( wInclRownum.getSelection() ); wInclRownumField.setEnabled( wInclRownum.getSelection() ); wlRownumByFileField.setEnabled( wInclRownum.getSelection() ); wRownumByFile.setEnabled( wInclRownum.getSelection() ); // Error handling tab... wlSkipErrorLines.setEnabled( wErrorIgnored.getSelection() ); wSkipErrorLines.setEnabled( wErrorIgnored.getSelection() ); wlErrorCount.setEnabled( wErrorIgnored.getSelection() ); wErrorCount.setEnabled( wErrorIgnored.getSelection() ); wlErrorFields.setEnabled( wErrorIgnored.getSelection() ); wErrorFields.setEnabled( wErrorIgnored.getSelection() ); wlErrorText.setEnabled( wErrorIgnored.getSelection() ); wErrorText.setEnabled( wErrorIgnored.getSelection() ); wlWarnDestDir.setEnabled( wErrorIgnored.getSelection() ); wWarnDestDir.setEnabled( wErrorIgnored.getSelection() ); wlWarnExt.setEnabled( wErrorIgnored.getSelection() ); wWarnExt.setEnabled( wErrorIgnored.getSelection() ); wbbWarnDestDir.setEnabled( wErrorIgnored.getSelection() ); wbvWarnDestDir.setEnabled( wErrorIgnored.getSelection() ); wlErrorDestDir.setEnabled( wErrorIgnored.getSelection() ); wErrorDestDir.setEnabled( wErrorIgnored.getSelection() ); wlErrorExt.setEnabled( wErrorIgnored.getSelection() ); wErrorExt.setEnabled( wErrorIgnored.getSelection() ); wbbErrorDestDir.setEnabled( wErrorIgnored.getSelection() ); wbvErrorDestDir.setEnabled( wErrorIgnored.getSelection() ); wlLineNrDestDir.setEnabled( wErrorIgnored.getSelection() ); wLineNrDestDir.setEnabled( wErrorIgnored.getSelection() ); wlLineNrExt.setEnabled( wErrorIgnored.getSelection() ); wLineNrExt.setEnabled( wErrorIgnored.getSelection() ); wbbLineNrDestDir.setEnabled( wErrorIgnored.getSelection() ); wbvLineNrDestDir.setEnabled( wErrorIgnored.getSelection() ); wlNrHeader.setEnabled( wHeader.getSelection() ); wNrHeader.setEnabled( wHeader.getSelection() ); wlNrFooter.setEnabled( wFooter.getSelection() ); wNrFooter.setEnabled( wFooter.getSelection() ); wlNrWraps.setEnabled( wWraps.getSelection() ); wNrWraps.setEnabled( wWraps.getSelection() ); wlNrLinesPerPage.setEnabled( wLayoutPaged.getSelection() ); wNrLinesPerPage.setEnabled( wLayoutPaged.getSelection() ); wlNrLinesDocHeader.setEnabled( wLayoutPaged.getSelection() ); wNrLinesDocHeader.setEnabled( wLayoutPaged.getSelection() ); } /** * Read the data from the HadoopFileInputMeta object and show it in this dialog. * * @param meta The HadoopFileInputMeta object to obtain the data from. 
*/ public void getData( HadoopFileInputMeta meta ) { final HadoopFileInputMeta in = meta; wAccFilenames.setSelection( in.isAcceptingFilenames() ); wPassThruFields.setSelection( in.inputFiles.passingThruFields ); if ( in.getAcceptingField() != null ) { wAccField.setText( in.getAcceptingField() ); } if ( in.getAcceptingStep() != null ) { wAccStep.setText( in.getAcceptingStep().getName() ); } if ( in.getFileName() != null ) { wFilenameList.removeAll(); for ( int i = 0; i < in.getFileName().length; i++ ) { String sourceUrl = in.getFileName()[ i ]; String clusterName = input.getClusterNameBy( sourceUrl ); String environment = STATIC_ENVIRONMENT; if ( in.environment != null && i < in.environment.length && in.environment[ i ] != null ) { environment = in.environment[ i ]; } if ( clusterName != null ) { clusterName = clusterName.startsWith( HadoopFileInputMeta.LOCAL_SOURCE_FILE ) ? LOCAL_ENVIRONMENT : clusterName; clusterName = clusterName.startsWith( HadoopFileInputMeta.STATIC_SOURCE_FILE ) ? STATIC_ENVIRONMENT : clusterName; clusterName = clusterName.startsWith( HadoopFileInputMeta.S3_SOURCE_FILE ) ? S3_ENVIRONMENT : clusterName; if ( clusterName.equals( LOCAL_ENVIRONMENT ) || clusterName.equals( STATIC_ENVIRONMENT ) || clusterName.equals( S3_ENVIRONMENT ) ) { environment = clusterName; } else { sourceUrl = input.getUrlPath( sourceUrl ); NamedCluster c = namedClusterService.getNamedClusterByName( clusterName, metaStore ); environment = c == null ? "" : clusterName; } } wFilenameList .add( environment, sourceUrl, in.inputFiles.fileMask[ i ], in.getRequiredFilesDesc( in.inputFiles.fileRequired[ i ] ), in.getRequiredFilesDesc( in.inputFiles.includeSubFolders[ i ] ) ); } wFilenameList.removeEmptyRows(); wFilenameList.setRowNums(); wFilenameList.optWidth( true ); } if ( in.content.fileType != null ) { wFiletype.setText( in.content.fileType ); } if ( in.content.separator != null ) { wSeparator.setText( in.content.separator ); } if ( in.content.enclosure != null ) { wEnclosure.setText( in.content.enclosure ); } if ( in.content.escapeCharacter != null ) { wEscape.setText( in.content.escapeCharacter ); } wHeader.setSelection( in.content.header ); wNrHeader.setText( "" + in.content.nrHeaderLines ); wFooter.setSelection( in.content.footer ); wNrFooter.setText( "" + in.content.nrFooterLines ); wWraps.setSelection( in.content.lineWrapped ); wNrWraps.setText( "" + in.content.nrWraps ); wLayoutPaged.setSelection( in.content.layoutPaged ); wNrLinesPerPage.setText( "" + in.content.nrLinesPerPage ); wNrLinesDocHeader.setText( "" + in.content.nrLinesDocHeader ); if ( in.content.fileCompression != null ) { wCompression.setText( in.content.fileCompression ); } wNoempty.setSelection( in.content.noEmptyLines ); wInclFilename.setSelection( in.content.includeFilename ); wInclRownum.setSelection( in.content.includeRowNumber ); wRownumByFile.setSelection( in.content.rowNumberByFile ); wDateLenient.setSelection( in.content.dateFormatLenient ); wAddResult.setSelection( in.inputFiles.isaddresult ); if ( in.content.filenameField != null ) { wInclFilenameField.setText( in.content.filenameField ); } if ( in.content.rowNumberField != null ) { wInclRownumField.setText( in.content.rowNumberField ); } if ( in.content.fileFormat != null ) { wFormat.setText( in.content.fileFormat ); } wLimit.setText( "" + in.content.rowLimit ); logDebug( "getting fields info..." ); getFieldsData( in, false ); if ( in.getEncoding() != null ) { wEncoding.setText( in.getEncoding() ); } // Error handling fields... 
wErrorIgnored.setSelection( in.errorHandling.errorIgnored ); wSkipErrorLines.setSelection( in.isErrorLineSkipped() ); if ( in.getErrorCountField() != null ) { wErrorCount.setText( in.getErrorCountField() ); } if ( in.getErrorFieldsField() != null ) { wErrorFields.setText( in.getErrorFieldsField() ); } if ( in.getErrorTextField() != null ) { wErrorText.setText( in.getErrorTextField() ); } if ( in.errorHandling.warningFilesDestinationDirectory != null ) { wWarnDestDir.setText( in.errorHandling.warningFilesDestinationDirectory ); } if ( in.errorHandling.warningFilesExtension != null ) { wWarnExt.setText( in.errorHandling.warningFilesExtension ); } if ( in.errorHandling.errorFilesDestinationDirectory != null ) { wErrorDestDir.setText( in.errorHandling.errorFilesDestinationDirectory ); } if ( in.errorHandling.errorFilesExtension != null ) { wErrorExt.setText( in.errorHandling.errorFilesExtension ); } if ( in.errorHandling.lineNumberFilesDestinationDirectory != null ) { wLineNrDestDir.setText( in.errorHandling.lineNumberFilesDestinationDirectory ); } if ( in.errorHandling.lineNumberFilesExtension != null ) { wLineNrExt.setText( in.errorHandling.lineNumberFilesExtension ); } for ( int i = 0; i < in.getFilter().length; i++ ) { TableItem item = wFilter.table.getItem( i ); TextFileFilter filter = in.getFilter()[ i ]; if ( filter.getFilterString() != null ) { item.setText( 1, filter.getFilterString() ); } if ( filter.getFilterPosition() >= 0 ) { item.setText( 2, "" + filter.getFilterPosition() ); } item.setText( 3, filter.isFilterLastLine() ? COMBO_YES : COMBO_NO ); item.setText( 4, filter.isFilterPositive() ? COMBO_YES : COMBO_NO ); } // Date locale wDateLocale.setText( in.content.dateFormatLocale.toString() ); wFields.removeEmptyRows(); wFields.setRowNums(); wFields.optWidth( true ); wFilter.removeEmptyRows(); wFilter.setRowNums(); wFilter.optWidth( true ); setFlags(); wStepname.selectAll(); } private void getFieldsData( HadoopFileInputMeta in, boolean insertAtTop ) { for ( int i = 0; i < in.inputFields.length; i++ ) { BaseFileField field = in.inputFields[ i ]; TableItem item; if ( insertAtTop ) { item = new TableItem( wFields.table, SWT.NONE, i ); } else { if ( i >= wFields.table.getItemCount() ) { item = new TableItem( wFields.table, SWT.NONE ); } else { item = wFields.table.getItem( i ); } } item.setText( 1, field.getName() ); String type = field.getTypeDesc(); String format = field.getFormat(); String position = "" + field.getPosition(); String length = "" + field.getLength(); String prec = "" + field.getPrecision(); String curr = field.getCurrencySymbol(); String group = field.getGroupSymbol(); String decim = field.getDecimalSymbol(); String def = field.getNullString(); String ifNull = field.getIfNullValue(); String trim = field.getTrimTypeDesc(); String rep = field.isRepeated() ? 
COMBO_YES : COMBO_NO; if ( type != null ) { item.setText( 2, type ); } if ( format != null ) { item.setText( 3, format ); } if ( position != null && !"-1".equals( position ) ) { item.setText( 4, position ); } if ( length != null && !"-1".equals( length ) ) { item.setText( 5, length ); } if ( prec != null && !"-1".equals( prec ) ) { item.setText( 6, prec ); } if ( curr != null ) { item.setText( 7, curr ); } if ( decim != null ) { item.setText( 8, decim ); } if ( group != null ) { item.setText( 9, group ); } if ( def != null ) { item.setText( 10, def ); } if ( ifNull != null ) { item.setText( 11, ifNull ); } if ( trim != null ) { item.setText( 12, trim ); } if ( rep != null ) { item.setText( 13, rep ); } } } private void setEncodings() { // Encoding of the text file: if ( !gotEncodings ) { gotEncodings = true; wEncoding.removeAll(); List values = new ArrayList<>( Charset.availableCharsets().values() ); for ( int i = 0; i < values.size(); i++ ) { Charset charSet = values.get( i ); wEncoding.add( charSet.displayName() ); } // Now select the default! String defEncoding = Const.getEnvironmentVariable( "file.encoding", "UTF-8" ); int idx = Const.indexOfString( defEncoding, wEncoding.getItems() ); if ( idx >= 0 ) { wEncoding.select( idx ); } } } private void cancel() { stepname = null; input.setChanged( changed ); dispose(); } private void ok() { if ( Utils.isEmpty( wStepname.getText() ) ) { return; } getInfo( input ); dispose(); } private void getInfo( HadoopFileInputMeta meta ) { stepname = wStepname.getText(); // return value // copy info to HadoopFileInputMeta class (input) meta.inputFiles.acceptingFilenames = wAccFilenames.getSelection(); meta.inputFiles.passingThruFields = wPassThruFields.getSelection(); meta.inputFiles.acceptingField = wAccField.getText(); meta.inputFiles.acceptingStepName = wAccStep.getText(); meta.setAcceptingStep( transMeta.findStep( wAccStep.getText() ) ); meta.content.fileType = wFiletype.getText(); meta.content.fileFormat = wFormat.getText(); meta.content.separator = wSeparator.getText(); meta.content.enclosure = wEnclosure.getText(); meta.content.escapeCharacter = wEscape.getText(); meta.content.rowLimit = Const.toLong( wLimit.getText(), 0L ); meta.content.filenameField = wInclFilenameField.getText(); meta.content.rowNumberField = wInclRownumField.getText(); meta.inputFiles.isaddresult = wAddResult.getSelection(); meta.content.includeFilename = wInclFilename.getSelection(); meta.content.includeRowNumber = wInclRownum.getSelection(); meta.content.rowNumberByFile = wRownumByFile.getSelection(); meta.content.header = wHeader.getSelection(); meta.content.nrHeaderLines = Const.toInt( wNrHeader.getText(), 1 ); meta.content.footer = wFooter.getSelection(); meta.content.nrFooterLines = Const.toInt( wNrFooter.getText(), 1 ); meta.content.lineWrapped = wWraps.getSelection(); meta.content.nrWraps = Const.toInt( wNrWraps.getText(), 1 ); meta.content.layoutPaged = wLayoutPaged.getSelection(); meta.content.nrLinesPerPage = Const.toInt( wNrLinesPerPage.getText(), 80 ); meta.content.nrLinesDocHeader = Const.toInt( wNrLinesDocHeader.getText(), 0 ); meta.content.fileCompression = wCompression.getText(); meta.content.dateFormatLenient = wDateLenient.getSelection(); meta.content.noEmptyLines = wNoempty.getSelection(); meta.content.encoding = wEncoding.getText(); int nrfiles = wFilenameList.getItemCount(); int nrfields = wFields.nrNonEmpty(); int nrfilters = wFilter.nrNonEmpty(); meta.allocate( nrfiles, nrfields, nrfilters ); Map namedClusterURLMappings = new HashMap<>(); String[] 
fileNames = new String[ wFilenameList.getItems( 1 ).length ]; meta.environment = wFilenameList.getItems( 0 ); for ( int i = 0; i < meta.environment.length; i++ ) { String sourceNc = meta.environment[ i ]; sourceNc = sourceNc.equals( LOCAL_ENVIRONMENT ) ? HadoopFileInputMeta.LOCAL_SOURCE_FILE + i : sourceNc; sourceNc = sourceNc.equals( STATIC_ENVIRONMENT ) ? HadoopFileInputMeta.STATIC_SOURCE_FILE + i : sourceNc; sourceNc = sourceNc.equals( S3_ENVIRONMENT ) ? HadoopFileInputMeta.S3_SOURCE_FILE + i : sourceNc; String source = wFilenameList.getItems( 1 )[ i ]; if ( !Utils.isEmpty( source ) ) { fileNames[ i ] = input.loadUrl( source, sourceNc, getMetaStore(), namedClusterURLMappings ); } else { fileNames[ i ] = ""; } } meta.setFileName( fileNames ); meta.inputFiles.fileMask = wFilenameList.getItems( 2 ); meta.inputFiles.setFileRequired( wFilenameList.getItems( 3 ) ); meta.inputFiles.setIncludeSubFolders( wFilenameList.getItems( 4 ) ); input.setNamedClusterURLMapping( namedClusterURLMappings ); for ( int i = 0; i < nrfields; i++ ) { BaseFileField field = new BaseFileField(); TableItem item = wFields.getNonEmpty( i ); field.setName( item.getText( 1 ) ); field.setType( ValueMetaBase.getType( item.getText( 2 ) ) ); field.setFormat( item.getText( 3 ) ); field.setPosition( Const.toInt( item.getText( 4 ), -1 ) ); field.setLength( Const.toInt( item.getText( 5 ), -1 ) ); field.setPrecision( Const.toInt( item.getText( 6 ), -1 ) ); field.setCurrencySymbol( item.getText( 7 ) ); field.setDecimalSymbol( item.getText( 8 ) ); field.setGroupSymbol( item.getText( 9 ) ); field.setNullString( item.getText( 10 ) ); field.setIfNullValue( item.getText( 11 ) ); field.setTrimType( ValueMetaBase.getTrimTypeByDesc( item.getText( 12 ) ) ); field.setRepeated( COMBO_YES.equalsIgnoreCase( item.getText( 13 ) ) ); ( meta.inputFields )[ i ] = field; } for ( int i = 0; i < nrfilters; i++ ) { TableItem item = wFilter.getNonEmpty( i ); TextFileFilter filter = new TextFileFilter(); ( meta.getFilter() )[ i ] = filter; filter.setFilterString( item.getText( 1 ) ); filter.setFilterPosition( Const.toInt( item.getText( 2 ), -1 ) ); filter.setFilterLastLine( COMBO_YES.equalsIgnoreCase( item.getText( 3 ) ) ); filter.setFilterPositive( COMBO_YES.equalsIgnoreCase( item.getText( 4 ) ) ); } // Error handling fields... 
meta.errorHandling.errorIgnored = wErrorIgnored.getSelection(); meta.setErrorLineSkipped( wSkipErrorLines.getSelection() ); meta.setErrorCountField( wErrorCount.getText() ); meta.setErrorFieldsField( wErrorFields.getText() ); meta.setErrorTextField( wErrorText.getText() ); meta.errorHandling.warningFilesDestinationDirectory = wWarnDestDir.getText(); meta.errorHandling.warningFilesExtension = wWarnExt.getText(); meta.errorHandling.errorFilesDestinationDirectory = wErrorDestDir.getText(); meta.errorHandling.errorFilesExtension = wErrorExt.getText(); meta.errorHandling.lineNumberFilesDestinationDirectory = wLineNrDestDir.getText(); meta.errorHandling.lineNumberFilesExtension = wLineNrExt.getText(); // Date format Locale Locale locale = EnvUtil.createLocale( wDateLocale.getText() ); if ( !locale.equals( Locale.getDefault() ) ) { meta.content.dateFormatLocale = locale; } else { meta.content.dateFormatLocale = Locale.getDefault(); } } private void get() { if ( wFiletype.getText().equalsIgnoreCase( "CSV" ) ) { getCSV(); } else { getFixed(); } } // Get the data layout private void getCSV() { HadoopFileInputMeta meta = new HadoopFileInputMeta(); getInfo( meta ); HadoopFileInputMeta previousMeta = (HadoopFileInputMeta) meta.clone(); FileInputList textFileList = meta.getTextFileList( transMeta.getBowl(), transMeta ); InputStream fileInputStream = null; InputStream inputStream = null; StringBuilder lineStringBuilder = new StringBuilder( 256 ); int fileFormatType = meta.getFileFormatTypeNr(); String delimiter = transMeta.environmentSubstitute( meta.content.separator ); if ( textFileList.nrOfFiles() > 0 ) { int clearFields = meta.content.header ? SWT.YES : SWT.NO; int nrInputFields = meta.inputFields.length; if ( meta.content.header && nrInputFields > 0 ) { MessageBox mb = new MessageBox( shell, SWT.YES | SWT.NO | SWT.CANCEL | SWT.ICON_QUESTION ); mb.setMessage( BaseMessages.getString( BASE_PKG, "TextFileInputDialog.ClearFieldList.DialogMessage" ) ); mb.setText( BaseMessages.getString( BASE_PKG, "TextFileInputDialog.ClearFieldList.DialogTitle" ) ); clearFields = mb.open(); if ( clearFields == SWT.CANCEL ) { return; } } try { wFields.table.removeAll(); Table table = wFields.table; inputStream = getInputStream( meta, textFileList ); InputStreamReader reader = getInputStreamReader( meta, inputStream ); if ( clearFields == SWT.YES || !meta.content.header || nrInputFields > 0 ) { // Scan the header-line, determine fields... String line = null; if ( meta.content.header || meta.inputFields.length == 0 ) { line = getLine( meta, textFileList ); if ( line != null ) { // Estimate the number of input fields... // Chop up the line using the delimiter String[] guessedFields = TextFileInputUtils.guessStringsFromLine( new Variables(), log, line, meta, delimiter, StringUtil .substituteHex( meta.content.enclosure ), StringUtil.substituteHex( meta.content.escapeCharacter ) ); for ( int i = 0; i < guessedFields.length; i++ ) { String field = guessedFields[ i ]; if ( field == null || field.length() == 0 || ( nrInputFields == 0 && !meta.content.header ) ) { field = "Field" + ( i + 1 ); } else { // Trim the field field = Const.trim( field ); // Replace all spaces & - with underscore _ field = Const.replace( field, " ", "_" ); field = Const.replace( field, "-", "_" ); } TableItem item = new TableItem( table, SWT.NONE ); item.setText( 1, field ); item.setText( 2, "String" ); // The default type is String... } wFields.setRowNums(); wFields.optWidth( true ); // Copy it... 
getInfo( meta ); } } // Sample a few lines to determine the correct type of the fields... String shellText = BaseMessages.getString( BASE_PKG, "TextFileInputDialog.LinesToSample.DialogTitle" ); String lineText = BaseMessages.getString( BASE_PKG, "TextFileInputDialog.LinesToSample.DialogMessage" ); EnterNumberDialog end = new EnterNumberDialog( shell, 100, shellText, lineText ); int samples = end.open(); if ( samples >= 0 ) { getInfo( meta ); TextFileCSVImportProgressDialog pd = new TextFileCSVImportProgressDialog( shell, meta, transMeta, reader, samples, clearFields == SWT.YES ); String message = pd.open(); if ( message != null ) { wFields.removeAll(); // OK, what's the result of our search? getData( meta ); // If we didn't want the list to be cleared, we need to re-inject the previous values... // if ( clearFields == SWT.NO ) { getFieldsData( previousMeta, true ); wFields.table.setSelection( previousMeta.inputFields.length, wFields.table.getItemCount() - 1 ); } wFields.removeEmptyRows(); wFields.setRowNums(); wFields.optWidth( true ); EnterTextDialog etd = new EnterTextDialog( shell, BaseMessages.getString( BASE_PKG, "TextFileInputDialog.ScanResults.DialogTitle" ), BaseMessages.getString( BASE_PKG, "TextFileInputDialog.ScanResults.DialogMessage" ), message, true ); etd.setReadOnly(); etd.open(); } } } else { MessageBox mb = new MessageBox( shell, SWT.OK | SWT.ICON_ERROR ); mb.setMessage( BaseMessages.getString( BASE_PKG, "TextFileInputDialog.UnableToReadHeaderLine.DialogMessage" ) ); mb.setText( ERROR_TITLE ); mb.open(); } } catch ( IOException e ) { new ErrorDialog( shell, BaseMessages.getString( BASE_PKG, "TextFileInputDialog.IOError.DialogTitle" ), BaseMessages.getString( BASE_PKG, "TextFileInputDialog.IOError.DialogMessage" ), e ); } catch ( KettleException e ) { new ErrorDialog( shell, ERROR_TITLE, BaseMessages .getString( BASE_PKG, "TextFileInputDialog.ErrorGettingFileDesc.DialogMessage" ), e ); } finally { try { inputStream.close(); } catch ( Exception e ) { // Ignore errors } } } else { MessageBox mb = new MessageBox( shell, SWT.OK | SWT.ICON_ERROR ); mb.setMessage( BaseMessages.getString( BASE_PKG, "TextFileInputDialog.NoValidFileFound.DialogMessage" ) ); mb.setText( ERROR_TITLE ); mb.open(); } } public static final int guessPrecision( double d ) { // Round numbers long frac = Math.round( ( d - Math.floor( d ) ) * 1E10 ); // max precision : 10 int precision = 10; // 0,34 --> 3400000000 // 0 to the right --> precision -1! // 0 to the right means frac%10 == 0 while ( precision >= 0 && ( frac % 10 ) == 0 ) { frac /= 10; precision--; } precision++; return precision; } public static final int guessIntLength( double d ) { double flr = Math.floor( d ); int len = 1; while ( flr > 9 ) { flr /= 10; flr = Math.floor( flr ); len++; } return len; } public static final int guessLength( double d ) { int intlen = guessIntLength( d ); int precis = guessPrecision( d ); int length = 1; if ( precis > 0 ) { length = intlen + 1 + precis; } else { length = intlen; } return length; } // Preview the data private void preview() { // Create the XML input step HadoopFileInputMeta oneMeta = new HadoopFileInputMeta(); getInfo( oneMeta ); if ( oneMeta.isAcceptingFilenames() ) { MessageBox mb = new MessageBox( shell, SWT.OK | SWT.ICON_INFORMATION ); // Nothing found that matches your criteria mb.setMessage( BaseMessages.getString( BASE_PKG, "TextFileInputDialog.Dialog.SpecifyASampleFile.Message" ) ); // Sorry! 
mb.setText( BaseMessages.getString( BASE_PKG, "TextFileInputDialog.Dialog.SpecifyASampleFile.Title" ) ); mb.open(); return; } TransMeta previewMeta = TransPreviewFactory.generatePreviewTransformation( transMeta, oneMeta, wStepname.getText() ); EnterNumberDialog numberDialog = new EnterNumberDialog( shell, props.getDefaultPreviewSize(), BaseMessages.getString( BASE_PKG, "TextFileInputDialog.PreviewSize.DialogTitle" ), BaseMessages.getString( BASE_PKG, "TextFileInputDialog.PreviewSize.DialogMessage" ) ); int previewSize = numberDialog.open(); if ( previewSize > 0 ) { TransPreviewProgressDialog progressDialog = new TransPreviewProgressDialog( shell, previewMeta, new String[] { wStepname.getText() }, new int[] { previewSize } ); progressDialog.open(); Trans trans = progressDialog.getTrans(); String loggingText = progressDialog.getLoggingText(); if ( !progressDialog.isCancelled() ) { if ( trans.getResult() != null && trans.getResult().getNrErrors() > 0 ) { EnterTextDialog etd = new EnterTextDialog( shell, BaseMessages.getString( BASE_PKG, "System.Dialog.PreviewError.Title" ), BaseMessages.getString( BASE_PKG, "System.Dialog.PreviewError.Message" ), loggingText, true ); etd.setReadOnly(); etd.open(); } } PreviewRowsDialog prd = new PreviewRowsDialog( shell, transMeta, SWT.NONE, wStepname.getText(), progressDialog .getPreviewRowsMeta( wStepname.getText() ), progressDialog.getPreviewRows( wStepname.getText() ), loggingText ); prd.open(); } } // Get the first x lines private void first( boolean skipHeaders ) { HadoopFileInputMeta info = new HadoopFileInputMeta(); getInfo( info ); try { if ( info.getTextFileList( transMeta.getBowl(), transMeta ).nrOfFiles() > 0 ) { String shellText = BaseMessages.getString( BASE_PKG, "TextFileInputDialog.LinesToView.DialogTitle" ); String lineText = BaseMessages.getString( BASE_PKG, "TextFileInputDialog.LinesToView.DialogMessage" ); EnterNumberDialog end = new EnterNumberDialog( shell, 100, shellText, lineText ); int nrLines = end.open(); if ( nrLines >= 0 ) { List linesList = getFirst( nrLines, skipHeaders ); if ( linesList != null && linesList.size() > 0 ) { String firstlines = ""; for ( int i = 0; i < linesList.size(); i++ ) { firstlines += linesList.get( i ) + Const.CR; } EnterTextDialog etd = new EnterTextDialog( shell, BaseMessages.getString( BASE_PKG, "TextFileInputDialog.ContentOfFirstFile.DialogTitle" ), ( nrLines == 0 ? 
BaseMessages.getString( BASE_PKG, "TextFileInputDialog.ContentOfFirstFile.AllLines.DialogMessage" ) : BaseMessages.getString( BASE_PKG, "TextFileInputDialog.ContentOfFirstFile.NLines.DialogMessage", "" + nrLines ) ), firstlines, true ); etd.setReadOnly(); etd.open(); } else { MessageBox mb = new MessageBox( shell, SWT.OK | SWT.ICON_ERROR ); mb.setMessage( BaseMessages.getString( BASE_PKG, "TextFileInputDialog.UnableToReadLines.DialogMessage" ) ); mb.setText( BaseMessages.getString( BASE_PKG, "TextFileInputDialog.UnableToReadLines.DialogTitle" ) ); mb.open(); } } } else { MessageBox mb = new MessageBox( shell, SWT.OK | SWT.ICON_ERROR ); mb.setMessage( BaseMessages.getString( BASE_PKG, "TextFileInputDialog.NoValidFile.DialogMessage" ) ); mb.setText( ERROR_TITLE ); mb.open(); } } catch ( KettleException e ) { new ErrorDialog( shell, ERROR_TITLE, BaseMessages.getString( BASE_PKG, "TextFileInputDialog.ErrorGettingData.DialogMessage" ), e ); } } // Get the first x lines private List getFirst( int nrlines, boolean skipHeaders ) throws KettleException { HadoopFileInputMeta meta = new HadoopFileInputMeta(); getInfo( meta ); FileInputList textFileList = meta.getTextFileList( transMeta.getBowl(), transMeta ); InputStream fi = null; InputStream f = null; StringBuilder lineStringBuilder = new StringBuilder( 256 ); int fileFormatType = meta.getFileFormatTypeNr(); List retval = new ArrayList<>(); if ( textFileList.nrOfFiles() > 0 ) { FileObject file = textFileList.getFile( 0 ); try { fi = KettleVFS.getInputStream( file ); CompressionProvider provider = CompressionProviderFactory.getInstance().createCompressionProviderInstance( meta.content.fileCompression ); f = provider.createInputStream( fi ); BufferedInputStreamReader reader; if ( meta.getEncoding() != null && meta.getEncoding().length() > 0 ) { reader = new BufferedInputStreamReader( new InputStreamReader( f, meta.getEncoding() ) ); } else { reader = new BufferedInputStreamReader( new InputStreamReader( f ) ); } int linenr = 0; int maxnr = nrlines + ( meta.content.header ? 
meta.content.nrHeaderLines : 0 ); if ( skipHeaders ) { // Skip the header lines first if more then one, it helps us position if ( meta.content.layoutPaged && meta.content.nrLinesDocHeader > 0 ) { int skipped = 0; String line = TextFileInputUtils.getLine( log, reader, fileFormatType, lineStringBuilder ); while ( line != null && skipped < meta.content.nrLinesDocHeader - 1 ) { skipped++; line = TextFileInputUtils.getLine( log, reader, fileFormatType, lineStringBuilder ); } } // Skip the header lines first if more then one, it helps us position if ( meta.content.header && meta.content.nrHeaderLines > 0 ) { int skipped = 0; String line = TextFileInputUtils.getLine( log, reader, fileFormatType, lineStringBuilder ); while ( line != null && skipped < meta.content.nrHeaderLines - 1 ) { skipped++; line = TextFileInputUtils.getLine( log, reader, fileFormatType, lineStringBuilder ); } } } String line = TextFileInputUtils.getLine( log, reader, fileFormatType, lineStringBuilder ); while ( line != null && ( linenr < maxnr || nrlines == 0 ) ) { retval.add( line ); linenr++; line = TextFileInputUtils.getLine( log, reader, fileFormatType, lineStringBuilder ); } } catch ( Exception e ) { throw new KettleException( BaseMessages.getString( BASE_PKG, "TextFileInputDialog.Exception.ErrorGettingFirstLines", "" + nrlines, file.getName().getURI() ), e ); } finally { try { f.close(); } catch ( Exception e ) { // Ignore errors } } } return retval; } private void getFixed() { HadoopFileInputMeta info = new HadoopFileInputMeta(); getInfo( info ); Shell sh = new Shell( shell, SWT.DIALOG_TRIM | SWT.RESIZE | SWT.MAX | SWT.MIN ); try { List rows = getFirst( 50, false ); fields = getFields( info, rows ); final TextFileImportWizardPage1 page1 = new TextFileImportWizardPage1( "1", props, rows, fields ); page1.createControl( sh ); final TextFileImportWizardPage2 page2 = new TextFileImportWizardPage2( "2", props, rows, fields ); page2.createControl( sh ); Wizard wizard = new Wizard() { @Override public boolean performFinish() { wFields.clearAll( false ); for ( int i = 0; i < fields.size(); i++ ) { BaseFileField field = (BaseFileField) fields.get( i ); if ( !field.isIgnored() && field.getLength() > 0 ) { TableItem item = new TableItem( wFields.table, SWT.NONE ); item.setText( 1, field.getName() ); item.setText( 2, "" + field.getTypeDesc() ); item.setText( 3, "" + field.getFormat() ); item.setText( 4, "" + field.getPosition() ); item.setText( 5, field.getLength() < 0 ? "" : "" + field.getLength() ); item.setText( 6, field.getPrecision() < 0 ? "" : "" + field.getPrecision() ); item.setText( 7, "" + field.getCurrencySymbol() ); item.setText( 8, "" + field.getDecimalSymbol() ); item.setText( 9, "" + field.getGroupSymbol() ); item.setText( 10, "" + field.getNullString() ); item.setText( 11, "" + field.getIfNullValue() ); item.setText( 12, "" + field.getTrimTypeDesc() ); item.setText( 13, field.isRepeated() ? 
COMBO_YES : COMBO_NO ); } } int size = wFields.table.getItemCount(); if ( size == 0 ) { new TableItem( wFields.table, SWT.NONE ); } wFields.removeEmptyRows(); wFields.setRowNums(); wFields.optWidth( true ); input.setChanged(); return true; } }; wizard.addPage( page1 ); wizard.addPage( page2 ); WizardDialog wd = new WizardDialog( shell, wizard ); Window.setDefaultImage( GUIResource.getInstance().getImageWizard() ); wd.setMinimumPageSize( 700, 375 ); wd.updateSize(); wd.open(); } catch ( Exception e ) { new ErrorDialog( shell, BaseMessages.getString( BASE_PKG, "TextFileInputDialog.ErrorShowingFixedWizard.DialogTitle" ), BaseMessages.getString( BASE_PKG, "TextFileInputDialog.ErrorShowingFixedWizard.DialogMessage" ), e ); } } private Vector getFields( HadoopFileInputMeta info, List rows ) { Vector result = new Vector<>(); int maxsize = 0; for ( int i = 0; i < rows.size(); i++ ) { int len = rows.get( i ).length(); if ( len > maxsize ) { maxsize = len; } } int prevEnd = 0; int dummynr = 1; for ( int i = 0; i < info.inputFields.length; i++ ) { BaseFileField f = info.inputFields[ i ]; // See if positions are skipped, if this is the case, add dummy fields... if ( f.getPosition() != prevEnd ) { // gap BaseFileField field = new BaseFileField( "Dummy" + dummynr, prevEnd, f.getPosition() - prevEnd ); field.setIgnored( true ); // don't include in result by default. result.add( field ); dummynr++; } BaseFileField field = new BaseFileField( f.getName(), f.getPosition(), f.getLength() ); field.setType( f.getType() ); field.setIgnored( false ); field.setFormat( f.getFormat() ); field.setPrecision( f.getPrecision() ); field.setTrimType( f.getTrimType() ); field.setDecimalSymbol( f.getDecimalSymbol() ); field.setGroupSymbol( f.getGroupSymbol() ); field.setCurrencySymbol( f.getCurrencySymbol() ); field.setRepeated( f.isRepeated() ); field.setNullString( f.getNullString() ); result.add( field ); prevEnd = field.getPosition() + field.getLength(); } if ( info.inputFields.length == 0 ) { BaseFileField field = new BaseFileField( "Field1", 0, maxsize ); result.add( field ); } else { // Take the last field and see if it reached until the maximum... BaseFileField f = info.inputFields[ info.inputFields.length - 1 ]; int pos = f.getPosition(); int len = f.getLength(); if ( pos + len < maxsize ) { // If not, add an extra trailing field! BaseFileField field = new BaseFileField( "Dummy" + dummynr, pos + len, maxsize - pos - len ); field.setIgnored( true ); // don't include in result by default. 
result.add( field ); dummynr++; } } Collections.sort( result ); return result; } @Override public String toString() { return this.getClass().getName(); } private SelectionAdapter getFileDirectoryListener() { return new SelectionAdapter() { @Override public void widgetSelected( SelectionEvent e ) { try { // Setup file type filtering String[] fileFilters = null; String[] fileFilterNames = null; if ( !wCompression.getText().equals( "None" ) ) { fileFilters = new String[] { "*.zip;*.gz", "*.txt;*.csv", "*.csv", "*.txt", "*" }; fileFilterNames = new String[] { BaseMessages.getString( BASE_PKG, "System.FileType.ZIPFiles" ), BaseMessages.getString( BASE_PKG, "TextFileInputDialog.FileType.TextAndCSVFiles" ), BaseMessages.getString( BASE_PKG, "System.FileType.CSVFiles" ), BaseMessages.getString( BASE_PKG, "System.FileType.TextFiles" ), BaseMessages.getString( BASE_PKG, "System.FileType.AllFiles" ) }; } else { fileFilters = new String[] { "*", "*.txt;*.csv", "*.csv", "*.txt" }; fileFilterNames = new String[] { BaseMessages.getString( BASE_PKG, "System.FileType.AllFiles" ), BaseMessages.getString( BASE_PKG, "TextFileInputDialog.FileType.TextAndCSVFiles" ), BaseMessages.getString( BASE_PKG, "System.FileType.CSVFiles" ), BaseMessages.getString( BASE_PKG, "System.FileType.TextFiles" ) }; } String clusterName = wFilenameList.getActiveTableItem().getText( wFilenameList.getActiveTableColumn() - 1 ); String path = wFilenameList.getActiveTableItem().getText( wFilenameList.getActiveTableColumn() ); if ( clusterName.equals( S3_ENVIRONMENT ) && !path.startsWith( Schemes.S3_SCHEME + "://" ) ) { path = Schemes.S3_SCHEME + "://"; } // Get current file FileObject rootFile = null; FileObject initialFile = null; FileObject defaultInitialFile = null; boolean isCluster = false; if ( !clusterName.equals( LOCAL_ENVIRONMENT ) && !clusterName.equals( S3_ENVIRONMENT ) ) { if ( Const.isEmpty( path ) ) { path = "/"; } NamedCluster namedCluster = namedClusterService.getNamedClusterByName( clusterName, getMetaStore() ); if ( namedCluster == null ) { return; } isCluster = true; path = namedCluster.processURLsubstitution( path, getMetaStore(), transMeta ); } boolean resolvedInitialFile = false; if ( path != null ) { String fileName = transMeta.environmentSubstitute( path ); if ( fileName != null && !fileName.equals( "" ) ) { try { initialFile = KettleVFS.getInstance( transMeta.getBowl() ).getFileObject( fileName ); resolvedInitialFile = true; } catch ( Exception ex ) { showMessageAndLog( BaseMessages.getString( PKG, "HadoopFileInputDialog.Connection.Error.title" ), BaseMessages.getString( PKG, "HadoopFileInputDialog.Connection.error" ), ex.getMessage() ); return; } File startFile = new File( System.getProperty( "user.home" ) ); defaultInitialFile = KettleVFS.getInstance( transMeta.getBowl() ) .getFileObject( startFile.getAbsolutePath() ); rootFile = initialFile.getFileSystem().getRoot(); } else { defaultInitialFile = KettleVFS.getInstance( transMeta.getBowl() ) .getFileObject( Spoon.getInstance().getLastFileOpened() ); } } if ( rootFile == null ) { if ( defaultInitialFile == null ) { return; } rootFile = defaultInitialFile.getFileSystem().getRoot(); initialFile = defaultInitialFile; } VfsFileChooserDialog fileChooserDialog = Spoon.getInstance().getVfsFileChooserDialog( rootFile, initialFile ); fileChooserDialog.defaultInitialFile = defaultInitialFile; NamedClusterWidgetImpl namedClusterWidget = null; FileObject selectedFile = null; if ( clusterName.equals( LOCAL_ENVIRONMENT ) ) { selectedFile = fileChooserDialog.open( shell, new 
String[] { "file" }, "file", true, path, fileFilters, fileFilterNames, false, VfsFileChooserDialog.VFS_DIALOG_OPEN_FILE_OR_DIRECTORY, false, false ); } else if ( clusterName.equals( S3_ENVIRONMENT ) ) { selectedFile = fileChooserDialog.open( shell, new String[] { Schemes.S3_SCHEME }, Schemes.S3_SCHEME, true, path, fileFilters, fileFilterNames, false, VfsFileChooserDialog.VFS_DIALOG_OPEN_FILE_OR_DIRECTORY, false, true ); } else { NamedCluster namedCluster = namedClusterService.getNamedClusterByName( clusterName, getMetaStore() ); if ( namedCluster != null ) { if ( namedCluster.isMapr() ) { selectedFile = fileChooserDialog.open( shell, new String[] { Schemes.MAPRFS_SCHEME }, Schemes.MAPRFS_SCHEME, true, path, fileFilters, fileFilterNames, true, VfsFileChooserDialog.VFS_DIALOG_OPEN_FILE_OR_DIRECTORY, false, false ); } else { List customPanels = fileChooserDialog.getCustomVfsUiPanels(); for ( CustomVfsUiPanel panel : customPanels ) { if ( panel instanceof HadoopVfsFileChooserDialog ) { HadoopVfsFileChooserDialog hadoopDialog = ( (HadoopVfsFileChooserDialog) panel ); namedClusterWidget = hadoopDialog.getNamedClusterWidget(); namedClusterWidget.initiate(); hadoopDialog.setNamedCluster( clusterName ); hadoopDialog.initializeConnectionPanel( initialFile ); } } if ( resolvedInitialFile ) { fileChooserDialog.initialFile = initialFile; } selectedFile = fileChooserDialog.open( shell, new String[] { Schemes.HDFS_SCHEME }, Schemes.HDFS_SCHEME, false, path, fileFilters, fileFilterNames, true, VfsFileChooserDialog.VFS_DIALOG_OPEN_FILE_OR_DIRECTORY, false, false ); } } } CustomVfsUiPanel currentPanel = fileChooserDialog.getCurrentPanel(); if ( currentPanel instanceof HadoopVfsFileChooserDialog ) { namedClusterWidget = ( (HadoopVfsFileChooserDialog) currentPanel ).getNamedClusterWidget(); } if ( selectedFile != null ) { String url = selectedFile.getURL().toString(); if ( currentPanel != null ) { if ( currentPanel.getVfsSchemeDisplayText().equals( LOCAL_ENVIRONMENT ) ) { wFilenameList.getActiveTableItem() .setText( wFilenameList.getActiveTableColumn() - 1, LOCAL_ENVIRONMENT ); } else if ( currentPanel.getVfsSchemeDisplayText().equals( S3_ENVIRONMENT ) ) { wFilenameList.getActiveTableItem().setText( wFilenameList.getActiveTableColumn() - 1, S3_ENVIRONMENT ); } else if ( isCluster ) { url = input.getUrlPath( url ); wFilenameList.getActiveTableItem().setText( wFilenameList.getActiveTableColumn() - 1, clusterName ); } } wFilenameList.getActiveTableItem().setText( wFilenameList.getActiveTableColumn(), url ); } } catch ( KettleFileException ex ) { log.logError( BaseMessages.getString( PKG, "HadoopFileInputDialog.FileBrowser.KettleFileException" ) ); } catch ( FileSystemException ex ) { log.logError( BaseMessages.getString( PKG, "HadoopFileInputDialog.FileBrowser.FileSystemException" ) ); } } }; } protected void setComboValues( ColumnInfo colInfo ) { try { String[] comboValues = { LOCAL_ENVIRONMENT, STATIC_ENVIRONMENT, S3_ENVIRONMENT }; String[] namedClusters = namedClusterService.listNames( getMetaStore() ).toArray( new String[ 0 ] ); String[] values = (String[]) ArrayUtils.addAll( comboValues, namedClusters ); colInfo.setComboValues( values ); } catch ( MetaStoreException e ) { log.logError( e.getMessage() ); } } private InputStream getInputStream( HadoopFileInputMeta meta, FileInputList textFileList ) throws IOException { FileObject fileObject = textFileList.getFile( 0 ); InputStream fileInputStream = KettleVFS.getInputStream( fileObject ); CompressionProvider provider = 
CompressionProviderFactory.getInstance().createCompressionProviderInstance( meta.content.fileCompression ); return provider.createInputStream( fileInputStream ); } private InputStreamReader getInputStreamReader( HadoopFileInputMeta meta, InputStream inputStream ) throws IOException { if ( meta.getEncoding() != null && meta.getEncoding().length() > 0 ) { return new InputStreamReader( inputStream, meta.getEncoding() ); } return new InputStreamReader( inputStream ); } private BufferedInputStreamReader getBufferedInputStreamReader( HadoopFileInputMeta meta, InputStream inputStream ) throws IOException { return new BufferedInputStreamReader( getInputStreamReader( meta, inputStream ) ); } private void showMessageAndLog( String title, String message, String messageToLog ) { MessageBox box = new MessageBox( shell ); box.setText( title ); //$NON-NLS-1$ box.setMessage( message ); log.logError( messageToLog ); box.open(); } private String getLine( HadoopFileInputMeta meta, FileInputList textFileList ) throws IOException, KettleFileException { InputStream inputStream = null; BufferedInputStreamReader reader = null; inputStream = getInputStream( meta, textFileList ); reader = getBufferedInputStreamReader( meta, inputStream ); EncodingType encodingType = EncodingType.guessEncodingType( reader.getEncoding() ); StringBuilder lineStringBuilder = new StringBuilder( 256 ); String enclosure = StringUtil.substituteHex( meta.content.enclosure ); String sLine = TextFileInputUtils.getLine( log, reader, encodingType, meta.getFileFormatTypeNr(), lineStringBuilder, enclosure ); inputStream.close(); return sLine; } private class DirectoryBrowserAdapter extends SelectionAdapter { private Text widget; /** * Create a new Directory Browser Adapter that reads/sets the text of {@code widget} to the directory chosen. 
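* On the error handling tab, the destination directory Browse buttons (for example the one beside the line number files directory) are wired to instances of this adapter.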
* * @param widget Text widget linked to the VFS browser */ public DirectoryBrowserAdapter( Text widget ) { this.widget = widget; } @Override public void widgetSelected( SelectionEvent e ) { try { // Get current file FileObject rootFile = null; FileObject initialFile = null; FileObject defaultInitialFile = null; if ( widget.getText() != null ) { String fileName = transMeta.environmentSubstitute( widget.getText() ); if ( fileName != null && !fileName.equals( "" ) ) { initialFile = KettleVFS.getInstance( transMeta.getBowl() ).getFileObject( fileName ); rootFile = initialFile.getFileSystem().getRoot(); } else { defaultInitialFile = KettleVFS.getInstance( transMeta.getBowl() ) .getFileObject( Spoon.getInstance().getLastFileOpened() ); } } defaultInitialFile = KettleVFS.getInstance( transMeta.getBowl() ).getFileObject( "file:///c:/" ); if ( rootFile == null ) { rootFile = defaultInitialFile.getFileSystem().getRoot(); initialFile = defaultInitialFile; } VfsFileChooserDialog fileChooserDialog = Spoon.getInstance().getVfsFileChooserDialog( rootFile, initialFile ); fileChooserDialog.defaultInitialFile = defaultInitialFile; FileObject selectedFile = fileChooserDialog.open( shell, null, Schemes.HDFS_SCHEME, false, null, new String[] { "*.*" }, ALL_FILES_TYPE, VfsFileChooserDialog.VFS_DIALOG_OPEN_DIRECTORY ); if ( selectedFile != null ) { if ( !selectedFile.getType().equals( FileType.FOLDER ) ) { selectedFile = selectedFile.getParent(); } widget.setText( selectedFile.getURL().toString() ); } } catch ( KettleFileException ex ) { log.logError( BaseMessages.getString( PKG, "HadoopFileInputDialog.FileBrowser.KettleFileException" ) ); } catch ( FileSystemException ex ) { log.logError( BaseMessages.getString( PKG, "HadoopFileInputDialog.FileBrowser.FileSystemException" ) ); } } } } ================================================ FILE: kettle-plugins/hdfs/core/src/main/java/org/pentaho/big/data/kettle/plugins/hdfs/trans/HadoopFileInputMeta.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. 
* * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.hdfs.trans; import org.apache.commons.lang.Validate; import org.apache.commons.vfs2.FileName; import org.pentaho.big.data.impl.cluster.NamedClusterManager; import org.pentaho.di.core.Const; import org.pentaho.di.core.annotations.Step; import org.pentaho.di.core.bowl.Bowl; import org.pentaho.di.core.bowl.DefaultBowl; import org.pentaho.di.core.encryption.Encr; import org.pentaho.di.core.exception.KettleException; import org.pentaho.di.core.fileinput.FileInputList; import org.pentaho.di.core.fileinput.NonAccessibleFileObject; import org.pentaho.di.core.injection.Injection; import org.pentaho.di.core.injection.InjectionSupported; import org.pentaho.di.core.util.Utils; import org.pentaho.di.core.variables.VariableSpace; import org.pentaho.di.core.variables.Variables; import org.pentaho.di.core.xml.XMLHandler; import org.pentaho.di.repository.ObjectId; import org.pentaho.di.repository.Repository; import org.pentaho.di.trans.steps.fileinput.text.TextFileInputMeta; import org.pentaho.hadoop.shim.api.cluster.ClusterInitializationException; import org.pentaho.hadoop.shim.api.cluster.NamedCluster; import org.pentaho.hadoop.shim.api.cluster.NamedClusterService; import org.pentaho.hadoop.shim.api.hdfs.HadoopFileSystemLocator; import org.pentaho.metastore.api.IMetaStore; import org.w3c.dom.Node; import java.net.URI; import java.net.URISyntaxException; import java.security.InvalidParameterException; import java.util.HashMap; import java.util.Map; import static org.pentaho.big.data.kettle.plugins.hdfs.trans.HadoopFileInputDialog.LOCAL_ENVIRONMENT; import static org.pentaho.big.data.kettle.plugins.hdfs.trans.HadoopFileInputDialog.S3_ENVIRONMENT; import static org.pentaho.big.data.kettle.plugins.hdfs.trans.HadoopFileInputDialog.STATIC_ENVIRONMENT; import static org.pentaho.big.data.kettle.plugins.hdfs.vfs.Schemes.NAMED_CLUSTER_SCHEME; @Step( id = "HadoopFileInputPlugin", image = "HDI.svg", name = "HadoopFileInputPlugin.Name", description = "HadoopFileInputPlugin.Description", categoryDescription = "i18n:org.pentaho.di.trans.step:BaseStep.Category.BigData", i18nPackageName = "org.pentaho.di.trans.steps.hadoopfileinput" ) @InjectionSupported( localizationPrefix = "HadoopFileInput.Injection.", groups = { "FILENAME_LINES", "FIELDS", "FILTERS" } ) public class HadoopFileInputMeta extends TextFileInputMeta implements HadoopFileMeta { // is not used. Can we delete it? 
@SuppressWarnings( "squid:S1068" ) private VariableSpace variableSpace; private Map namedClusterURLMapping = null; public static final String SOURCE_CONFIGURATION_NAME = "source_configuration_name"; public static final String LOCAL_SOURCE_FILE = "LOCAL-SOURCE-FILE-"; public static final String STATIC_SOURCE_FILE = "STATIC-SOURCE-FILE-"; public static final String S3_SOURCE_FILE = "S3-SOURCE-FILE-"; public static final String S3_DEST_FILE = "S3-DEST-FILE-"; private final NamedClusterService namedClusterService; private final HadoopFileSystemLocator hadoopFileSystemLocator; private final boolean fatalErrorOnHdfsNotFound = "Y".equalsIgnoreCase( System.getProperty( Const.KETTLE_FATAL_ERROR_ON_HDFS_NOT_FOUND, Const.KETTLE_FATAL_ERROR_ON_HDFS_NOT_FOUND_DEFAULT ) ); enum EncryptDirection { ENCRYPT, DECRYPT } /** * The environment of the selected file/folder */ @Injection( name = "ENVIRONMENT", group = "FILENAME_LINES" ) public String[] environment = {}; public HadoopFileInputMeta() { this( NamedClusterManager.getInstance(), null ); } public HadoopFileInputMeta( NamedClusterService namedClusterService, HadoopFileSystemLocator hadoopFileSystemLocator ) { this.namedClusterService = namedClusterService; this.hadoopFileSystemLocator = hadoopFileSystemLocator; namedClusterURLMapping = new HashMap<>(); } @Override protected String loadSource( Node filenode, Node filenamenode, int i, IMetaStore metaStore ) { String source_filefolder = XMLHandler.getNodeValue( filenamenode ); Node sourceNode = XMLHandler.getSubNodeByNr( filenode, SOURCE_CONFIGURATION_NAME, i ); String source = XMLHandler.getNodeValue( sourceNode ); try { return source_filefolder == null ? null : loadUrl( encryptDecryptPassword( source_filefolder, EncryptDirection.DECRYPT ), source, metaStore, namedClusterURLMapping ); } catch ( Exception ex ) { // Do nothing } return null; } @Override protected void saveSource( StringBuilder retVal, String source ) { String namedCluster = namedClusterURLMapping.get( source ); retVal.append( " " ) .append( XMLHandler.addTagValue( "name", encryptDecryptPassword( source, EncryptDirection.ENCRYPT ) ) ); retVal.append( " " ).append( XMLHandler.addTagValue( SOURCE_CONFIGURATION_NAME, namedCluster ) ); } // Receiving metaStore because RepositoryProxy.getMetaStore() returns a hard-coded null @Override protected String loadSourceRep( Repository rep, ObjectId id_step, int i, IMetaStore metaStore ) throws KettleException { String source_filefolder = rep.getStepAttributeString( id_step, i, "file_name" ); String ncName = rep.getJobEntryAttributeString( id_step, i, SOURCE_CONFIGURATION_NAME ); return loadUrl( encryptDecryptPassword( source_filefolder, EncryptDirection.DECRYPT ), ncName, metaStore, namedClusterURLMapping ); } @Override protected void saveSourceRep( Repository rep, ObjectId id_transformation, ObjectId id_step, int i, String fileName ) throws KettleException { String namedCluster = namedClusterURLMapping.get( fileName ); rep.saveStepAttribute( id_transformation, id_step, i, "file_name", encryptDecryptPassword( fileName, EncryptDirection.ENCRYPT ) ); rep.saveStepAttribute( id_transformation, id_step, i, SOURCE_CONFIGURATION_NAME, namedCluster ); } public String loadUrl( String url, String ncName, IMetaStore metastore, Map mappings ) { NamedCluster c = namedClusterService.getNamedClusterByName( ncName, metastore ); if ( c != null ) { url = c.processURLsubstitution( url, metastore, new Variables() ); } if ( !Utils.isEmpty( ncName ) && !Utils.isEmpty( url ) && mappings != null ) { mappings.put( url, ncName ); 
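// Illustrative example (hypothetical values, not part of the original source): with ncName = "devCluster" and
// url = "hdfs://devuser:secret@namenode.example.com:8020/data/in.csv", the put above maps the raw URL to the
// cluster name, while the put below maps the password-free form "hdfs://namenode.example.com:8020/data/in.csv"
// produced by getFriendlyUri() to the same cluster name.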
// in addition to the url as-is, add the public uri string version of the url (hidden password) to the map, // since that is the value that the data-lineage analyzer will have access to for cluster lookup try { mappings.put( getFriendlyUri( url ).toString(), ncName ); } catch ( final Exception e ) { // no-op } } return url; } public void setNamedClusterURLMapping( Map mappings ) { this.namedClusterURLMapping = mappings; } public Map getNamedClusterURLMapping() { return this.namedClusterURLMapping; } @Override public String getClusterName( final String url ) { String clusterName = null; try { URI friendlyUri = getFriendlyUri( url ); clusterName = getClusterNameBy( friendlyUri.toString() ); } catch ( final URISyntaxException e ) { // no-op } return clusterName; } private URI getFriendlyUri( String url ) throws URISyntaxException { URI origUri = new URI( url ); return new URI( origUri.getScheme(), null, origUri.getHost(), origUri.getPort(), origUri.getPath(), origUri.getQuery(), origUri.getFragment() ); } public String getClusterNameBy( String url ) { return this.namedClusterURLMapping.get( url ); } public String getUrlPath( String incomingURL ) { String path = null; FileName fileName = getUrlFileName( incomingURL ); if ( fileName != null ) { String root = fileName.getRootURI(); path = incomingURL.substring( root.length() - 1 ); } return path; } public void setVariableSpace( VariableSpace variableSpace ) { this.variableSpace = variableSpace; } public NamedClusterService getNamedClusterService() { return namedClusterService; } @Override public FileInputList getFileInputList( Bowl bowl, VariableSpace space ) { inputFiles.normalizeAllocation( inputFiles.fileName.length ); for ( int i = 0; i < environment.length; i++ ) { if ( inputFiles.fileName[ i ].contains( "://" ) ) { continue; } String sourceNc = environment[ i ]; sourceNc = sourceNc.equals( LOCAL_ENVIRONMENT ) ? HadoopFileInputMeta.LOCAL_SOURCE_FILE + i : sourceNc; sourceNc = sourceNc.equals( STATIC_ENVIRONMENT ) ? HadoopFileInputMeta.STATIC_SOURCE_FILE + i : sourceNc; sourceNc = sourceNc.equals( S3_ENVIRONMENT ) ? HadoopFileInputMeta.S3_SOURCE_FILE + i : sourceNc; String source = inputFiles.fileName[ i ]; if ( !Utils.isEmpty( source ) ) { inputFiles.fileName[ i ] = loadUrl( source, sourceNc, getParentStepMeta().getParentTransMeta().getMetaStore(), null ); } else { inputFiles.fileName[ i ] = ""; } } FileInputList returnList = createFileList( bowl, space ); for ( int i = 0; i < inputFiles.fileName.length; i++ ) { if ( !canAccessHdfs( inputFiles.fileName[ i ], fatalErrorOnHdfsNotFound ) ) { returnList.addNonAccessibleFile( new NonAccessibleFileObject( inputFiles.fileName[ i ] ) ); } } return returnList; } /** * If the KETTLE_FATAL_ERROR_ON_HDFS_NOT_FOUND property is set to Y, return false if we can find a named cluster that should * be used to access the file AND there is no corresponding HDFS file system for that named cluster. 
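* The {@code checkHdfs} argument carries the value of that property, so when it is {@code false} the named cluster lookup is skipped and the file is treated as accessible.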
* * @param fileName * @return false if the filename should be accessed via a named cluster and HDFS and it cannot and the KETTLE_FATAL_ERROR_ON_HDFS_NOT_FOUND * property is Y */ protected boolean canAccessHdfs( String fileName, boolean checkHdfs ) { if ( checkHdfs ) { try { URI fileUri = new URI( fileName ); NamedCluster c = namedClusterService.getNamedClusterByHost( fileUri.getHost(), getParentStepMeta().getParentTransMeta().getMetaStore() ); if ( null == c && NAMED_CLUSTER_SCHEME.equalsIgnoreCase( fileUri.getScheme() ) ) { c = namedClusterService.getNamedClusterByName( fileUri.getHost(), getParentStepMeta().getParentTransMeta().getMetaStore() ); } if ( null != c && null == hadoopFileSystemLocator.getHadoopFilesystem( c, fileUri ) ) { return false; } } catch ( URISyntaxException | ClusterInitializationException e ) { return false; } } return true; } FileInputList createFileList( VariableSpace space ) { return createFileList( null, space ); } /** * Created for test purposes */ FileInputList createFileList( Bowl bowl, VariableSpace space ) { return FileInputList.createFileList( bowl, space, inputFiles.fileName, inputFiles.fileMask, inputFiles.excludeFileMask, inputFiles.fileRequired, inputFiles.includeSubFolderBoolean() ); } protected String encryptDecryptPassword( String source, EncryptDirection direction ) { Validate.notNull( direction, "'direction' must not be null" ); try { URI uri = new URI( source ); String userInfo = uri.getUserInfo(); if ( userInfo != null ) { String[] userInfoArray = userInfo.split( ":", 2 ); if ( userInfoArray.length < 2 ) { return source; //no password present } String password = userInfoArray[ 1 ]; String processedPassword; switch ( direction ) { case ENCRYPT: processedPassword = Encr.encryptPasswordIfNotUsingVariables( password ); break; case DECRYPT: processedPassword = Encr.decryptPasswordOptionallyEncrypted( password ); break; default: throw new InvalidParameterException( "direction must be 'ENCODE' or 'DECODE'" ); } URI encryptedUri = new URI( uri.getScheme(), userInfoArray[ 0 ] + ":" + processedPassword, uri.getHost(), uri.getPort(), uri.getPath(), uri.getQuery(), uri.getFragment() ); return encryptedUri.toString(); } } catch ( URISyntaxException e ) { return source; // if this is non-parseable as a uri just return the source without changing it. } return source; // Just for the compiler should NEVER hit this code } } ================================================ FILE: kettle-plugins/hdfs/core/src/main/java/org/pentaho/big/data/kettle/plugins/hdfs/trans/HadoopFileMeta.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.hdfs.trans; import org.apache.commons.vfs2.FileName; import org.apache.commons.vfs2.FileSystemException; import org.apache.commons.vfs2.provider.URLFileName; import org.pentaho.di.core.vfs.KettleVFS; /** * Common functionality for a hadoop based {@link org.pentaho.di.trans.steps.file.BaseFileMeta}. 
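* For illustration (hypothetical URL, assuming its scheme is registered with the VFS file system manager): {@code getUrlHostName( "hdfs://namenode.example.com:8020/tmp/in.txt" )} would return {@code "namenode.example.com"}.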
 */
public interface HadoopFileMeta {

  default String getUrlHostName( final String incomingURL ) {
    String hostName = null;
    final FileName fileName = getUrlFileName( incomingURL );
    if ( fileName instanceof URLFileName ) {
      hostName = ( (URLFileName) fileName ).getHostName();
    }
    return hostName;
  }

  default FileName getUrlFileName( final String incomingURL ) {
    FileName fileName = null;
    try {
      final String noVariablesURL = incomingURL.replaceAll( "[${}]", "/" );
      fileName = KettleVFS.getInstance().getFileSystemManager().resolveURI( noVariablesURL );
    } catch ( FileSystemException e ) {
      // no-op
    }
    return fileName;
  }

  String getUrlPath( final String incomingURL );

  String getClusterName( final String incomingURL );
}

================================================
FILE: kettle-plugins/hdfs/core/src/main/java/org/pentaho/big/data/kettle/plugins/hdfs/trans/HadoopFileOutputDialog.java
================================================
/*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.hdfs.trans; import org.apache.commons.vfs2.FileName; import org.apache.commons.vfs2.FileObject; import org.apache.commons.vfs2.FileSystemException; import org.eclipse.swt.SWT; import org.eclipse.swt.custom.CCombo; import org.eclipse.swt.custom.CTabFolder; import org.eclipse.swt.custom.CTabItem; import org.eclipse.swt.events.FocusListener; import org.eclipse.swt.events.ModifyEvent; import org.eclipse.swt.events.ModifyListener; import org.eclipse.swt.events.SelectionAdapter; import org.eclipse.swt.events.SelectionEvent; import org.eclipse.swt.events.ShellAdapter; import org.eclipse.swt.events.ShellEvent; import org.eclipse.swt.graphics.Cursor; import org.eclipse.swt.graphics.Point; import org.eclipse.swt.layout.FormAttachment; import org.eclipse.swt.layout.FormData; import org.eclipse.swt.layout.FormLayout; import org.eclipse.swt.widgets.Button; import org.eclipse.swt.widgets.Combo; import org.eclipse.swt.widgets.Composite; import org.eclipse.swt.widgets.Display; import org.eclipse.swt.widgets.Event; import org.eclipse.swt.widgets.Label; import org.eclipse.swt.widgets.Listener; import org.eclipse.swt.widgets.MessageBox; import org.eclipse.swt.widgets.Shell; import org.eclipse.swt.widgets.TableItem; import org.eclipse.swt.widgets.Text; import org.pentaho.di.core.annotations.PluginDialog; import org.pentaho.hadoop.shim.api.cluster.NamedCluster; import org.pentaho.hadoop.shim.api.cluster.NamedClusterService; import org.pentaho.big.data.kettle.plugins.hdfs.vfs.HadoopVfsFileChooserDialog; import org.pentaho.big.data.kettle.plugins.hdfs.vfs.Schemes; import org.pentaho.big.data.plugins.common.ui.NamedClusterWidgetImpl; import org.pentaho.di.core.Const; import org.pentaho.di.core.Props; import org.pentaho.di.core.compress.CompressionProviderFactory; import org.pentaho.di.core.exception.KettleException; import org.pentaho.di.core.exception.KettleFileException; import org.pentaho.di.core.row.RowMetaInterface; import org.pentaho.di.core.row.ValueMetaInterface; import org.pentaho.di.core.row.value.ValueMetaBase; import org.pentaho.di.core.util.Utils; import org.pentaho.di.core.vfs.KettleVFS; import org.pentaho.di.i18n.BaseMessages; import org.pentaho.di.trans.TransMeta; import
org.pentaho.di.trans.step.BaseStepMeta; import org.pentaho.di.trans.step.StepDialogInterface; import org.pentaho.di.trans.step.StepMeta; import org.pentaho.di.trans.steps.textfileoutput.TextFileField; import org.pentaho.di.trans.steps.textfileoutput.TextFileOutputMeta; import org.pentaho.di.ui.core.dialog.EnterSelectionDialog; import org.pentaho.di.ui.core.dialog.ErrorDialog; import org.pentaho.di.ui.core.widget.ColumnInfo; import org.pentaho.di.ui.core.widget.ComboVar; import org.pentaho.di.ui.core.widget.TableView; import org.pentaho.di.ui.core.widget.TextVar; import org.pentaho.di.ui.spoon.Spoon; import org.pentaho.di.ui.trans.step.BaseStepDialog; import org.pentaho.di.ui.trans.step.TableItemInsertListener; import org.pentaho.runtime.test.RuntimeTester; import org.pentaho.runtime.test.action.RuntimeTestActionService; import org.pentaho.vfs.ui.CustomVfsUiPanel; import org.pentaho.vfs.ui.VfsFileChooserDialog; import java.io.File; import java.nio.charset.Charset; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Set; @PluginDialog( id = "HadoopFileOutputPlugin", image = "HDO.svg", pluginType = PluginDialog.PluginType.STEP, documentationUrl = "pdi-transformation-steps-reference-overview/hadoop-file-output-cp-main-page" ) public class HadoopFileOutputDialog extends BaseStepDialog implements StepDialogInterface { private static Class BASE_PKG = TextFileOutputMeta.class; // for i18n purposes, needed by Translator2!! $NON-NLS-1$ private static Class PKG = HadoopFileOutputMeta.class; private CTabFolder wTabFolder; private FormData fdTabFolder; private CTabItem wFileTab, wContentTab, wFieldsTab; private FormData fdFileComp, fdContentComp, fdFieldsComp; private Label wlFilename; private Button wbFilename; private TextVar wFilename; private FormData fdlFilename, fdbFilename, fdFilename; private Label wlExtension; private TextVar wExtension; private FormData fdlExtension, fdExtension; private Label wlAddStepnr; private Button wAddStepnr; private FormData fdlAddStepnr, fdAddStepnr; private Label wlAddPartnr; private Button wAddPartnr; private FormData fdlAddPartnr, fdAddPartnr; private Label wlAddDate; private Button wAddDate; private FormData fdlAddDate, fdAddDate; private Label wlAddTime; private Button wAddTime; private FormData fdlAddTime, fdAddTime; private Button wbShowFiles; private FormData fdbShowFiles; /* Additional fields */ private Label wlFileNameInField; private Button wFileNameInField; private FormData fdlFileNameInField, fdFileNameInField; private Label wlFileNameField; private ComboVar wFileNameField; private FormData fdlFileNameField, fdFileNameField; /* END */ private Label wlAppend; private Button wAppend; private FormData fdlAppend, fdAppend; private Label wlSeparator; private Button wbSeparator; private TextVar wSeparator; private FormData fdlSeparator, fdbSeparator, fdSeparator; private Label wlEnclosure; private TextVar wEnclosure; private FormData fdlEnclosure, fdEnclosure; private Label wlEndedLine; private Text wEndedLine; private FormData fdlEndedLine, fdEndedLine; private Label wlEnclForced; private Button wEnclForced; private FormData fdlEnclForced, fdEnclForced; private Label wlHeader; private Button wHeader; private FormData fdlHeader, fdHeader; private Label wlFooter; private Button wFooter; private FormData fdlFooter, fdFooter; private Label wlFormat; private CCombo wFormat; private FormData fdlFormat, fdFormat; private Label wlCompression; private CCombo wCompression; private FormData fdlCompression, 
fdCompression; private Label wlEncoding; private CCombo wEncoding; private FormData fdlEncoding, fdEncoding; private Label wlPad; private Button wPad; private FormData fdlPad, fdPad; private Label wlFastDump; private Button wFastDump; private FormData fdlFastDump, fdFastDump; private Label wlSplitEvery; private Text wSplitEvery; private FormData fdlSplitEvery, fdSplitEvery; private TableView wFields; private FormData fdFields; private HadoopFileOutputMeta input; private Button wMinWidth; private Listener lsMinWidth; private boolean gotEncodings = false; private Label wlAddToResult; private Button wAddToResult; private FormData fdlAddToResult, fdAddToResult; private Label wlDoNotOpenNewFileInit; private Button wDoNotOpenNewFileInit; private FormData fdlDoNotOpenNewFileInit, fdDoNotOpenNewFileInit; private Label wlDateTimeFormat; private CCombo wDateTimeFormat; private FormData fdlDateTimeFormat, fdDateTimeFormat; private Label wlSpecifyFormat; private Button wSpecifyFormat; private FormData fdlSpecifyFormat, fdSpecifyFormat; private Label wlCreateParentFolder; private Button wCreateParentFolder; private FormData fdlCreateParentFolder, fdCreateParentFolder; private ColumnInfo[] colinf; private NamedClusterWidgetImpl namedClusterWidget; private Map inputFields; private boolean gotPreviousFields = false; private final NamedClusterService namedClusterService; private final RuntimeTestActionService runtimeTestActionService; private final RuntimeTester runtimeTester; public HadoopFileOutputDialog( Shell parent, Object in, TransMeta transMeta, String sname ) { super( parent, (BaseStepMeta) in, transMeta, sname ); input = (HadoopFileOutputMeta) in; namedClusterService = input.getNamedClusterService(); runtimeTestActionService = input.getRuntimeTestActionService(); runtimeTester = input.getRuntimeTester(); inputFields = new HashMap(); } public String open() { Shell parent = getParent(); Display display = parent.getDisplay(); shell = new Shell( parent, SWT.DIALOG_TRIM | SWT.RESIZE | SWT.MAX | SWT.MIN ); props.setLook( shell ); setShellImage( shell, input ); ModifyListener lsMod = new ModifyListener() { public void modifyText( ModifyEvent e ) { input.setChanged(); } }; changed = input.hasChanged(); FormLayout formLayout = new FormLayout(); formLayout.marginWidth = Const.FORM_MARGIN; formLayout.marginHeight = Const.FORM_MARGIN; shell.setLayout( formLayout ); shell.setText( BaseMessages.getString( PKG, "HadoopFileOutputDialog.DialogTitle" ) ); int middle = props.getMiddlePct(); int margin = Const.MARGIN; // Stepname line wlStepname = new Label( shell, SWT.RIGHT ); wlStepname.setText( BaseMessages.getString( BASE_PKG, "System.Label.StepName" ) ); props.setLook( wlStepname ); fdlStepname = new FormData(); fdlStepname.left = new FormAttachment( 0, 0 ); fdlStepname.top = new FormAttachment( 0, margin ); fdlStepname.right = new FormAttachment( middle, -margin ); wlStepname.setLayoutData( fdlStepname ); wStepname = new Text( shell, SWT.SINGLE | SWT.LEFT | SWT.BORDER ); wStepname.setText( stepname ); props.setLook( wStepname ); wStepname.addModifyListener( lsMod ); fdStepname = new FormData(); fdStepname.left = new FormAttachment( middle, 0 ); fdStepname.top = new FormAttachment( 0, margin ); fdStepname.right = new FormAttachment( 100, 0 ); wStepname.setLayoutData( fdStepname ); wTabFolder = new CTabFolder( shell, SWT.BORDER ); props.setLook( wTabFolder, Props.WIDGET_STYLE_TAB ); wTabFolder.setSimple( false ); // //////////////////////// // START OF FILE TAB/// // / wFileTab = new CTabItem( wTabFolder, 
SWT.NONE ); wFileTab.setText( BaseMessages.getString( BASE_PKG, "TextFileOutputDialog.FileTab.TabTitle" ) ); Composite wFileComp = new Composite( wTabFolder, SWT.NONE ); props.setLook( wFileComp ); FormLayout fileLayout = new FormLayout(); fileLayout.marginWidth = 3; fileLayout.marginHeight = 3; wFileComp.setLayout( fileLayout ); namedClusterWidget = new NamedClusterWidgetImpl( wFileComp, true, namedClusterService, runtimeTestActionService, runtimeTester, false ); namedClusterWidget.initiate(); props.setLook( namedClusterWidget ); FormData fd = new FormData(); fd.right = new FormAttachment( 100, 0 ); fd.top = new FormAttachment( 0, 0 ); fd.left = new FormAttachment( 0, 235 ); namedClusterWidget.setLayoutData( fd ); namedClusterWidget.addSelectionListener( new SelectionAdapter() { public void widgetSelected( SelectionEvent evt ) { String ncName = ( (Combo) evt.getSource() ).getText(); NamedCluster nc = namedClusterService.getNamedClusterByName( ncName, getMetaStore() ); if ( nc != null ) { HadoopFileOutputMeta meta = (HadoopFileOutputMeta) input; meta.setSourceConfigurationName( nc.getName() ); } } } ); // Filename line wlFilename = new Label( wFileComp, SWT.RIGHT ); wlFilename.setText( BaseMessages.getString( PKG, "HadoopFileOutputDialog.Filename.Label" ) ); props.setLook( wlFilename ); fdlFilename = new FormData(); fdlFilename.left = new FormAttachment( 0, 0 ); fdlFilename.top = new FormAttachment( namedClusterWidget, margin ); fdlFilename.right = new FormAttachment( middle, -margin ); wlFilename.setLayoutData( fdlFilename ); wbFilename = new Button( wFileComp, SWT.PUSH | SWT.CENTER ); props.setLook( wbFilename ); wbFilename.setText( BaseMessages.getString( BASE_PKG, "System.Button.Browse" ) ); fdbFilename = new FormData(); fdbFilename.right = new FormAttachment( 100, 0 ); fdbFilename.top = new FormAttachment( namedClusterWidget, 0 ); wbFilename.setLayoutData( fdbFilename ); wFilename = new TextVar( transMeta, wFileComp, SWT.SINGLE | SWT.LEFT | SWT.BORDER ); props.setLook( wFilename ); wFilename.addModifyListener( lsMod ); fdFilename = new FormData(); fdFilename.left = new FormAttachment( middle, 0 ); fdFilename.top = new FormAttachment( namedClusterWidget, margin ); fdFilename.right = new FormAttachment( wbFilename, -margin ); wFilename.setLayoutData( fdFilename ); // Create Parent Folder wlCreateParentFolder = new Label( wFileComp, SWT.RIGHT ); wlCreateParentFolder.setText( BaseMessages.getString( BASE_PKG, "TextFileOutputDialog.CreateParentFolder.Label" ) ); props.setLook( wlCreateParentFolder ); fdlCreateParentFolder = new FormData(); fdlCreateParentFolder.left = new FormAttachment( 0, 0 ); fdlCreateParentFolder.top = new FormAttachment( wFilename, margin ); fdlCreateParentFolder.right = new FormAttachment( middle, -margin ); wlCreateParentFolder.setLayoutData( fdlCreateParentFolder ); wCreateParentFolder = new Button( wFileComp, SWT.CHECK ); wCreateParentFolder.setToolTipText( BaseMessages.getString( BASE_PKG, "TextFileOutputDialog.CreateParentFolder.Tooltip" ) ); props.setLook( wCreateParentFolder ); fdCreateParentFolder = new FormData(); fdCreateParentFolder.left = new FormAttachment( middle, 0 ); fdCreateParentFolder.top = new FormAttachment( wFilename, margin ); fdCreateParentFolder.right = new FormAttachment( 100, 0 ); wCreateParentFolder.setLayoutData( fdCreateParentFolder ); wCreateParentFolder.addSelectionListener( new SelectionAdapter() { public void widgetSelected( SelectionEvent e ) { input.setChanged(); } } ); // Open new File at Init wlDoNotOpenNewFileInit = new Label( 
wFileComp, SWT.RIGHT ); wlDoNotOpenNewFileInit.setText( BaseMessages .getString( BASE_PKG, "TextFileOutputDialog.DoNotOpenNewFileInit.Label" ) ); props.setLook( wlDoNotOpenNewFileInit ); fdlDoNotOpenNewFileInit = new FormData(); fdlDoNotOpenNewFileInit.left = new FormAttachment( 0, 0 ); fdlDoNotOpenNewFileInit.top = new FormAttachment( wCreateParentFolder, margin ); fdlDoNotOpenNewFileInit.right = new FormAttachment( middle, -margin ); wlDoNotOpenNewFileInit.setLayoutData( fdlDoNotOpenNewFileInit ); wDoNotOpenNewFileInit = new Button( wFileComp, SWT.CHECK ); wDoNotOpenNewFileInit.setToolTipText( BaseMessages.getString( BASE_PKG, "TextFileOutputDialog.DoNotOpenNewFileInit.Tooltip" ) ); props.setLook( wDoNotOpenNewFileInit ); fdDoNotOpenNewFileInit = new FormData(); fdDoNotOpenNewFileInit.left = new FormAttachment( middle, 0 ); fdDoNotOpenNewFileInit.top = new FormAttachment( wCreateParentFolder, margin ); fdDoNotOpenNewFileInit.right = new FormAttachment( 100, 0 ); wDoNotOpenNewFileInit.setLayoutData( fdDoNotOpenNewFileInit ); wDoNotOpenNewFileInit.addSelectionListener( new SelectionAdapter() { public void widgetSelected( SelectionEvent e ) { input.setChanged(); } } ); /* next Lines */ // FileNameInField line wlFileNameInField = new Label( wFileComp, SWT.RIGHT ); wlFileNameInField.setText( BaseMessages.getString( BASE_PKG, "TextFileOutputDialog.FileNameInField.Label" ) ); props.setLook( wlFileNameInField ); fdlFileNameInField = new FormData(); fdlFileNameInField.left = new FormAttachment( 0, 0 ); fdlFileNameInField.top = new FormAttachment( wDoNotOpenNewFileInit, margin ); fdlFileNameInField.right = new FormAttachment( middle, -margin ); wlFileNameInField.setLayoutData( fdlFileNameInField ); wFileNameInField = new Button( wFileComp, SWT.CHECK ); props.setLook( wFileNameInField ); fdFileNameInField = new FormData(); fdFileNameInField.left = new FormAttachment( middle, 0 ); fdFileNameInField.top = new FormAttachment( wDoNotOpenNewFileInit, margin ); fdFileNameInField.right = new FormAttachment( 100, 0 ); wFileNameInField.setLayoutData( fdFileNameInField ); wFileNameInField.addSelectionListener( new SelectionAdapter() { public void widgetSelected( SelectionEvent e ) { input.setChanged(); activeFileNameField(); } } ); // FileNameField Line wlFileNameField = new Label( wFileComp, SWT.RIGHT ); wlFileNameField.setText( BaseMessages.getString( BASE_PKG, "TextFileOutputDialog.FileNameField.Label" ) ); //$NON-NLS-1$ props.setLook( wlFileNameField ); fdlFileNameField = new FormData(); fdlFileNameField.left = new FormAttachment( 0, 0 ); fdlFileNameField.right = new FormAttachment( middle, -margin ); fdlFileNameField.top = new FormAttachment( wFileNameInField, margin ); wlFileNameField.setLayoutData( fdlFileNameField ); wFileNameField = new ComboVar( transMeta, wFileComp, SWT.SINGLE | SWT.LEFT | SWT.BORDER ); props.setLook( wFileNameField ); wFileNameField.addModifyListener( lsMod ); fdFileNameField = new FormData(); fdFileNameField.left = new FormAttachment( middle, 0 ); fdFileNameField.top = new FormAttachment( wFileNameInField, margin ); fdFileNameField.right = new FormAttachment( 100, 0 ); wFileNameField.setLayoutData( fdFileNameField ); wFileNameField.setEnabled( false ); wFileNameField.addFocusListener( new FocusListener() { public void focusLost( org.eclipse.swt.events.FocusEvent e ) { } public void focusGained( org.eclipse.swt.events.FocusEvent e ) { Cursor busy = new Cursor( shell.getDisplay(), SWT.CURSOR_WAIT ); shell.setCursor( busy ); getFields(); shell.setCursor( null ); busy.dispose(); } } 
); /* End */ // Extension line wlExtension = new Label( wFileComp, SWT.RIGHT ); wlExtension.setText( BaseMessages.getString( BASE_PKG, "System.Label.Extension" ) ); props.setLook( wlExtension ); fdlExtension = new FormData(); fdlExtension.left = new FormAttachment( 0, 0 ); fdlExtension.top = new FormAttachment( wFileNameField, margin ); fdlExtension.right = new FormAttachment( middle, -margin ); wlExtension.setLayoutData( fdlExtension ); wExtension = new TextVar( transMeta, wFileComp, SWT.SINGLE | SWT.LEFT | SWT.BORDER ); wExtension.setText( "" ); props.setLook( wExtension ); wExtension.addModifyListener( lsMod ); fdExtension = new FormData(); fdExtension.left = new FormAttachment( middle, 0 ); fdExtension.top = new FormAttachment( wFileNameField, margin ); fdExtension.right = new FormAttachment( 100, 0 ); wExtension.setLayoutData( fdExtension ); // Create multi-part file? wlAddStepnr = new Label( wFileComp, SWT.RIGHT ); wlAddStepnr.setText( BaseMessages.getString( BASE_PKG, "TextFileOutputDialog.AddStepnr.Label" ) ); props.setLook( wlAddStepnr ); fdlAddStepnr = new FormData(); fdlAddStepnr.left = new FormAttachment( 0, 0 ); fdlAddStepnr.top = new FormAttachment( wExtension, margin ); fdlAddStepnr.right = new FormAttachment( middle, -margin ); wlAddStepnr.setLayoutData( fdlAddStepnr ); wAddStepnr = new Button( wFileComp, SWT.CHECK ); props.setLook( wAddStepnr ); fdAddStepnr = new FormData(); fdAddStepnr.left = new FormAttachment( middle, 0 ); fdAddStepnr.top = new FormAttachment( wExtension, margin ); fdAddStepnr.right = new FormAttachment( 100, 0 ); wAddStepnr.setLayoutData( fdAddStepnr ); wAddStepnr.addSelectionListener( new SelectionAdapter() { public void widgetSelected( SelectionEvent e ) { input.setChanged(); } } ); // Create multi-part file? wlAddPartnr = new Label( wFileComp, SWT.RIGHT ); wlAddPartnr.setText( BaseMessages.getString( BASE_PKG, "TextFileOutputDialog.AddPartnr.Label" ) ); props.setLook( wlAddPartnr ); fdlAddPartnr = new FormData(); fdlAddPartnr.left = new FormAttachment( 0, 0 ); fdlAddPartnr.top = new FormAttachment( wAddStepnr, margin ); fdlAddPartnr.right = new FormAttachment( middle, -margin ); wlAddPartnr.setLayoutData( fdlAddPartnr ); wAddPartnr = new Button( wFileComp, SWT.CHECK ); props.setLook( wAddPartnr ); fdAddPartnr = new FormData(); fdAddPartnr.left = new FormAttachment( middle, 0 ); fdAddPartnr.top = new FormAttachment( wAddStepnr, margin ); fdAddPartnr.right = new FormAttachment( 100, 0 ); wAddPartnr.setLayoutData( fdAddPartnr ); wAddPartnr.addSelectionListener( new SelectionAdapter() { public void widgetSelected( SelectionEvent e ) { input.setChanged(); } } ); // Create multi-part file? 
wlAddDate = new Label( wFileComp, SWT.RIGHT ); wlAddDate.setText( BaseMessages.getString( BASE_PKG, "TextFileOutputDialog.AddDate.Label" ) ); props.setLook( wlAddDate ); fdlAddDate = new FormData(); fdlAddDate.left = new FormAttachment( 0, 0 ); fdlAddDate.top = new FormAttachment( wAddPartnr, margin ); fdlAddDate.right = new FormAttachment( middle, -margin ); wlAddDate.setLayoutData( fdlAddDate ); wAddDate = new Button( wFileComp, SWT.CHECK ); props.setLook( wAddDate ); fdAddDate = new FormData(); fdAddDate.left = new FormAttachment( middle, 0 ); fdAddDate.top = new FormAttachment( wAddPartnr, margin ); fdAddDate.right = new FormAttachment( 100, 0 ); wAddDate.setLayoutData( fdAddDate ); wAddDate.addSelectionListener( new SelectionAdapter() { public void widgetSelected( SelectionEvent e ) { input.setChanged(); // System.out.println("wAddDate.getSelection()="+wAddDate.getSelection()); } } ); // Create multi-part file? wlAddTime = new Label( wFileComp, SWT.RIGHT ); wlAddTime.setText( BaseMessages.getString( BASE_PKG, "TextFileOutputDialog.AddTime.Label" ) ); props.setLook( wlAddTime ); fdlAddTime = new FormData(); fdlAddTime.left = new FormAttachment( 0, 0 ); fdlAddTime.top = new FormAttachment( wAddDate, margin ); fdlAddTime.right = new FormAttachment( middle, -margin ); wlAddTime.setLayoutData( fdlAddTime ); wAddTime = new Button( wFileComp, SWT.CHECK ); props.setLook( wAddTime ); fdAddTime = new FormData(); fdAddTime.left = new FormAttachment( middle, 0 ); fdAddTime.top = new FormAttachment( wAddDate, margin ); fdAddTime.right = new FormAttachment( 100, 0 ); wAddTime.setLayoutData( fdAddTime ); wAddTime.addSelectionListener( new SelectionAdapter() { public void widgetSelected( SelectionEvent e ) { input.setChanged(); } } ); // Specify date time format? 
wlSpecifyFormat = new Label( wFileComp, SWT.RIGHT ); wlSpecifyFormat.setText( BaseMessages.getString( BASE_PKG, "TextFileOutputDialog.SpecifyFormat.Label" ) ); props.setLook( wlSpecifyFormat ); fdlSpecifyFormat = new FormData(); fdlSpecifyFormat.left = new FormAttachment( 0, 0 ); fdlSpecifyFormat.top = new FormAttachment( wAddTime, margin ); fdlSpecifyFormat.right = new FormAttachment( middle, -margin ); wlSpecifyFormat.setLayoutData( fdlSpecifyFormat ); wSpecifyFormat = new Button( wFileComp, SWT.CHECK ); props.setLook( wSpecifyFormat ); wSpecifyFormat.setToolTipText( BaseMessages.getString( BASE_PKG, "TextFileOutputDialog.SpecifyFormat.Tooltip" ) ); fdSpecifyFormat = new FormData(); fdSpecifyFormat.left = new FormAttachment( middle, 0 ); fdSpecifyFormat.top = new FormAttachment( wAddTime, margin ); fdSpecifyFormat.right = new FormAttachment( 100, 0 ); wSpecifyFormat.setLayoutData( fdSpecifyFormat ); wSpecifyFormat.addSelectionListener( new SelectionAdapter() { public void widgetSelected( SelectionEvent e ) { input.setChanged(); setDateTimeFormat(); } } ); // DateTimeFormat wlDateTimeFormat = new Label( wFileComp, SWT.RIGHT ); wlDateTimeFormat.setText( BaseMessages.getString( BASE_PKG, "TextFileOutputDialog.DateTimeFormat.Label" ) ); props.setLook( wlDateTimeFormat ); fdlDateTimeFormat = new FormData(); fdlDateTimeFormat.left = new FormAttachment( 0, 0 ); fdlDateTimeFormat.top = new FormAttachment( wSpecifyFormat, margin ); fdlDateTimeFormat.right = new FormAttachment( middle, -margin ); wlDateTimeFormat.setLayoutData( fdlDateTimeFormat ); wDateTimeFormat = new CCombo( wFileComp, SWT.BORDER | SWT.READ_ONLY ); wDateTimeFormat.setEditable( true ); props.setLook( wDateTimeFormat ); wDateTimeFormat.addModifyListener( lsMod ); fdDateTimeFormat = new FormData(); fdDateTimeFormat.left = new FormAttachment( middle, 0 ); fdDateTimeFormat.top = new FormAttachment( wSpecifyFormat, margin ); fdDateTimeFormat.right = new FormAttachment( 100, 0 ); wDateTimeFormat.setLayoutData( fdDateTimeFormat ); String[] dates = Const.getDateFormats(); fillWithSupportedDateFormats( wDateTimeFormat, dates ); wbShowFiles = new Button( wFileComp, SWT.PUSH | SWT.CENTER ); props.setLook( wbShowFiles ); wbShowFiles.setText( BaseMessages.getString( BASE_PKG, "TextFileOutputDialog.ShowFiles.Button" ) ); fdbShowFiles = new FormData(); fdbShowFiles.left = new FormAttachment( middle, 0 ); fdbShowFiles.top = new FormAttachment( wDateTimeFormat, margin * 2 ); wbShowFiles.setLayoutData( fdbShowFiles ); wbShowFiles.addSelectionListener( new SelectionAdapter() { public void widgetSelected( SelectionEvent e ) { HadoopFileOutputMeta tfoi = new HadoopFileOutputMeta( namedClusterService, runtimeTestActionService, runtimeTester ); getInfo( tfoi ); String[] files = tfoi.getFiles( transMeta ); if ( files != null && files.length > 0 ) { EnterSelectionDialog esd = new EnterSelectionDialog( shell, files, BaseMessages.getString( BASE_PKG, "TextFileOutputDialog.SelectOutputFiles.DialogTitle" ), BaseMessages.getString( BASE_PKG, "TextFileOutputDialog.SelectOutputFiles.DialogMessage" ) ); esd.setViewOnly(); esd.open(); } else { MessageBox mb = new MessageBox( shell, SWT.OK | SWT.ICON_ERROR ); mb.setMessage( BaseMessages.getString( BASE_PKG, "TextFileOutputDialog.NoFilesFound.DialogMessage" ) ); mb.setText( BaseMessages.getString( BASE_PKG, "System.Dialog.Error.Title" ) ); mb.open(); } } } ); // Add File to the result files name wlAddToResult = new Label( wFileComp, SWT.RIGHT ); wlAddToResult.setText( BaseMessages.getString( BASE_PKG, 
"TextFileOutputDialog.AddFileToResult.Label" ) ); props.setLook( wlAddToResult ); fdlAddToResult = new FormData(); fdlAddToResult.left = new FormAttachment( 0, 0 ); fdlAddToResult.top = new FormAttachment( wbShowFiles, 2 * margin ); fdlAddToResult.right = new FormAttachment( middle, -margin ); wlAddToResult.setLayoutData( fdlAddToResult ); wAddToResult = new Button( wFileComp, SWT.CHECK ); wAddToResult.setToolTipText( BaseMessages.getString( BASE_PKG, "TextFileOutputDialog.AddFileToResult.Tooltip" ) ); props.setLook( wAddToResult ); fdAddToResult = new FormData(); fdAddToResult.left = new FormAttachment( middle, 0 ); fdAddToResult.top = new FormAttachment( wbShowFiles, 2 * margin ); fdAddToResult.right = new FormAttachment( 100, 0 ); wAddToResult.setLayoutData( fdAddToResult ); SelectionAdapter lsSelR = new SelectionAdapter() { public void widgetSelected( SelectionEvent arg0 ) { input.setChanged(); } }; wAddToResult.addSelectionListener( lsSelR ); fdFileComp = new FormData(); fdFileComp.left = new FormAttachment( 0, 0 ); fdFileComp.top = new FormAttachment( 0, 0 ); fdFileComp.right = new FormAttachment( 100, 0 ); fdFileComp.bottom = new FormAttachment( 100, 0 ); wFileComp.setLayoutData( fdFileComp ); wFileComp.layout(); wFileTab.setControl( wFileComp ); // /////////////////////////////////////////////////////////// // / END OF FILE TAB // /////////////////////////////////////////////////////////// // //////////////////////// // START OF CONTENT TAB/// // / wContentTab = new CTabItem( wTabFolder, SWT.NONE ); wContentTab.setText( BaseMessages.getString( BASE_PKG, "TextFileOutputDialog.ContentTab.TabTitle" ) ); FormLayout contentLayout = new FormLayout(); contentLayout.marginWidth = 3; contentLayout.marginHeight = 3; Composite wContentComp = new Composite( wTabFolder, SWT.NONE ); props.setLook( wContentComp ); wContentComp.setLayout( contentLayout ); // Append to end of file? 
wlAppend = new Label( wContentComp, SWT.RIGHT ); wlAppend.setText( BaseMessages.getString( BASE_PKG, "TextFileOutputDialog.Append.Label" ) ); props.setLook( wlAppend ); fdlAppend = new FormData(); fdlAppend.left = new FormAttachment( 0, 0 ); fdlAppend.top = new FormAttachment( 0, 0 ); fdlAppend.right = new FormAttachment( middle, -margin ); wlAppend.setLayoutData( fdlAppend ); wAppend = new Button( wContentComp, SWT.CHECK ); props.setLook( wAppend ); fdAppend = new FormData(); fdAppend.left = new FormAttachment( middle, 0 ); fdAppend.top = new FormAttachment( 0, 0 ); fdAppend.right = new FormAttachment( 100, 0 ); wAppend.setLayoutData( fdAppend ); wAppend.addSelectionListener( new SelectionAdapter() { public void widgetSelected( SelectionEvent e ) { input.setChanged(); } } ); wlSeparator = new Label( wContentComp, SWT.RIGHT ); wlSeparator.setText( BaseMessages.getString( BASE_PKG, "TextFileOutputDialog.Separator.Label" ) ); props.setLook( wlSeparator ); fdlSeparator = new FormData(); fdlSeparator.left = new FormAttachment( 0, 0 ); fdlSeparator.top = new FormAttachment( wAppend, margin ); fdlSeparator.right = new FormAttachment( middle, -margin ); wlSeparator.setLayoutData( fdlSeparator ); wbSeparator = new Button( wContentComp, SWT.PUSH | SWT.CENTER ); props.setLook( wbSeparator ); wbSeparator.setText( BaseMessages.getString( BASE_PKG, "TextFileOutputDialog.Separator.Button" ) ); fdbSeparator = new FormData(); fdbSeparator.right = new FormAttachment( 100, 0 ); fdbSeparator.top = new FormAttachment( wAppend, 0 ); wbSeparator.setLayoutData( fdbSeparator ); wbSeparator.addSelectionListener( new SelectionAdapter() { public void widgetSelected( SelectionEvent se ) { // wSeparator.insert("\t"); wSeparator.getTextWidget().insert( "\t" ); } } ); wSeparator = new TextVar( transMeta, wContentComp, SWT.SINGLE | SWT.LEFT | SWT.BORDER ); props.setLook( wSeparator ); wSeparator.addModifyListener( lsMod ); fdSeparator = new FormData(); fdSeparator.left = new FormAttachment( middle, 0 ); fdSeparator.top = new FormAttachment( wAppend, margin ); fdSeparator.right = new FormAttachment( wbSeparator, -margin ); wSeparator.setLayoutData( fdSeparator ); // Enclosure line... 
wlEnclosure = new Label( wContentComp, SWT.RIGHT ); wlEnclosure.setText( BaseMessages.getString( BASE_PKG, "TextFileOutputDialog.Enclosure.Label" ) ); props.setLook( wlEnclosure ); fdlEnclosure = new FormData(); fdlEnclosure.left = new FormAttachment( 0, 0 ); fdlEnclosure.top = new FormAttachment( wSeparator, margin ); fdlEnclosure.right = new FormAttachment( middle, -margin ); wlEnclosure.setLayoutData( fdlEnclosure ); wEnclosure = new TextVar( transMeta, wContentComp, SWT.SINGLE | SWT.LEFT | SWT.BORDER ); props.setLook( wEnclosure ); wEnclosure.addModifyListener( lsMod ); fdEnclosure = new FormData(); fdEnclosure.left = new FormAttachment( middle, 0 ); fdEnclosure.top = new FormAttachment( wSeparator, margin ); fdEnclosure.right = new FormAttachment( 100, 0 ); wEnclosure.setLayoutData( fdEnclosure ); wlEnclForced = new Label( wContentComp, SWT.RIGHT ); wlEnclForced.setText( BaseMessages.getString( BASE_PKG, "TextFileOutputDialog.EnclForced.Label" ) ); props.setLook( wlEnclForced ); fdlEnclForced = new FormData(); fdlEnclForced.left = new FormAttachment( 0, 0 ); fdlEnclForced.top = new FormAttachment( wEnclosure, margin ); fdlEnclForced.right = new FormAttachment( middle, -margin ); wlEnclForced.setLayoutData( fdlEnclForced ); wEnclForced = new Button( wContentComp, SWT.CHECK ); props.setLook( wEnclForced ); fdEnclForced = new FormData(); fdEnclForced.left = new FormAttachment( middle, 0 ); fdEnclForced.top = new FormAttachment( wEnclosure, margin ); fdEnclForced.right = new FormAttachment( 100, 0 ); wEnclForced.setLayoutData( fdEnclForced ); wEnclForced.addSelectionListener( new SelectionAdapter() { public void widgetSelected( SelectionEvent e ) { input.setChanged(); } } ); wlHeader = new Label( wContentComp, SWT.RIGHT ); wlHeader.setText( BaseMessages.getString( BASE_PKG, "TextFileOutputDialog.Header.Label" ) ); props.setLook( wlHeader ); fdlHeader = new FormData(); fdlHeader.left = new FormAttachment( 0, 0 ); fdlHeader.top = new FormAttachment( wEnclForced, margin ); fdlHeader.right = new FormAttachment( middle, -margin ); wlHeader.setLayoutData( fdlHeader ); wHeader = new Button( wContentComp, SWT.CHECK ); props.setLook( wHeader ); fdHeader = new FormData(); fdHeader.left = new FormAttachment( middle, 0 ); fdHeader.top = new FormAttachment( wEnclForced, margin ); fdHeader.right = new FormAttachment( 100, 0 ); wHeader.setLayoutData( fdHeader ); wHeader.addSelectionListener( new SelectionAdapter() { public void widgetSelected( SelectionEvent e ) { input.setChanged(); } } ); wlFooter = new Label( wContentComp, SWT.RIGHT ); wlFooter.setText( BaseMessages.getString( BASE_PKG, "TextFileOutputDialog.Footer.Label" ) ); props.setLook( wlFooter ); fdlFooter = new FormData(); fdlFooter.left = new FormAttachment( 0, 0 ); fdlFooter.top = new FormAttachment( wHeader, margin ); fdlFooter.right = new FormAttachment( middle, -margin ); wlFooter.setLayoutData( fdlFooter ); wFooter = new Button( wContentComp, SWT.CHECK ); props.setLook( wFooter ); fdFooter = new FormData(); fdFooter.left = new FormAttachment( middle, 0 ); fdFooter.top = new FormAttachment( wHeader, margin ); fdFooter.right = new FormAttachment( 100, 0 ); wFooter.setLayoutData( fdFooter ); wFooter.addSelectionListener( new SelectionAdapter() { public void widgetSelected( SelectionEvent e ) { input.setChanged(); } } ); wlFormat = new Label( wContentComp, SWT.RIGHT ); wlFormat.setText( BaseMessages.getString( BASE_PKG, "TextFileOutputDialog.Format.Label" ) ); props.setLook( wlFormat ); fdlFormat = new FormData(); fdlFormat.left = new 
FormAttachment( 0, 0 ); fdlFormat.top = new FormAttachment( wFooter, margin ); fdlFormat.right = new FormAttachment( middle, -margin ); wlFormat.setLayoutData( fdlFormat ); wFormat = new CCombo( wContentComp, SWT.BORDER | SWT.READ_ONLY ); wFormat.setText( BaseMessages.getString( BASE_PKG, "TextFileOutputDialog.Format.Label" ) ); props.setLook( wFormat ); for ( int i = 0; i < HadoopFileOutputMeta.formatMapperLineTerminator.length; i++ ) { wFormat.add( BaseMessages.getString( BASE_PKG, "TextFileOutputDialog.Format." + HadoopFileOutputMeta.formatMapperLineTerminator[i] ) ); } wFormat.select( 0 ); wFormat.addModifyListener( lsMod ); fdFormat = new FormData(); fdFormat.left = new FormAttachment( middle, 0 ); fdFormat.top = new FormAttachment( wFooter, margin ); fdFormat.right = new FormAttachment( 100, 0 ); wFormat.setLayoutData( fdFormat ); wlCompression = new Label( wContentComp, SWT.RIGHT ); wlCompression.setText( BaseMessages.getString( BASE_PKG, "TextFileOutputDialog.Compression.Label" ) ); props.setLook( wlCompression ); fdlCompression = new FormData(); fdlCompression.left = new FormAttachment( 0, 0 ); fdlCompression.top = new FormAttachment( wFormat, margin ); fdlCompression.right = new FormAttachment( middle, -margin ); wlCompression.setLayoutData( fdlCompression ); wCompression = new CCombo( wContentComp, SWT.BORDER | SWT.READ_ONLY ); wCompression.setText( BaseMessages.getString( BASE_PKG, "TextFileOutputDialog.Compression.Label" ) ); props.setLook( wCompression ); wCompression.setItems( CompressionProviderFactory.getInstance().getCompressionProviderNames() ); wCompression.addModifyListener( lsMod ); fdCompression = new FormData(); fdCompression.left = new FormAttachment( middle, 0 ); fdCompression.top = new FormAttachment( wFormat, margin ); fdCompression.right = new FormAttachment( 100, 0 ); wCompression.setLayoutData( fdCompression ); wlEncoding = new Label( wContentComp, SWT.RIGHT ); wlEncoding.setText( BaseMessages.getString( BASE_PKG, "TextFileOutputDialog.Encoding.Label" ) ); props.setLook( wlEncoding ); fdlEncoding = new FormData(); fdlEncoding.left = new FormAttachment( 0, 0 ); fdlEncoding.top = new FormAttachment( wCompression, margin ); fdlEncoding.right = new FormAttachment( middle, -margin ); wlEncoding.setLayoutData( fdlEncoding ); wEncoding = new CCombo( wContentComp, SWT.BORDER | SWT.READ_ONLY ); wEncoding.setEditable( true ); props.setLook( wEncoding ); wEncoding.addModifyListener( lsMod ); fdEncoding = new FormData(); fdEncoding.left = new FormAttachment( middle, 0 ); fdEncoding.top = new FormAttachment( wCompression, margin ); fdEncoding.right = new FormAttachment( 100, 0 ); wEncoding.setLayoutData( fdEncoding ); wEncoding.addFocusListener( new FocusListener() { public void focusLost( org.eclipse.swt.events.FocusEvent e ) { } public void focusGained( org.eclipse.swt.events.FocusEvent e ) { Cursor busy = new Cursor( shell.getDisplay(), SWT.CURSOR_WAIT ); shell.setCursor( busy ); setEncodings(); shell.setCursor( null ); busy.dispose(); } } ); wlPad = new Label( wContentComp, SWT.RIGHT ); wlPad.setText( BaseMessages.getString( BASE_PKG, "TextFileOutputDialog.Pad.Label" ) ); props.setLook( wlPad ); fdlPad = new FormData(); fdlPad.left = new FormAttachment( 0, 0 ); fdlPad.top = new FormAttachment( wEncoding, margin ); fdlPad.right = new FormAttachment( middle, -margin ); wlPad.setLayoutData( fdlPad ); wPad = new Button( wContentComp, SWT.CHECK ); props.setLook( wPad ); fdPad = new FormData(); fdPad.left = new FormAttachment( middle, 0 ); fdPad.top = new FormAttachment( 
wEncoding, margin ); fdPad.right = new FormAttachment( 100, 0 ); wPad.setLayoutData( fdPad ); wPad.addSelectionListener( new SelectionAdapter() { public void widgetSelected( SelectionEvent e ) { input.setChanged(); } } ); wlFastDump = new Label( wContentComp, SWT.RIGHT ); wlFastDump.setText( BaseMessages.getString( BASE_PKG, "TextFileOutputDialog.FastDump.Label" ) ); props.setLook( wlFastDump ); fdlFastDump = new FormData(); fdlFastDump.left = new FormAttachment( 0, 0 ); fdlFastDump.top = new FormAttachment( wPad, margin ); fdlFastDump.right = new FormAttachment( middle, -margin ); wlFastDump.setLayoutData( fdlFastDump ); wFastDump = new Button( wContentComp, SWT.CHECK ); props.setLook( wFastDump ); fdFastDump = new FormData(); fdFastDump.left = new FormAttachment( middle, 0 ); fdFastDump.top = new FormAttachment( wPad, margin ); fdFastDump.right = new FormAttachment( 100, 0 ); wFastDump.setLayoutData( fdFastDump ); wFastDump.addSelectionListener( new SelectionAdapter() { public void widgetSelected( SelectionEvent e ) { input.setChanged(); } } ); wlSplitEvery = new Label( wContentComp, SWT.RIGHT ); wlSplitEvery.setText( BaseMessages.getString( BASE_PKG, "TextFileOutputDialog.SplitEvery.Label" ) ); props.setLook( wlSplitEvery ); fdlSplitEvery = new FormData(); fdlSplitEvery.left = new FormAttachment( 0, 0 ); fdlSplitEvery.top = new FormAttachment( wFastDump, margin ); fdlSplitEvery.right = new FormAttachment( middle, -margin ); wlSplitEvery.setLayoutData( fdlSplitEvery ); wSplitEvery = new Text( wContentComp, SWT.SINGLE | SWT.LEFT | SWT.BORDER ); props.setLook( wSplitEvery ); wSplitEvery.addModifyListener( lsMod ); fdSplitEvery = new FormData(); fdSplitEvery.left = new FormAttachment( middle, 0 ); fdSplitEvery.top = new FormAttachment( wFastDump, margin ); fdSplitEvery.right = new FormAttachment( 100, 0 ); wSplitEvery.setLayoutData( fdSplitEvery ); // Bruise: wlEndedLine = new Label( wContentComp, SWT.RIGHT ); wlEndedLine.setText( BaseMessages.getString( BASE_PKG, "TextFileOutputDialog.EndedLine.Label" ) ); props.setLook( wlEndedLine ); fdlEndedLine = new FormData(); fdlEndedLine.left = new FormAttachment( 0, 0 ); fdlEndedLine.top = new FormAttachment( wSplitEvery, margin ); fdlEndedLine.right = new FormAttachment( middle, -margin ); wlEndedLine.setLayoutData( fdlEndedLine ); wEndedLine = new Text( wContentComp, SWT.SINGLE | SWT.LEFT | SWT.BORDER ); props.setLook( wEndedLine ); wEndedLine.addModifyListener( lsMod ); fdEndedLine = new FormData(); fdEndedLine.left = new FormAttachment( middle, 0 ); fdEndedLine.top = new FormAttachment( wSplitEvery, margin ); fdEndedLine.right = new FormAttachment( 100, 0 ); wEndedLine.setLayoutData( fdEndedLine ); fdContentComp = new FormData(); fdContentComp.left = new FormAttachment( 0, 0 ); fdContentComp.top = new FormAttachment( 0, 0 ); fdContentComp.right = new FormAttachment( 100, 0 ); fdContentComp.bottom = new FormAttachment( 100, 0 ); wContentComp.setLayoutData( fdContentComp ); wContentComp.layout(); wContentTab.setControl( wContentComp ); // /////////////////////////////////////////////////////////// // / END OF CONTENT TAB // /////////////////////////////////////////////////////////// // Fields tab... 
// wFieldsTab = new CTabItem( wTabFolder, SWT.NONE ); wFieldsTab.setText( BaseMessages.getString( BASE_PKG, "TextFileOutputDialog.FieldsTab.TabTitle" ) ); FormLayout fieldsLayout = new FormLayout(); fieldsLayout.marginWidth = Const.FORM_MARGIN; fieldsLayout.marginHeight = Const.FORM_MARGIN; Composite wFieldsComp = new Composite( wTabFolder, SWT.NONE ); wFieldsComp.setLayout( fieldsLayout ); props.setLook( wFieldsComp ); wGet = new Button( wFieldsComp, SWT.PUSH ); wGet.setText( BaseMessages.getString( BASE_PKG, "System.Button.GetFields" ) ); wGet.setToolTipText( BaseMessages.getString( BASE_PKG, "System.Tooltip.GetFields" ) ); wMinWidth = new Button( wFieldsComp, SWT.PUSH ); wMinWidth.setText( BaseMessages.getString( BASE_PKG, "TextFileOutputDialog.MinWidth.Button" ) ); wMinWidth.setToolTipText( BaseMessages.getString( BASE_PKG, "TextFileOutputDialog.MinWidth.Tooltip" ) ); setButtonPositions( new Button[] { wGet, wMinWidth }, margin, null ); final int FieldsCols = 10; final int FieldsRows = input.getOutputFields().length; // Prepare a list of possible formats... String[] nums = Const.getNumberFormats(); int totsize = dates.length + nums.length; String[] formats = new String[totsize]; for ( int x = 0; x < dates.length; x++ ) { formats[x] = dates[x]; } for ( int x = 0; x < nums.length; x++ ) { formats[dates.length + x] = nums[x]; } colinf = new ColumnInfo[FieldsCols]; colinf[0] = new ColumnInfo( BaseMessages.getString( BASE_PKG, "TextFileOutputDialog.NameColumn.Column" ), ColumnInfo.COLUMN_TYPE_CCOMBO, new String[] { "" }, false ); colinf[1] = new ColumnInfo( BaseMessages.getString( BASE_PKG, "TextFileOutputDialog.TypeColumn.Column" ), ColumnInfo.COLUMN_TYPE_CCOMBO, ValueMetaBase.getTypes() ); colinf[2] = new ColumnInfo( BaseMessages.getString( BASE_PKG, "TextFileOutputDialog.FormatColumn.Column" ), ColumnInfo.COLUMN_TYPE_CCOMBO, formats ); colinf[3] = new ColumnInfo( BaseMessages.getString( BASE_PKG, "TextFileOutputDialog.LengthColumn.Column" ), ColumnInfo.COLUMN_TYPE_TEXT, false ); colinf[4] = new ColumnInfo( BaseMessages.getString( BASE_PKG, "TextFileOutputDialog.PrecisionColumn.Column" ), ColumnInfo.COLUMN_TYPE_TEXT, false ); colinf[5] = new ColumnInfo( BaseMessages.getString( BASE_PKG, "TextFileOutputDialog.CurrencyColumn.Column" ), ColumnInfo.COLUMN_TYPE_TEXT, false ); colinf[6] = new ColumnInfo( BaseMessages.getString( BASE_PKG, "TextFileOutputDialog.DecimalColumn.Column" ), ColumnInfo.COLUMN_TYPE_TEXT, false ); colinf[7] = new ColumnInfo( BaseMessages.getString( BASE_PKG, "TextFileOutputDialog.GroupColumn.Column" ), ColumnInfo.COLUMN_TYPE_TEXT, false ); colinf[8] = new ColumnInfo( BaseMessages.getString( BASE_PKG, "TextFileOutputDialog.TrimTypeColumn.Column" ), ColumnInfo.COLUMN_TYPE_CCOMBO, ValueMetaBase.trimTypeDesc, true ); colinf[9] = new ColumnInfo( BaseMessages.getString( BASE_PKG, "TextFileOutputDialog.NullColumn.Column" ), ColumnInfo.COLUMN_TYPE_TEXT, false ); wFields = new TableView( transMeta, wFieldsComp, SWT.BORDER | SWT.FULL_SELECTION | SWT.MULTI, colinf, FieldsRows, lsMod, props ); fdFields = new FormData(); fdFields.left = new FormAttachment( 0, 0 ); fdFields.top = new FormAttachment( 0, 0 ); fdFields.right = new FormAttachment( 100, 0 ); fdFields.bottom = new FormAttachment( wGet, -margin ); wFields.setLayoutData( fdFields ); // // Search the fields in the background final Runnable runnable = new Runnable() { public void run() { StepMeta stepMeta = transMeta.findStep( stepname ); if ( stepMeta != null ) { try { RowMetaInterface row = transMeta.getPrevStepFields( 
stepMeta ); // Remember these fields... for ( int i = 0; i < row.size(); i++ ) { inputFields.put( row.getValueMeta( i ).getName(), Integer.valueOf( i ) ); } setComboBoxes(); } catch ( KettleException e ) { logError( BaseMessages.getString( BASE_PKG, "System.Dialog.GetFieldsFailed.Message" ) ); } } } }; new Thread( runnable ).start(); fdFieldsComp = new FormData(); fdFieldsComp.left = new FormAttachment( 0, 0 ); fdFieldsComp.top = new FormAttachment( 0, 0 ); fdFieldsComp.right = new FormAttachment( 100, 0 ); fdFieldsComp.bottom = new FormAttachment( 100, 0 ); wFieldsComp.setLayoutData( fdFieldsComp ); wFieldsComp.layout(); wFieldsTab.setControl( wFieldsComp ); fdTabFolder = new FormData(); fdTabFolder.left = new FormAttachment( 0, 0 ); fdTabFolder.top = new FormAttachment( wStepname, margin ); fdTabFolder.right = new FormAttachment( 100, 0 ); fdTabFolder.bottom = new FormAttachment( 100, -50 ); wTabFolder.setLayoutData( fdTabFolder ); wOK = new Button( shell, SWT.PUSH ); wOK.setText( BaseMessages.getString( BASE_PKG, "System.Button.OK" ) ); wCancel = new Button( shell, SWT.PUSH ); wCancel.setText( BaseMessages.getString( BASE_PKG, "System.Button.Cancel" ) ); positionBottomRightButtons( shell, new Button[] { wOK, wCancel }, margin, wTabFolder ); // Add listeners lsOK = new Listener() { public void handleEvent( Event e ) { ok(); } }; lsGet = new Listener() { public void handleEvent( Event e ) { get(); } }; lsMinWidth = new Listener() { public void handleEvent( Event e ) { setMinimalWidth(); } }; lsCancel = new Listener() { public void handleEvent( Event e ) { cancel(); } }; wOK.addListener( SWT.Selection, lsOK ); wGet.addListener( SWT.Selection, lsGet ); wMinWidth.addListener( SWT.Selection, lsMinWidth ); wCancel.addListener( SWT.Selection, lsCancel ); lsDef = new SelectionAdapter() { public void widgetDefaultSelected( SelectionEvent e ) { ok(); } }; wStepname.addSelectionListener( lsDef ); wFilename.addSelectionListener( lsDef ); wSeparator.addSelectionListener( lsDef ); // Whenever something changes, set the tooltip to the expanded version: wFilename.addModifyListener( new ModifyListener() { public void modifyText( ModifyEvent e ) { wFilename.setToolTipText( transMeta.environmentSubstitute( wFilename.getText() ) ); } } ); // Listen to the Browse... 
button wbFilename.addSelectionListener( new SelectionAdapter() { public void widgetSelected( SelectionEvent e ) { try { // Setup file type filtering String[] fileFilters = new String[] { "*.txt", "*.csv", "*" }; String[] fileFilterNames = new String[] { BaseMessages.getString( BASE_PKG, "System.FileType.TextFiles" ), BaseMessages.getString( BASE_PKG, "System.FileType.CSVFiles" ), BaseMessages.getString( BASE_PKG, "System.FileType.AllFiles" ) }; NamedCluster namedCluster = namedClusterWidget.getSelectedNamedCluster(); if ( namedCluster == null ) { return; } String path = wFilename.getText(); // Get current file FileObject rootFile = null; FileObject initialFile = null; FileObject defaultInitialFile = null; if ( Utils.isEmpty( path ) ) { path = "/"; } path = namedCluster.processURLsubstitution( path, getMetaStore(), transMeta ); boolean resolvedInitialFile = false; if ( path != null ) { String fileName = transMeta.environmentSubstitute( path ); if ( fileName != null && !fileName.equals( "" ) ) { try { initialFile = KettleVFS.getInstance( transMeta.getBowl() ).getFileObject( fileName ); resolvedInitialFile = true; } catch ( Exception ex ) { showMessageAndLog( BaseMessages.getString( PKG, "HadoopFileOutputDialog.Connection.Error.title" ), BaseMessages.getString( PKG, "HadoopFileOutputDialog.Connection.error" ), ex.getMessage() ); return; } File startFile = new File( System.getProperty( "user.home" ) ); defaultInitialFile = KettleVFS.getInstance( transMeta.getBowl() ) .getFileObject( startFile.getAbsolutePath() ); rootFile = initialFile.getFileSystem().getRoot(); } else { defaultInitialFile = KettleVFS.getInstance( transMeta.getBowl() ) .getFileObject( Spoon.getInstance().getLastFileOpened() ); } } if ( rootFile == null ) { if ( defaultInitialFile == null ) { return; } rootFile = defaultInitialFile.getFileSystem().getRoot(); initialFile = defaultInitialFile; } VfsFileChooserDialog fileChooserDialog = Spoon.getInstance().getVfsFileChooserDialog( rootFile, initialFile ); fileChooserDialog.defaultInitialFile = defaultInitialFile; FileObject selectedFile = null; if ( namedCluster != null ) { if ( namedCluster.isMapr() ) { selectedFile = fileChooserDialog.open( shell, new String[] { Schemes.MAPRFS_SCHEME }, Schemes.MAPRFS_SCHEME, true, path, fileFilters, fileFilterNames, true, VfsFileChooserDialog.VFS_DIALOG_OPEN_FILE_OR_DIRECTORY, false, false ); } else { List customPanels = fileChooserDialog.getCustomVfsUiPanels(); String ncName = null; HadoopVfsFileChooserDialog hadoopDialog = null; for ( CustomVfsUiPanel panel : customPanels ) { if ( panel instanceof HadoopVfsFileChooserDialog ) { hadoopDialog = ( (HadoopVfsFileChooserDialog) panel ); NamedClusterWidgetImpl ncWidget = hadoopDialog.getNamedClusterWidget(); ncWidget.initiate(); ncName = null; if ( initialFile != null ) { HadoopFileOutputMeta meta = (HadoopFileOutputMeta) input; ncName = meta.getSourceConfigurationName(); } hadoopDialog.setNamedCluster( ncName ); hadoopDialog.initializeConnectionPanel( initialFile ); } } if ( resolvedInitialFile ) { fileChooserDialog.initialFile = initialFile; } selectedFile = fileChooserDialog.open( shell, new String[] { Schemes.HDFS_SCHEME }, Schemes.HDFS_SCHEME, true, path, fileFilters, fileFilterNames, true, VfsFileChooserDialog.VFS_DIALOG_OPEN_FILE_OR_DIRECTORY, false, false ); } } if ( selectedFile != null ) { String filename = selectedFile.getURL().toString(); String extension = wExtension.getText(); if ( extension != null && filename.endsWith( "." 
+ extension ) ) { // The extension is filled in and matches the end // of the selected file => Strip off the extension. wFilename.setText( getUrlPath( filename.substring( 0, filename.length() - ( extension.length() + 1 ) ) ) ); } else { wFilename.setText( getUrlPath( filename ) ); } } } catch ( KettleFileException ex ) { log.logError( BaseMessages.getString( PKG, "HadoopFileInputDialog.FileBrowser.KettleFileException" ) ); } catch ( FileSystemException ex ) { log.logError( BaseMessages.getString( PKG, "HadoopFileInputDialog.FileBrowser.FileSystemException" ) ); } } } ); // Detect X or ALT-F4 or something that kills this window... shell.addShellListener( new ShellAdapter() { public void shellClosed( ShellEvent e ) { cancel(); } } ); lsResize = new Listener() { public void handleEvent( Event event ) { Point size = shell.getSize(); wFields.setSize( size.x - 10, size.y - 50 ); wFields.table.setSize( size.x - 10, size.y - 50 ); wFields.redraw(); } }; shell.addListener( SWT.Resize, lsResize ); wTabFolder.setSelection( 0 ); // Set the shell size, based upon previous time... setSize(); getData(); activeFileNameField(); enableParentFolder(); shell.open(); while ( !shell.isDisposed() ) { if ( !display.readAndDispatch() ) { display.sleep(); } } return stepname; } protected void fillWithSupportedDateFormats( CCombo combo, String[] dates ) { for ( String s : dates ) { // ':' is not supported in filenames by hadoop file system, add other characters if needed to the regex below if ( s.matches( "[^:]+" ) ) { combo.add( s ); } } } private void activeFileNameField() { wlFileNameField.setEnabled( wFileNameInField.getSelection() ); wFileNameField.setEnabled( wFileNameInField.getSelection() ); wlFilename.setEnabled( !wFileNameInField.getSelection() ); wFilename.setEnabled( !wFileNameInField.getSelection() ); if ( wFileNameInField.getSelection() ) { if ( !wDoNotOpenNewFileInit.getSelection() ) { wDoNotOpenNewFileInit.setSelection( true ); } wAddDate.setSelection( false ); wAddTime.setSelection( false ); wSpecifyFormat.setSelection( false ); wAddStepnr.setSelection( false ); wAddPartnr.setSelection( false ); } wlDoNotOpenNewFileInit.setEnabled( !wFileNameInField.getSelection() ); wDoNotOpenNewFileInit.setEnabled( !wFileNameInField.getSelection() ); wlSpecifyFormat.setEnabled( !wFileNameInField.getSelection() ); wSpecifyFormat.setEnabled( !wFileNameInField.getSelection() ); wAddStepnr.setEnabled( !wFileNameInField.getSelection() ); wlAddStepnr.setEnabled( !wFileNameInField.getSelection() ); wAddPartnr.setEnabled( !wFileNameInField.getSelection() ); wlAddPartnr.setEnabled( !wFileNameInField.getSelection() ); if ( wFileNameInField.getSelection() ) { wSplitEvery.setText( "0" ); } wSplitEvery.setEnabled( !wFileNameInField.getSelection() ); wlSplitEvery.setEnabled( !wFileNameInField.getSelection() ); if ( wFileNameInField.getSelection() ) { wEndedLine.setText( "" ); } wEndedLine.setEnabled( !wFileNameInField.getSelection() ); wbShowFiles.setEnabled( !wFileNameInField.getSelection() ); wbFilename.setEnabled( !wFileNameInField.getSelection() ); setDateTimeFormat(); } protected void setComboBoxes() { // Something was changed in the row. // final Map fields = new HashMap(); // Add the currentMeta fields... 
fields.putAll( inputFields ); Set keySet = fields.keySet(); List entries = new ArrayList( keySet ); String[] fieldNames = (String[]) entries.toArray( new String[entries.size()] ); Const.sortStrings( fieldNames ); colinf[0].setComboValues( fieldNames ); } private void setDateTimeFormat() { if ( wSpecifyFormat.getSelection() ) { wAddDate.setSelection( false ); wAddTime.setSelection( false ); } wDateTimeFormat.setEnabled( wSpecifyFormat.getSelection() && !wFileNameInField.getSelection() ); wlDateTimeFormat.setEnabled( wSpecifyFormat.getSelection() && !wFileNameInField.getSelection() ); wAddDate.setEnabled( !( wFileNameInField.getSelection() || wSpecifyFormat.getSelection() ) ); wlAddDate.setEnabled( !( wSpecifyFormat.getSelection() || wFileNameInField.getSelection() ) ); wAddTime.setEnabled( !( wSpecifyFormat.getSelection() || wFileNameInField.getSelection() ) ); wlAddTime.setEnabled( !( wSpecifyFormat.getSelection() || wFileNameInField.getSelection() ) ); } private void setEncodings() { // Encoding of the text file: if ( !gotEncodings ) { gotEncodings = true; wEncoding.removeAll(); List values = new ArrayList( Charset.availableCharsets().values() ); for ( int i = 0; i < values.size(); i++ ) { Charset charSet = (Charset) values.get( i ); wEncoding.add( charSet.displayName() ); } // Now select the default! String defEncoding = Const.getEnvironmentVariable( "file.encoding", "UTF-8" ); int idx = Const.indexOfString( defEncoding, wEncoding.getItems() ); if ( idx >= 0 ) { wEncoding.select( idx ); } } } private void getFields() { if ( !gotPreviousFields ) { try { String field = wFileNameField.getText(); RowMetaInterface r = transMeta.getPrevStepFields( stepname ); if ( r != null ) { wFileNameField.setItems( r.getFieldNames() ); } if ( field != null ) { wFileNameField.setText( field ); } } catch ( KettleException ke ) { new ErrorDialog( shell, BaseMessages.getString( BASE_PKG, "TextFileOutputDialog.FailedToGetFields.DialogTitle" ), BaseMessages .getString( BASE_PKG, "TextFileOutputDialog.FailedToGetFields.DialogMessage" ), ke ); } gotPreviousFields = true; } } /** * Copy information from the meta-data input to the dialog fields. 
*/ public void getData() { HadoopFileOutputMeta meta = (HadoopFileOutputMeta) input; String ncName = meta.getSourceConfigurationName(); if ( ncName != null ) { namedClusterWidget.setSelectedNamedCluster( ncName ); } if ( input.getFileName() != null ) { String fileName = input.getFileName(); fileName = getUrlPath( fileName ); if ( fileName != null ) { wFilename.setText( fileName ); } } wDoNotOpenNewFileInit.setSelection( input.isDoNotOpenNewFileInit() ); wCreateParentFolder.setSelection( input.isCreateParentFolder() ); if ( input.getExtension() != null ) { wExtension.setText( input.getExtension() ); } if ( input.getSeparator() != null ) { wSeparator.setText( input.getSeparator() ); } if ( input.getEnclosure() != null ) { wEnclosure.setText( input.getEnclosure() ); } if ( input.getFileFormat() != null ) { wFormat.select( 0 ); // default if not found: CR+LF for ( int i = 0; i < HadoopFileOutputMeta.formatMapperLineTerminator.length; i++ ) { if ( input.getFileFormat().equalsIgnoreCase( HadoopFileOutputMeta.formatMapperLineTerminator[i] ) ) { wFormat.select( i ); } } } if ( input.getFileCompression() != null ) { wCompression.setText( input.getFileCompression() ); } if ( input.getEncoding() != null ) { wEncoding.setText( input.getEncoding() ); } if ( input.getEndedLine() != null ) { wEndedLine.setText( input.getEndedLine() ); } wFileNameInField.setSelection( input.isFileNameInField() ); if ( input.getFileNameField() != null ) { wFileNameField.setText( input.getFileNameField() ); } wSplitEvery.setText( "" + input.getSplitEvery() ); wEnclForced.setSelection( input.isEnclosureForced() ); wHeader.setSelection( input.isHeaderEnabled() ); wFooter.setSelection( input.isFooterEnabled() ); wAddDate.setSelection( input.isDateInFilename() ); wAddTime.setSelection( input.isTimeInFilename() ); if ( input.getDateTimeFormat() != null ) { wDateTimeFormat.setText( input.getDateTimeFormat() ); } wSpecifyFormat.setSelection( input.isSpecifyingFormat() ); wAppend.setSelection( input.isFileAppended() ); wAddStepnr.setSelection( input.isStepNrInFilename() ); wAddPartnr.setSelection( input.isPartNrInFilename() ); wPad.setSelection( input.isPadded() ); wFastDump.setSelection( input.isFastDump() ); wAddToResult.setSelection( input.isAddToResultFiles() ); logDebug( "getting fields info..." 
); for ( int i = 0; i < input.getOutputFields().length; i++ ) { TextFileField field = input.getOutputFields()[i]; TableItem item = wFields.table.getItem( i ); if ( field.getName() != null ) { item.setText( 1, field.getName() ); } item.setText( 2, field.getTypeDesc() ); if ( field.getFormat() != null ) { item.setText( 3, field.getFormat() ); } if ( field.getLength() >= 0 ) { item.setText( 4, "" + field.getLength() ); } if ( field.getPrecision() >= 0 ) { item.setText( 5, "" + field.getPrecision() ); } if ( field.getCurrencySymbol() != null ) { item.setText( 6, field.getCurrencySymbol() ); } if ( field.getDecimalSymbol() != null ) { item.setText( 7, field.getDecimalSymbol() ); } if ( field.getGroupingSymbol() != null ) { item.setText( 8, field.getGroupingSymbol() ); } String trim = field.getTrimTypeDesc(); if ( trim != null ) { item.setText( 9, trim ); } if ( field.getNullString() != null ) { item.setText( 10, field.getNullString() ); } } wFields.optWidth( true ); wStepname.selectAll(); } private void cancel() { stepname = null; input.setChanged( backupChanged ); dispose(); } private void getInfo( HadoopFileOutputMeta tfoi ) { String ncName = ( (HadoopFileOutputMeta) tfoi ).getSourceConfigurationName(); String fileName = wFilename.getText(); NamedCluster c = getMetaStore() == null ? null : namedClusterService.getNamedClusterByName( ncName, getMetaStore() ); if ( c != null ) { fileName = c.processURLsubstitution( fileName, getMetaStore(), variables ); } tfoi.setFileName( fileName ); tfoi.setDoNotOpenNewFileInit( wDoNotOpenNewFileInit.getSelection() ); tfoi.setCreateParentFolder( wCreateParentFolder.getSelection() ); tfoi.setFileFormat( HadoopFileOutputMeta.formatMapperLineTerminator[wFormat.getSelectionIndex()] ); tfoi.setFileCompression( wCompression.getText() ); tfoi.setEncoding( wEncoding.getText() ); tfoi.setSeparator( wSeparator.getText() ); tfoi.setEnclosure( wEnclosure.getText() ); tfoi.setExtension( wExtension.getText() ); tfoi.setSplitEvery( Const.toInt( wSplitEvery.getText(), 0 ) ); tfoi.setEndedLine( wEndedLine.getText() ); tfoi.setFileNameField( wFileNameField.getText() ); tfoi.setFileNameInField( wFileNameInField.getSelection() ); tfoi.setEnclosureForced( wEnclForced.getSelection() ); tfoi.setHeaderEnabled( wHeader.getSelection() ); tfoi.setFooterEnabled( wFooter.getSelection() ); tfoi.setFileAppended( wAppend.getSelection() ); tfoi.setStepNrInFilename( wAddStepnr.getSelection() ); tfoi.setPartNrInFilename( wAddPartnr.getSelection() ); tfoi.setDateInFilename( wAddDate.getSelection() ); tfoi.setTimeInFilename( wAddTime.getSelection() ); tfoi.setDateTimeFormat( wDateTimeFormat.getText() ); tfoi.setSpecifyingFormat( wSpecifyFormat.getSelection() ); tfoi.setPadded( wPad.getSelection() ); tfoi.setAddToResultFiles( wAddToResult.getSelection() ); tfoi.setFastDump( wFastDump.getSelection() ); int i; // Table table = wFields.table; int nrfields = wFields.nrNonEmpty(); tfoi.allocate( nrfields ); for ( i = 0; i < nrfields; i++ ) { TextFileField field = new TextFileField(); TableItem item = wFields.getNonEmpty( i ); field.setName( item.getText( 1 ) ); field.setType( item.getText( 2 ) ); field.setFormat( item.getText( 3 ) ); field.setLength( Const.toInt( item.getText( 4 ), -1 ) ); field.setPrecision( Const.toInt( item.getText( 5 ), -1 ) ); field.setCurrencySymbol( item.getText( 6 ) ); field.setDecimalSymbol( item.getText( 7 ) ); field.setGroupingSymbol( item.getText( 8 ) ); field.setTrimType( ValueMetaBase.getTrimTypeByDesc( item.getText( 9 ) ) ); field.setNullString( item.getText( 10 ) ); ( 
tfoi.getOutputFields() )[i] = field; } } private void ok() { if ( Utils.isEmpty( wStepname.getText() ) ) { return; } stepname = wStepname.getText(); // return value getInfo( input ); dispose(); } private void get() { try { RowMetaInterface r = transMeta.getPrevStepFields( stepname ); if ( r != null ) { TableItemInsertListener listener = new TableItemInsertListener() { public boolean tableItemInserted( TableItem tableItem, ValueMetaInterface v ) { if ( v.isNumber() ) { if ( v.getLength() > 0 ) { int le = v.getLength(); int pr = v.getPrecision(); if ( v.getPrecision() <= 0 ) { pr = 0; } String mask = ""; for ( int m = 0; m < le - pr; m++ ) { mask += "0"; } if ( pr > 0 ) { mask += "."; } for ( int m = 0; m < pr; m++ ) { mask += "0"; } tableItem.setText( 3, mask ); } } return true; } }; BaseStepDialog.getFieldsFromPrevious( r, wFields, 1, new int[] { 1 }, new int[] { 2 }, 4, 5, listener ); } } catch ( KettleException ke ) { new ErrorDialog( shell, BaseMessages.getString( BASE_PKG, "System.Dialog.GetFieldsFailed.Title" ), BaseMessages .getString( BASE_PKG, "System.Dialog.GetFieldsFailed.Message" ), ke ); } } /** * Sets the output width to minimal width... * */ public void setMinimalWidth() { int nrNonEmptyFields = wFields.nrNonEmpty(); for ( int i = 0; i < nrNonEmptyFields; i++ ) { TableItem item = wFields.getNonEmpty( i ); item.setText( 4, "" ); item.setText( 5, "" ); item.setText( 9, ValueMetaBase.getTrimTypeDesc( ValueMetaInterface.TRIM_TYPE_BOTH ) ); int type = ValueMetaBase.getType( item.getText( 2 ) ); switch ( type ) { case ValueMetaInterface.TYPE_STRING: item.setText( 3, "" ); break; case ValueMetaInterface.TYPE_INTEGER: item.setText( 3, "0" ); break; case ValueMetaInterface.TYPE_NUMBER: item.setText( 3, "0.#####" ); break; case ValueMetaInterface.TYPE_DATE: break; default: break; } } for ( int i = 0; i < input.getOutputFields().length; i++ ) { input.getOutputFields()[i].setTrimType( ValueMetaInterface.TRIM_TYPE_BOTH ); } wFields.optWidth( true ); } public String toString() { return this.getClass().getName(); } private void enableParentFolder() { wlCreateParentFolder.setEnabled( true ); wCreateParentFolder.setEnabled( true ); } public static String getUrlPath( String incomingURL ) { String path = incomingURL; try { String noVariablesURL = incomingURL.replaceAll( "[${}]", "/" ); FileName fileName = KettleVFS.getInstance().getFileSystemManager().resolveURI( noVariablesURL ); String root = fileName.getRootURI().replaceFirst( "/$", "" ); if ( noVariablesURL.startsWith( root ) ) { path = incomingURL.length() > root.length() ? incomingURL.substring( root.length() ) : "/"; } } catch ( FileSystemException e ) { path = incomingURL; } return path; } private void showMessageAndLog( String title, String message, String messageToLog ) { MessageBox box = new MessageBox( shell ); box.setText( title ); //$NON-NLS-1$ box.setMessage( message ); log.logError( messageToLog ); box.open(); } } ================================================ FILE: kettle-plugins/hdfs/core/src/main/java/org/pentaho/big/data/kettle/plugins/hdfs/trans/HadoopFileOutputMeta.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. 
* * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.hdfs.trans; import org.pentaho.big.data.impl.cluster.NamedClusterManager; import org.pentaho.di.core.plugins.ParentFirst; import org.pentaho.hadoop.shim.api.cluster.NamedCluster; import org.pentaho.hadoop.shim.api.cluster.NamedClusterService; import org.pentaho.di.core.Const; import org.pentaho.di.core.annotations.Step; import org.pentaho.di.core.bowl.Bowl; import org.pentaho.di.core.exception.KettleException; import org.pentaho.di.core.injection.InjectionSupported; import org.pentaho.di.core.variables.VariableSpace; import org.pentaho.di.core.variables.Variables; import org.pentaho.di.core.xml.XMLHandler; import org.pentaho.di.metastore.MetaStoreConst; import org.pentaho.di.repository.ObjectId; import org.pentaho.di.repository.Repository; import org.pentaho.di.resource.ResourceNamingInterface; import org.pentaho.di.trans.steps.textfileoutput.TextFileOutputMeta; import org.pentaho.metastore.api.IMetaStore; import org.pentaho.runtime.test.RuntimeTester; import org.pentaho.runtime.test.action.RuntimeTestActionService; import org.pentaho.runtime.test.action.impl.RuntimeTestActionServiceImpl; import org.pentaho.runtime.test.impl.RuntimeTesterImpl; import org.w3c.dom.Node; import java.util.Map; @Step( id = "HadoopFileOutputPlugin", image = "HDO.svg", name = "HadoopFileOutputPlugin.Name", description = "HadoopFileOutputPlugin.Description", categoryDescription = "i18n:org.pentaho.di.trans.step:BaseStep.Category.BigData", i18nPackageName = "org.pentaho.di.trans.steps.hadoopfileoutput" ) @InjectionSupported( localizationPrefix = "HadoopFileOutput.Injection.", groups = { "OUTPUT_FIELDS" } ) //@ParentFirst( patterns = { "../../lib" } ) public class HadoopFileOutputMeta extends TextFileOutputMeta implements HadoopFileMeta { // for message resolution private static Class PKG = HadoopFileOutputMeta.class; private String sourceConfigurationName; private static final String SOURCE_CONFIGURATION_NAME = "source_configuration_name"; private final NamedClusterService namedClusterService; private final RuntimeTestActionService runtimeTestActionService; private final RuntimeTester runtimeTester; private IMetaStore metaStore; private Node embeddedNamedClusterNode; public HadoopFileOutputMeta() { this.namedClusterService = NamedClusterManager.getInstance(); this.runtimeTestActionService = RuntimeTestActionServiceImpl.getInstance(); this.runtimeTester = RuntimeTesterImpl.getInstance(); } public HadoopFileOutputMeta( NamedClusterService namedClusterService, RuntimeTestActionService runtimeTestActionService, RuntimeTester runtimeTester ) { this.namedClusterService = namedClusterService; this.runtimeTestActionService = runtimeTestActionService; this.runtimeTester = runtimeTester; } @Override public void setDefault() { // call the base classes method super.setDefault(); // now set the default for the // filename to an empty string setFileName( "" ); } public String getSourceConfigurationName() { return sourceConfigurationName; } public void setSourceConfigurationName( String ncName ) { this.sourceConfigurationName = ncName; } protected String loadSource( Node stepnode, IMetaStore metastore ) { this.metaStore = metastore; String url = XMLHandler.getTagValue( stepnode, "file", "name" ); sourceConfigurationName = XMLHandler.getTagValue( stepnode, "file", SOURCE_CONFIGURATION_NAME ); embeddedNamedClusterNode = XMLHandler.getSubNode( stepnode, "NamedCluster" ); return 
getProcessedUrl( metastore, url ); } protected String getProcessedUrl( IMetaStore metastore, String url ) { if ( url == null ) { return null; } if ( metastore == null ) { // Maybe we can get a metastore from spoon try { metaStore = MetaStoreConst.openLocalPentahoMetaStore( false ); } catch ( Exception e ) { // If no local metastore we must ignore and proceed } } else { // if we already have a metastore use it metaStore = metastore; } NamedCluster c = getNamedCluster(); if ( c != null ) { url = c.processURLsubstitution( url, metaStore, new Variables() ); } return url; } @Override public String getClusterName( final String url ) { final NamedCluster cluster = getNamedCluster(); return cluster == null ? null : cluster.getName(); } public NamedCluster getNamedCluster() { NamedCluster cluster = namedClusterService.getNamedClusterByName( sourceConfigurationName, metaStore ); if ( cluster == null ) { // Still no metastore, try to make a named cluster from the embedded xml if ( namedClusterService.getClusterTemplate() != null ) { cluster = namedClusterService.getClusterTemplate().fromXmlForEmbed( embeddedNamedClusterNode ); } } return cluster; } public String getUrlPath( String incomingURL ) { return getProcessedUrl( null, incomingURL ); } protected void saveSource( StringBuilder retVal, String fileName ) { retVal.append( " " ).append( XMLHandler.addTagValue( "name", fileName ) ); retVal.append( " " ).append( XMLHandler.addTagValue( SOURCE_CONFIGURATION_NAME, sourceConfigurationName ) ); } @Override public String getXML() { String xml = super.getXML(); NamedCluster c = namedClusterService.getNamedClusterByName( sourceConfigurationName, metaStore ); if ( c != null ) { xml = xml + c.toXmlForEmbed( "NamedCluster" ) + Const.CR; } return xml; } // Receiving metaStore because RepositoryProxy.getMetaStore() returns a hard-coded null protected String loadSourceRep( Repository rep, ObjectId id_step, IMetaStore metaStore ) throws KettleException { this.metaStore = metaStore; String url = rep.getStepAttributeString( id_step, "file_name" ); sourceConfigurationName = rep.getStepAttributeString( id_step, SOURCE_CONFIGURATION_NAME ); return getProcessedUrl( metaStore, url ); } protected void saveSourceRep( Repository rep, ObjectId id_transformation, ObjectId id_step, String fileName ) throws KettleException { rep.saveStepAttribute( id_transformation, id_step, "file_name", fileName ); rep.saveStepAttribute( id_transformation, id_step, SOURCE_CONFIGURATION_NAME, sourceConfigurationName ); } public NamedClusterService getNamedClusterService() { return namedClusterService; } public RuntimeTester getRuntimeTester() { return runtimeTester; } public RuntimeTestActionService getRuntimeTestActionService() { return runtimeTestActionService; } @Override public String exportResources( Bowl executionBowl, Bowl globalManagementBowl, VariableSpace space, Map definitions, ResourceNamingInterface resourceNamingInterface, Repository repository, IMetaStore metaStore ) throws KettleException { return null; } } ================================================ FILE: kettle-plugins/hdfs/core/src/main/java/org/pentaho/big/data/kettle/plugins/hdfs/trans/HadoopInputFileSelectionAdapter.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. 
* * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.hdfs.trans; import org.pentaho.di.base.AbstractMeta; import org.pentaho.di.core.logging.LogChannelInterface; import org.pentaho.di.ui.core.events.dialog.SelectionAdapterFileDialog; import org.pentaho.di.ui.core.events.dialog.SelectionAdapterOptions; import org.pentaho.di.ui.core.widget.TableView; public class HadoopInputFileSelectionAdapter extends SelectionAdapterFileDialog { public HadoopInputFileSelectionAdapter( LogChannelInterface log, TableView textUiWidget, AbstractMeta meta, SelectionAdapterOptions options ) { super( log, textUiWidget, meta, options ); } @Override protected String getText() { return this.getTextWidget().getActiveTableItem().getText( this.getTextWidget().getActiveTableColumn() ); } @Override protected void setText( String text ) { this.getTextWidget().getActiveTableItem().setText( this.getTextWidget().getActiveTableColumn(), text ); } } ================================================ FILE: kettle-plugins/hdfs/core/src/main/java/org/pentaho/big/data/kettle/plugins/hdfs/trans/analyzer/HadoopBaseStepAnalyzer.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.hdfs.trans.analyzer; import org.apache.commons.lang.StringUtils; import org.pentaho.big.data.kettle.plugins.hdfs.trans.HadoopFileMeta; import org.pentaho.di.trans.step.BaseStepMeta; import org.pentaho.di.trans.steps.file.BaseFileMeta; import org.pentaho.dictionary.DictionaryConst; import org.pentaho.metaverse.api.IMetaverseNode; import org.pentaho.metaverse.api.MetaverseException; import org.pentaho.metaverse.api.StepField; import org.pentaho.metaverse.api.analyzer.kettle.step.ExternalResourceStepAnalyzer; import org.pentaho.metaverse.api.IMetaverseObjectFactory; import org.pentaho.metaverse.api.model.IExternalResourceInfo; import java.util.HashSet; import java.util.Set; /** * Common functionality for Hadoop input and output step analyzers. */ public abstract class HadoopBaseStepAnalyzer extends ExternalResourceStepAnalyzer { @Override protected boolean normalizeFilePath() { return false; } @Override protected Set getUsedFields( final M meta ) { return null; } /** * The Hadoop file input step supports local and remote files. Since we can have a mix of both, we intentionally * use the generic "File Field" type, rather than the more specific "Hadoop Field" type. 
*/ @Override public String getResourceInputNodeType() { return DictionaryConst.NODE_TYPE_FILE_FIELD; } @Override public String getResourceOutputNodeType() { return DictionaryConst.NODE_TYPE_FILE_FIELD; } @Override public Set> getSupportedSteps() { return new HashSet>() { { add( getMetaClass() ); } }; } public abstract Class getMetaClass(); // used for unit testing protected void setObjectFactory( IMetaverseObjectFactory factory ) { this.metaverseObjectFactory = factory; } @Override public IMetaverseNode createResourceNode( final IExternalResourceInfo resource ) throws MetaverseException { return createFileNode( parentTransMeta.getBowl(), resource.getName(), descriptor ); } @Override public IMetaverseNode createResourceNode( final M meta, final IExternalResourceInfo resource ) throws MetaverseException { IMetaverseNode resourceNode = null; if ( meta instanceof HadoopFileMeta ) { resourceNode = createResourceNode( resource ); final HadoopFileMeta hMeta = (HadoopFileMeta) meta; final String hostName = hMeta.getUrlHostName( resource.getName() ); if ( StringUtils.isNotBlank( hostName ) ) { resourceNode.setProperty( DictionaryConst.PROPERTY_HOST_NAME, hostName ); // update the default "File" type to "HDFS File" resourceNode.setProperty( DictionaryConst.PROPERTY_TYPE, DictionaryConst.NODE_TYPE_FILE ); final String clusterName = hMeta.getClusterName( resource.getName() ); if ( StringUtils.isNotBlank( clusterName ) ) { resourceNode.setProperty( DictionaryConst.PROPERTY_CLUSTER, clusterName ); } } } return resourceNode; } } ================================================ FILE: kettle-plugins/hdfs/core/src/main/java/org/pentaho/big/data/kettle/plugins/hdfs/trans/analyzer/HadoopFileInputExternalResourceConsumer.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.hdfs.trans.analyzer; import org.pentaho.big.data.kettle.plugins.hdfs.trans.HadoopFileInputMeta; import org.pentaho.di.trans.steps.fileinput.text.TextFileInput; import org.pentaho.metaverse.api.analyzer.kettle.step.BaseStepExternalResourceConsumer; public class HadoopFileInputExternalResourceConsumer extends BaseStepExternalResourceConsumer { @Override public Class getMetaClass() { return HadoopFileInputMeta.class; } @Override public boolean isDataDriven( final HadoopFileInputMeta meta ) { return meta.isAcceptingFilenames(); } } ================================================ FILE: kettle-plugins/hdfs/core/src/main/java/org/pentaho/big/data/kettle/plugins/hdfs/trans/analyzer/HadoopFileInputStepAnalyzer.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. 
* * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.hdfs.trans.analyzer; import org.pentaho.big.data.kettle.plugins.hdfs.trans.HadoopFileInputMeta; import org.pentaho.metaverse.api.IMetaverseNode; import org.pentaho.metaverse.api.MetaverseAnalyzerException; import org.pentaho.metaverse.api.analyzer.kettle.step.IClonableStepAnalyzer; public class HadoopFileInputStepAnalyzer extends HadoopBaseStepAnalyzer { @Override public Class getMetaClass() { return HadoopFileInputMeta.class; } @Override public boolean isOutput() { return false; } @Override public boolean isInput() { return true; } @Override protected void customAnalyze( final HadoopFileInputMeta meta, final IMetaverseNode rootNode ) throws MetaverseAnalyzerException { super.customAnalyze( meta, rootNode ); if ( meta.isAcceptingFilenames() ) { rootNode.setProperty( "fileNameStep", meta.getAcceptingStepName() ); rootNode.setProperty( "fileNameField", meta.getAcceptingField() ); rootNode.setProperty( "passingThruFields", meta.inputFiles.passingThruFields ); } rootNode.setProperty( "fileType", meta.content.fileType ); rootNode.setProperty( "separator", meta.content.separator ); rootNode.setProperty( "enclosure", meta.content.enclosure ); rootNode.setProperty( "breakInEnclosureAllowed", meta.content.breakInEnclosureAllowed ); rootNode.setProperty( "escapeCharacter", meta.content.escapeCharacter ); if ( meta.content.header ) { rootNode.setProperty( "nrHeaderLines", meta.content.nrHeaderLines ); } if ( meta.content.footer ) { rootNode.setProperty( "nrFooterLines", meta.content.nrFooterLines ); } if ( meta.content.lineWrapped ) { rootNode.setProperty( "nrWraps", meta.content.nrWraps ); } if ( meta.content.layoutPaged ) { rootNode.setProperty( "nrLinesPerPage", meta.content.nrLinesPerPage ); rootNode.setProperty( "nrLinesDocHeader", meta.content.nrLinesDocHeader ); } rootNode.setProperty( "fileCompression", meta.content.fileCompression ); rootNode.setProperty( "noEmptyLines", meta.content.noEmptyLines ); rootNode.setProperty( "includeFilename", meta.content.includeFilename ); if ( meta.content.includeFilename ) { rootNode.setProperty( "filenameField", meta.content.filenameField ); } rootNode.setProperty( "includeRowNumber", meta.content.includeRowNumber ); if ( meta.content.includeFilename ) { rootNode.setProperty( "rowNumberField", meta.content.rowNumberField ); rootNode.setProperty( "rowNumberByFile", meta.content.rowNumberByFile ); } rootNode.setProperty( "fileFormat", meta.content.fileFormat ); rootNode.setProperty( "encoding", meta.content.encoding ); rootNode.setProperty( "rowLimit", Long.toString( meta.content.rowLimit ) ); rootNode.setProperty( "dateFormatLenient", meta.content.dateFormatLenient ); rootNode.setProperty( "dateFormatLocale", meta.content.dateFormatLocale ); rootNode.setProperty( "addFilenamesToResult", meta.inputFiles.isaddresult ); } @Override public IClonableStepAnalyzer newInstance() { return new HadoopFileInputStepAnalyzer(); } } ================================================ FILE: kettle-plugins/hdfs/core/src/main/java/org/pentaho/big/data/kettle/plugins/hdfs/trans/analyzer/HadoopFileOutputExternalResourceConsumer.java ================================================ /*! 
****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.hdfs.trans.analyzer; import org.pentaho.big.data.kettle.plugins.hdfs.trans.HadoopFileOutputMeta; import org.pentaho.di.trans.steps.fileinput.text.TextFileInput; import org.pentaho.metaverse.api.analyzer.kettle.step.BaseStepExternalResourceConsumer; public class HadoopFileOutputExternalResourceConsumer extends BaseStepExternalResourceConsumer { @Override public Class getMetaClass() { return HadoopFileOutputMeta.class; } @Override public boolean isDataDriven( final HadoopFileOutputMeta meta ) { return meta.isFileNameInField(); } } ================================================ FILE: kettle-plugins/hdfs/core/src/main/java/org/pentaho/big/data/kettle/plugins/hdfs/trans/analyzer/HadoopFileOutputStepAnalyzer.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.hdfs.trans.analyzer; import org.pentaho.big.data.kettle.plugins.hdfs.trans.HadoopFileOutputMeta; import org.pentaho.metaverse.api.IMetaverseNode; import org.pentaho.metaverse.api.MetaverseAnalyzerException; import org.pentaho.metaverse.api.analyzer.kettle.step.IClonableStepAnalyzer; public class HadoopFileOutputStepAnalyzer extends HadoopBaseStepAnalyzer { @Override public Class getMetaClass() { return HadoopFileOutputMeta.class; } @Override public boolean isOutput() { return true; } @Override public boolean isInput() { return false; } @Override protected void customAnalyze( final HadoopFileOutputMeta meta, final IMetaverseNode rootNode ) throws MetaverseAnalyzerException { super.customAnalyze( meta, rootNode ); rootNode.setProperty( "createParentFolder", meta.isCreateParentFolder() ); rootNode.setProperty( "doNotOpenNewFileInit", meta.isDoNotOpenNewFileInit() ); if ( meta.isFileNameInField() ) { rootNode.setProperty( "fileNameField", meta.getFileNameField() ); } rootNode.setProperty( "extension", meta.getExtension() ); rootNode.setProperty( "stepNrInFilename", meta.isStepNrInFilename() ); rootNode.setProperty( "partNrInFilename", meta.isPartNrInFilename() ); rootNode.setProperty( "dateInFilename", meta.isDateInFilename() ); rootNode.setProperty( "timeInFilename", meta.isTimeInFilename() ); if ( meta.isSpecifyingFormat() ) { rootNode.setProperty( "dateTimeFormat", meta.getDateTimeFormat() ); } rootNode.setProperty( "addFilenamesToResult", meta.isAddToResultFiles() ); rootNode.setProperty( "append", meta.isFileAppended() ); rootNode.setProperty( "separator", meta.getSeparator() ); rootNode.setProperty( "enclosure", meta.getEnclosure() ); rootNode.setProperty( "forceEnclosure", meta.isEnclosureForced() ); rootNode.setProperty( "addHeader", meta.isHeaderEnabled() ); rootNode.setProperty( "addFooter", meta.isFooterEnabled() ); rootNode.setProperty( "fileFormat", meta.getFileFormat() ); rootNode.setProperty( 
"fileCompression", meta.getFileCompression() ); rootNode.setProperty( "encoding", meta.getEncoding() ); rootNode.setProperty( "rightPadFields", meta.isPadded() ); rootNode.setProperty( "fastDataDump", meta.isFastDump() ); rootNode.setProperty( "splitEveryRows", meta.getSplitEveryRows() ); rootNode.setProperty( "endingLine", meta.getEndedLine() ); } @Override public IClonableStepAnalyzer newInstance() { return new HadoopFileOutputStepAnalyzer(); } } ================================================ FILE: kettle-plugins/hdfs/core/src/main/java/org/pentaho/big/data/kettle/plugins/hdfs/vfs/HadoopVfsConnection.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.hdfs.vfs; import org.pentaho.hadoop.shim.api.cluster.NamedCluster; import org.pentaho.di.core.Props; import org.pentaho.di.core.util.Utils; import org.pentaho.di.core.variables.VariableSpace; /** * @author Tatsiana_Kasiankova * */ public class HadoopVfsConnection { private static final String COLON = ":"; private static final String EMPTY = ""; private static final String SCHEME_NAME = "hdfs"; private String hostname; private String port; private String username; private String password; public HadoopVfsConnection( String ncHostname, String ncPort, String ncUsername, String ncPassword ) { super(); this.hostname = ncHostname; this.port = ncPort; this.username = ncUsername; this.password = ncPassword; } public HadoopVfsConnection() { this( EMPTY, EMPTY, EMPTY, EMPTY ); } public HadoopVfsConnection( NamedCluster nCluster, VariableSpace vs ) { this( EMPTY, EMPTY, EMPTY, EMPTY ); loadNamedCluster( nCluster, vs ); } /** * Build an HDFS URL given a URL and Port provided by the user. * * @return a String containing the HDFS URL */ public String getConnectionString( String schemeName ) { if ( Schemes.MAPRFS_SCHEME.equals( schemeName ) ) { return Schemes.MAPRFS_SCHEME.concat( "://" ); } StringBuffer urlString = new StringBuffer( !Utils.isEmpty( schemeName ) ? schemeName : SCHEME_NAME ).append( "://" ); if ( !Utils.isEmpty( getUsername() ) ) { urlString.append( getUsername() ).append( COLON ).append( getPassword() ).append( "@" ); } urlString.append( getHostname() ); if ( !Utils.isEmpty( getPort() ) ) { urlString.append( COLON ).append( getPort() ); } return urlString.toString(); } private void loadNamedCluster( NamedCluster nCluster, VariableSpace vs ) { if ( nCluster != null ) { hostname = nCluster.getHdfsHost() != null ? nCluster.getHdfsHost() : EMPTY; port = nCluster.getHdfsPort() != null ? nCluster.getHdfsPort() : EMPTY; username = nCluster.getHdfsUsername() != null ? nCluster.getHdfsUsername() : EMPTY; password = nCluster.getHdfsPassword() != null ? 
nCluster.decodePassword( nCluster.getHdfsPassword() ) : EMPTY; hostname = vs.environmentSubstitute( hostname ); port = vs.environmentSubstitute( port ); username = vs.environmentSubstitute( username ); password = vs.environmentSubstitute( password ); } } public void setCustomParameters( Props pr ) { pr.setCustomParameter( "HadoopVfsFileChooserDialog.host", getHostname() ); pr.setCustomParameter( "HadoopVfsFileChooserDialog.port", getPort() ); pr.setCustomParameter( "HadoopVfsFileChooserDialog.user", getUsername() ); pr.setCustomParameter( "HadoopVfsFileChooserDialog.password", getPassword() ); } public String getHostname() { return hostname; } public String getPort() { return port; } public String getUsername() { return username; } public String getPassword() { return password; } } ================================================ FILE: kettle-plugins/hdfs/core/src/main/java/org/pentaho/big/data/kettle/plugins/hdfs/vfs/HadoopVfsFileChooserDialog.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.hdfs.vfs; import org.apache.commons.vfs2.FileName; import org.apache.commons.vfs2.FileObject; import org.apache.commons.vfs2.FileSystemException; import org.apache.commons.vfs2.FileSystemOptions; import org.eclipse.swt.SWT; import org.eclipse.swt.events.SelectionAdapter; import org.eclipse.swt.events.SelectionEvent; import org.eclipse.swt.layout.GridData; import org.eclipse.swt.layout.GridLayout; import org.eclipse.swt.widgets.Composite; import org.eclipse.swt.widgets.Group; import org.eclipse.swt.widgets.MessageBox; import org.pentaho.hadoop.shim.api.cluster.NamedCluster; import org.pentaho.hadoop.shim.api.cluster.NamedClusterService; import org.pentaho.big.data.plugins.common.ui.NamedClusterWidgetImpl; import org.pentaho.di.core.exception.KettleFileException; import org.pentaho.di.core.logging.LogChannel; import org.pentaho.di.core.variables.VariableSpace; import org.pentaho.di.core.variables.Variables; import org.pentaho.di.core.vfs.KettleVFS; import org.pentaho.di.i18n.BaseMessages; import org.pentaho.di.ui.spoon.Spoon; import org.pentaho.runtime.test.RuntimeTester; import org.pentaho.runtime.test.action.RuntimeTestActionService; import org.pentaho.vfs.ui.CustomVfsUiPanel; import org.pentaho.vfs.ui.VfsFileChooserDialog; public class HadoopVfsFileChooserDialog extends CustomVfsUiPanel { // for message resolution private static final Class PKG = HadoopVfsFileChooserDialog.class; // for logging private LogChannel log = new LogChannel( this ); // Default root file - used to avoid NPE when rootFile was not provided // and the browser is resolved FileObject defaultInitialFile = null; // File objects to keep track of when the user selects the radio buttons FileObject hadoopRootFile = null; String hadoopOpenFromFolder = null; FileObject rootFile = null; FileObject initialFile = null; VfsFileChooserDialog vfsFileChooserDialog = null; String schemeName = "hdfs"; private NamedClusterWidgetImpl namedClusterWidget = null; private String namedCluster = null; private final NamedClusterService namedClusterService; private final RuntimeTestActionService runtimeTestActionService; private final 
RuntimeTester runtimeTester; public HadoopVfsFileChooserDialog( String schemeName, String displayName, VfsFileChooserDialog vfsFileChooserDialog, FileObject rootFile, FileObject initialFile, NamedClusterService namedClusterService, RuntimeTestActionService runtimeTestActionService, RuntimeTester runtimeTester ) { super( schemeName, displayName, vfsFileChooserDialog, SWT.NONE ); this.schemeName = schemeName; this.rootFile = rootFile; this.initialFile = initialFile; this.vfsFileChooserDialog = vfsFileChooserDialog; this.namedClusterService = namedClusterService; this.runtimeTestActionService = runtimeTestActionService; this.runtimeTester = runtimeTester; // Create the Hadoop panel GridData gridData = new GridData( SWT.FILL, SWT.CENTER, true, false ); setLayoutData( gridData ); setLayout( new GridLayout( 1, false ) ); createConnectionPanel(); } private void createConnectionPanel() { // The Connection group Group connectionGroup = new Group( this, SWT.SHADOW_ETCHED_IN ); connectionGroup.setText( BaseMessages.getString( PKG, "HadoopVfsFileChooserDialog.ConnectionGroup.Label" ) ); //$NON-NLS-1$ ; GridLayout connectionGroupLayout = new GridLayout(); connectionGroupLayout.marginWidth = 5; connectionGroupLayout.marginHeight = 5; connectionGroupLayout.verticalSpacing = 5; connectionGroupLayout.horizontalSpacing = 5; GridData gData = new GridData( SWT.FILL, SWT.FILL, true, false ); connectionGroup.setLayoutData( gData ); connectionGroup.setLayout( connectionGroupLayout ); setNamedClusterWidget( new NamedClusterWidgetImpl( connectionGroup, true, namedClusterService, runtimeTestActionService, runtimeTester, false ) ); getNamedClusterWidget().addSelectionListener( new SelectionAdapter() { public void widgetSelected( SelectionEvent evt ) { try { connect(); } catch ( Exception e ) { // To prevent errors from multiple event firings. 
} } } ); // The composite we need in the group Composite textFieldPanel = new Composite( connectionGroup, SWT.NONE ); GridData gridData = new GridData( SWT.FILL, SWT.FILL, true, false ); textFieldPanel.setLayoutData( gridData ); textFieldPanel.setLayout( new GridLayout( 5, false ) ); } public void initializeConnectionPanel( FileObject file ) { initialFile = file; /* * if ( initialFile != null && initialFile.getName().getScheme().equals( HadoopSpoonPlugin.HDFS_SCHEME ) ) { //TODO * activate HDFS } */ } private void showMessageAndLog( String title, String message, String messageToLog ) { MessageBox box = new MessageBox( this.getShell() ); box.setText( title ); // $NON-NLS-1$ box.setMessage( message ); log.logError( messageToLog ); box.open(); } public VariableSpace getVariableSpace() { if ( Spoon.getInstance().getActiveTransformation() != null ) { return Spoon.getInstance().getActiveTransformation(); } else if ( Spoon.getInstance().getActiveJob() != null ) { return Spoon.getInstance().getActiveJob(); } else { return new Variables(); } } public NamedClusterWidgetImpl getNamedClusterWidget() { return namedClusterWidget; } protected void setNamedClusterWidget( NamedClusterWidgetImpl namedClusterWidget ) { this.namedClusterWidget = namedClusterWidget; } public void setNamedCluster( String namedCluster ) { this.namedCluster = namedCluster; } public void activate() { vfsFileChooserDialog.setRootFile( null ); vfsFileChooserDialog.setInitialFile( null ); vfsFileChooserDialog.openFileCombo.setText( "hdfs://" ); vfsFileChooserDialog.vfsBrowser.fileSystemTree.removeAll(); getNamedClusterWidget().initiate(); getNamedClusterWidget().setSelectedNamedCluster( namedCluster ); super.activate(); } public void connect() { NamedCluster nc = getNamedClusterWidget().getSelectedNamedCluster(); // The Named Cluster may be hdfs, maprfs or wasb. We need to detect it here since the named // cluster was just selected. schemeName = "wasb".equals( nc.getStorageScheme() ) ? 
"wasb" : "hdfs"; FileObject root = rootFile; try { Spoon spoon = Spoon.getInstance(); root = KettleVFS.getInstance( spoon.getExecutionBowl() ) .getFileObject( nc.processURLsubstitution( FileName.ROOT_PATH, Spoon.getInstance().getMetaStore(), getVariableSpace() ) ); } catch ( KettleFileException exc ) { showMessageAndLog( BaseMessages.getString( PKG, "HadoopVfsFileChooserDialog.error" ), BaseMessages.getString( PKG, "HadoopVfsFileChooserDialog.Connection.error" ), exc.getMessage() ); } vfsFileChooserDialog.setRootFile( root ); vfsFileChooserDialog.setSelectedFile( root ); rootFile = root; } public FileObject resolveFile( String fileUri ) throws FileSystemException { Spoon spoon = Spoon.getInstance(); try { return KettleVFS.getInstance( spoon.getExecutionBowl() ) .getFileObject( fileUri, getVariableSpace(), getFileSystemOptions() ); } catch ( KettleFileException e ) { throw new FileSystemException( e ); } } public FileObject resolveFile( String fileUri, FileSystemOptions opts ) throws FileSystemException { Spoon spoon = Spoon.getInstance(); try { return KettleVFS.getInstance( spoon.getExecutionBowl() ).getFileObject( fileUri, getVariableSpace(), opts ); } catch ( KettleFileException e ) { throw new FileSystemException( e ); } } protected FileSystemOptions getFileSystemOptions() throws FileSystemException { FileSystemOptions opts = new FileSystemOptions(); return opts; } } ================================================ FILE: kettle-plugins/hdfs/core/src/main/java/org/pentaho/big/data/kettle/plugins/hdfs/vfs/MapRFSFileChooserDialog.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. 
* * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.hdfs.vfs; import org.apache.commons.vfs2.FileObject; import org.apache.commons.vfs2.FileSystemException; import org.apache.commons.vfs2.FileSystemOptions; import org.eclipse.swt.SWT; import org.pentaho.di.core.exception.KettleFileException; import org.pentaho.di.core.variables.VariableSpace; import org.pentaho.di.core.variables.Variables; import org.pentaho.di.core.vfs.KettleVFS; import org.pentaho.di.ui.spoon.Spoon; import org.pentaho.vfs.ui.CustomVfsUiPanel; import org.pentaho.vfs.ui.VfsFileChooserDialog; public class MapRFSFileChooserDialog extends CustomVfsUiPanel { private VfsFileChooserDialog vfsFileChooserDialog; public MapRFSFileChooserDialog( String schemeName, String displayName, VfsFileChooserDialog vfsFileChooserDialog ) { super( schemeName, displayName, vfsFileChooserDialog, SWT.NONE ); this.vfsFileChooserDialog = vfsFileChooserDialog; } public void activate() { vfsFileChooserDialog.setRootFile( null ); vfsFileChooserDialog.setInitialFile( null ); vfsFileChooserDialog.openFileCombo.setText( "maprfs://" ); vfsFileChooserDialog.vfsBrowser.fileSystemTree.removeAll(); super.activate(); try { FileObject newRoot = resolveFile( vfsFileChooserDialog.openFileCombo.getText() ); vfsFileChooserDialog.vfsBrowser.resetVfsRoot( newRoot ); } catch ( FileSystemException ignored ) { //ignored } } public FileObject resolveFile( String fileUri ) throws FileSystemException { Spoon spoon = Spoon.getInstance(); try { return KettleVFS.getInstance( spoon.getExecutionBowl() ) .getFileObject( fileUri, getVariableSpace(), getFileSystemOptions() ); } catch ( KettleFileException e ) { throw new FileSystemException( e ); } } public FileObject resolveFile( String fileUri, FileSystemOptions opts ) throws FileSystemException { Spoon spoon = Spoon.getInstance(); try { return KettleVFS.getInstance( spoon.getExecutionBowl() ).getFileObject( fileUri, getVariableSpace(), opts ); } catch ( KettleFileException e ) { throw new FileSystemException( e ); } } protected FileSystemOptions getFileSystemOptions() throws FileSystemException { FileSystemOptions opts = new FileSystemOptions(); return opts; } private VariableSpace getVariableSpace() { if ( Spoon.getInstance().getActiveTransformation() != null ) { return Spoon.getInstance().getActiveTransformation(); } else if ( Spoon.getInstance().getActiveJob() != null ) { return Spoon.getInstance().getActiveJob(); } else { return new Variables(); } } } ================================================ FILE: kettle-plugins/hdfs/core/src/main/java/org/pentaho/big/data/kettle/plugins/hdfs/vfs/NamedClusterVfsFileChooserDialog.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. 
* * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.hdfs.vfs; import org.apache.commons.vfs2.FileObject; import org.apache.commons.vfs2.FileSystemException; import org.apache.commons.vfs2.FileSystemOptions; import org.eclipse.swt.SWT; import org.eclipse.swt.events.SelectionAdapter; import org.eclipse.swt.events.SelectionEvent; import org.eclipse.swt.layout.GridData; import org.eclipse.swt.layout.GridLayout; import org.eclipse.swt.widgets.Composite; import org.eclipse.swt.widgets.Group; import org.eclipse.swt.widgets.MessageBox; import org.pentaho.hadoop.shim.api.cluster.NamedCluster; import org.pentaho.hadoop.shim.api.cluster.NamedClusterService; import org.pentaho.big.data.plugins.common.ui.NamedClusterWidgetImpl; import org.pentaho.di.core.Props; import org.pentaho.di.core.exception.KettleFileException; import org.pentaho.di.core.logging.LogChannel; import org.pentaho.di.core.variables.VariableSpace; import org.pentaho.di.core.variables.Variables; import org.pentaho.di.core.vfs.KettleVFS; import org.pentaho.di.i18n.BaseMessages; import org.pentaho.di.ui.spoon.Spoon; import org.pentaho.runtime.test.RuntimeTester; import org.pentaho.runtime.test.action.RuntimeTestActionService; import org.pentaho.vfs.ui.CustomVfsUiPanel; import org.pentaho.vfs.ui.VfsFileChooserDialog; public class NamedClusterVfsFileChooserDialog extends CustomVfsUiPanel { // for message resolution private static final Class PKG = NamedClusterVfsFileChooserDialog.class; // for logging private LogChannel log = new LogChannel( this ); // Default root file - used to avoid NPE when rootFile was not provided // and the browser is resolved FileObject defaultInitialFile = null; // File objects to keep track of when the user selects the radio buttons FileObject hadoopRootFile = null; String hadoopOpenFromFolder = null; FileObject rootFile = null; FileObject initialFile = null; VfsFileChooserDialog vfsFileChooserDialog = null; String schemeName = Schemes.NAMED_CLUSTER_SCHEME; private NamedClusterWidgetImpl namedClusterWidget = null; private String namedCluster = null; private final NamedClusterService namedClusterService; private final RuntimeTestActionService runtimeTestActionService; private final RuntimeTester runtimeTester; public NamedClusterVfsFileChooserDialog( String schemeName, String displayName, VfsFileChooserDialog vfsFileChooserDialog, FileObject rootFile, FileObject initialFile, NamedClusterService namedClusterService, RuntimeTestActionService runtimeTestActionService, RuntimeTester runtimeTester ) { super( schemeName, displayName, vfsFileChooserDialog, SWT.NONE ); this.schemeName = schemeName; this.rootFile = rootFile; this.initialFile = initialFile; this.vfsFileChooserDialog = vfsFileChooserDialog; this.namedClusterService = namedClusterService; this.runtimeTestActionService = runtimeTestActionService; this.runtimeTester = runtimeTester; // Create the Hadoop panel GridData gridData = new GridData( SWT.FILL, SWT.CENTER, true, false ); setLayoutData( gridData ); setLayout( new GridLayout( 1, false ) ); createConnectionPanel(); } private void createConnectionPanel() { // The Connection group Group connectionGroup = new Group( this, SWT.SHADOW_ETCHED_IN ); connectionGroup .setText( BaseMessages.getString( PKG, "HadoopVfsFileChooserDialog.ConnectionGroup.Label" ) ); //$NON-NLS-1$ ; GridLayout connectionGroupLayout = new GridLayout(); connectionGroupLayout.marginWidth = 5; connectionGroupLayout.marginHeight = 5; 
connectionGroupLayout.verticalSpacing = 5; connectionGroupLayout.horizontalSpacing = 5; GridData gData = new GridData( SWT.FILL, SWT.FILL, true, false ); connectionGroup.setLayoutData( gData ); connectionGroup.setLayout( connectionGroupLayout ); setNamedClusterWidget( new NamedClusterWidgetImpl( connectionGroup, true, namedClusterService, runtimeTestActionService, runtimeTester, false ) ); getNamedClusterWidget().addSelectionListener( new SelectionAdapter() { @Override public void widgetSelected( SelectionEvent evt ) { try { connect(); } catch ( Exception e ) { // To prevent errors from multiple event firings. log.logDebug( e.getMessage() ); } } } ); // The composite we need in the group Composite textFieldPanel = new Composite( connectionGroup, SWT.NONE ); GridData gridData = new GridData( SWT.FILL, SWT.FILL, true, false ); textFieldPanel.setLayoutData( gridData ); textFieldPanel.setLayout( new GridLayout( 5, false ) ); } public void initializeConnectionPanel( FileObject file ) { initialFile = file; /* * if ( initialFile != null && initialFile.getName().getScheme().equals( HadoopSpoonPlugin.HDFS_SCHEME ) ) { //TODO * activate HDFS } */ } private void showMessageAndLog( String title, String message, String messageToLog ) { MessageBox box = new MessageBox( this.getShell() ); box.setText( title ); // $NON-NLS-1$ box.setMessage( message ); log.logError( messageToLog ); box.open(); } public VariableSpace getVariableSpace() { if ( Spoon.getInstance().getActiveTransformation() != null ) { return Spoon.getInstance().getActiveTransformation(); } else if ( Spoon.getInstance().getActiveJob() != null ) { return Spoon.getInstance().getActiveJob(); } else { return new Variables(); } } public NamedClusterWidgetImpl getNamedClusterWidget() { return namedClusterWidget; } protected void setNamedClusterWidget( NamedClusterWidgetImpl namedClusterWidget ) { this.namedClusterWidget = namedClusterWidget; } public void setNamedCluster( String namedCluster ) { this.namedCluster = namedCluster; } @Override public void activate() { vfsFileChooserDialog.setRootFile( null ); vfsFileChooserDialog.setInitialFile( null ); vfsFileChooserDialog.openFileCombo.setText( Schemes.NAMED_CLUSTER_SCHEME + "://" ); vfsFileChooserDialog.vfsBrowser.fileSystemTree.removeAll(); getNamedClusterWidget().initiate(); getNamedClusterWidget().setSelectedNamedCluster( namedCluster ); super.activate(); } public void connect() { NamedCluster nc = getNamedClusterWidget().getSelectedNamedCluster(); HadoopVfsConnection hdfsConnection = new HadoopVfsConnection( nc, getVariableSpace() ); hdfsConnection.setCustomParameters( Props.getInstance() ); // The Named Cluster may be hdfs, maprfs or wasb. We need to detect it here since the named // cluster was just selected. //schemeName = "wasb".equals( nc.getStorageScheme() ) ? "wasb" : "hdfs"; String connectionString = Schemes.NAMED_CLUSTER_SCHEME + "://" + nc.getName(); FileSystemOptions fsoptions = new FileSystemOptions(); FileObject root = rootFile; try { Spoon spoon = Spoon.getInstance(); root = KettleVFS.getInstance( spoon.getExecutionBowl() ).getFileObject( connectionString, fsoptions ); } catch ( KettleFileException exc ) { showMessageAndLog( BaseMessages.getString( PKG, "HadoopVfsFileChooserDialog.error" ), BaseMessages.getString( PKG, "HadoopVfsFileChooserDialog.Connection.error" ), exc.getMessage() ); } vfsFileChooserDialog.setRootFile( root ); vfsFileChooserDialog.setSelectedFile( root ); rootFile = root; } /** * resolve file with new File SystemOptions. 
 */
  @Override
  public FileObject resolveFile( String fileUri ) throws FileSystemException {
    try {
      Spoon spoon = Spoon.getInstance();
      // should we use a new instance of FileSystemOptions? should it be deprecated?
      return KettleVFS.getInstance( spoon.getExecutionBowl() )
        .getFileObject( fileUri, getVariableSpace(), getFileSystemOptions() );
    } catch ( KettleFileException e ) {
      throw new FileSystemException( e );
    }
  }

  @Override
  public FileObject resolveFile( String fileUri, FileSystemOptions opts ) throws FileSystemException {
    try {
      Spoon spoon = Spoon.getInstance();
      return KettleVFS.getInstance( spoon.getExecutionBowl() ).getFileObject( fileUri, getVariableSpace(), opts );
    } catch ( KettleFileException e ) {
      throw new FileSystemException( e );
    }
  }

  /**
   * @return a new FileSystemOptions instance
   * @throws FileSystemException
   */
  protected FileSystemOptions getFileSystemOptions() throws FileSystemException {
    FileSystemOptions opts = new FileSystemOptions();
    return opts;
  }
}

================================================
FILE: kettle-plugins/hdfs/core/src/main/java/org/pentaho/big/data/kettle/plugins/hdfs/vfs/Schemes.java
================================================

/*! ******************************************************************************
 *
 * Pentaho
 *
 * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com
 *
 * Use of this software is governed by the Business Source License included
 * in the LICENSE.TXT file.
 *
 * Change Date: 2029-07-20
 ******************************************************************************/

package org.pentaho.big.data.kettle.plugins.hdfs.vfs;

/**
 * Created by bryan on 11/23/15.
 */
public class Schemes {
  public static final String HDFS_SCHEME = "hdfs";
  public static final String HDFS_SCHEME_DISPLAY_NAME = "HDFS";
  public static final String MAPRFS_SCHEME = "maprfs";
  public static final String MAPRFS_SCHEME_DISPLAY_NAME = "MapRFS";
  public static final String NAMED_CLUSTER_SCHEME = "hc";
  public static final String NAMED_CLUSTER_SCHEME_DISPLAY_NAME = "Hadoop Cluster";
  public static final String S3_SCHEME = "s3";
  public static final String S3N_SCHEME = "s3n";
}

================================================
FILE: kettle-plugins/hdfs/core/src/main/resources/OSGI-INF/blueprint/blueprint.xml
================================================

================================================
FILE: kettle-plugins/hdfs/core/src/main/resources/graph.properties
================================================

blueprints.graph=com.tinkerpop.blueprints.impls.tg.TinkerGraph

================================================
FILE: kettle-plugins/hdfs/core/src/main/resources/org/pentaho/big/data/kettle/plugins/hdfs/job/messages/messages_en_US.properties
================================================

HadoopCopyFilesPlugin.Name=Hadoop copy files
HadoopCopyFilesPlugin.Description=Copy files to and from HDFS
JobCopyFiles.Browse.Label=Browse...
JobHadoopCopyFiles.Log.ArgFromPrevious.Found=found [{0}] argument(s) from previous result
JobHadoopCopyFiles.Tab.General.Label=General
JobHadoopCopyFiles.BrowseFiles.Label=File...
JobHadoopCopyFiles.Error.Exception.CopyProcessError=There was an error copying file [{0}] to [{1}] \: [{2}]
JobHadoopCopyFiles.Fields.Label=Files/Folders\:
JobHadoopCopyFiles.Filetype.All=All files
JobHadoopCopyFiles.SourceFileFolder.Tooltip=Enter here the file or folder to copy.\n If it's a folder, PDI will fetch subfolders only if ''Include subfolders'' is checked\!
JobHadoopCopyFiles.Fields.Wildcard.Tooltip=Specify here the regular expressions wildcard to match.\n Only files that match the wildcard will be copied.
JobHadoopCopyFiles.DestinationFileFolder.Tooltip=Enter here the destination folder to write to.\n If you selected a file as source, you can define a file as destination.
JobHadoopCopyFiles.Error.DestinationFolderNotFound=Destination folder does not exist\!
JobHadoopCopyFiles.Log.FileExistsInfos=File exists\!
JobHadoopCopyFiles.Log.FileOverwrite=File [{0}] was overwritten
JobHadoopCopyFiles.DestinationIsAFile.Tooltip=PDI will consider that destination is a file.
JobHadoopCopyFiles.Error.Exception.CopyProcess=Can not copy file/folder [{0}] to [{1}]. Exception \: [{2}]
JobHadoopCopyFiles.FilenameDelete.Button=&Delete
JobHadoopCopyFiles.Log.FolderCopied=Folder [{0}] was copied to [{1}]
JobHadoopCopyFiles.Fields.DestinationFileFolder.Label=File/Folder destination
JobHadoopCopyFiles.Log.FileRemoved=File [{0}] was deleted
JobHadoopCopyFiles.Error.CanNotRemoveFile=Can not delete file
JobHadoopCopyFiles.Log.FileCopied=File [{0}] was copied to [{1}]
JobHadoopCopyFiles.FilenameAdd.Button=&Add
JobHadoopCopyFiles.Title=Hadoop copy files
JobHadoopCopyFiles.Log.FetchFolder=Fetching \: [{0}]
JobHadoopCopyFiles.Error.SourceFileNotExists=File/folder [{0}] does not exist\!
JobHadoopCopyFiles.FilenameEdit.Button=&Edit
JobHadoopCopyFiles.Log.FolderExistsInfos=Folder exists\!
JobHadoopCopyFiles.RemoveSourceFiles.Tooltip=Remove source files after copy process\nOnly files will be removed.
JobHadoopCopyFiles.CreateDestinationFolder.Label=Create destination folder
JobHadoopCopyFiles.Log.ProcessingRow=Processing row \: source file/folder \: [{0}] ... destination file/folder \: [{1}] ... wildcard \: [{2}]
JobHadoopCopyFiles.Log.FileOverwriteInfos=File
JobHadoopCopyFiles.Log.FileFolderRemoved=File/folder [{0}] was deleted
JobHadoopCopyFiles.Log.CanNotCopyFolderToFile=Can not copy folder [{0}] to file [{1}]
JobHadoopCopyFiles.IncludeSubfolders.Label=Include Subfolders
JobHadoopCopyFiles.Log.Forbidden=FORBIDDEN
JobHadoopCopyFiles.AddFileToResult.Label=Add files to result files name
JobHadoopCopyFiles.FilenameEdit.Tooltip=Edit selected files
JobHadoopCopyFiles.Log.Starting=Starting ...
JobHadoopCopyFiles.Log.FolderOverwriteInfos=Folder
JobHadoopCopyFiles.Log.FileRemovedInfos=file deleted
JobHadoopCopyFiles.CopyEmptyFolders.Label=Copy empty folders
JobHadoopCopyFiles.DestinationIsAFile.Label=Destination is a file
JobHadoopCopyFiles.RemoveSourceFiles.Label=Remove source files
JobHadoopCopyFiles.Fields.Wildcard.Label=Wildcard (RegExp)
JobHadoopCopyFiles.OverwriteFiles.Tooltip=If the destination file exists and you want to replace it, check this option.\nOtherwise, PDI will ignore it.
JobHadoopCopyFiles.Log.FileAddedToResultFilesName=File [{0}] was added to result filesname
JobHadoopCopyFiles.Log.FileCopiedInfos=File copied
JobHadoopCopyFiles.Error.Exception.CanRemoveFileFolder=Can not delete file/folder [{0}]
JobHadoopCopyFiles.CopyEmptyFolders.Tooltip=Copy empty folders\n Will work only when no wildcard was specified and ''Include subfolders'' is checked\!
JobHadoopCopyFiles.FileResult.Group.Label=Result files name
JobHadoopCopyFiles.Error.Exception.UnableSaveRep=Unable to save job entry of type ''copyfiles'' to the repository for id_job\=
JobHadoopCopyFiles.Log.Error=Error
JobHadoopCopyFiles.FilenameDelete.Tooltip=Remove selected files from the grid
JobHadoopCopyFiles.OverwriteFiles.Label=Replace existing files
JobHadoopCopyFiles.Log.FolderOverwrite=Folder [{0}] was overwritten
JobHadoopCopyFiles.CreateDestinationFolder.Tooltip=Create destination folder if necessary.\nIf destination is a file, parent folder will be created if necessary.
JobHadoopCopyFiles.Wildcard.Tooltip=Specify here the regular expressions wildcard to match.\n Only files that match the wildcard will be copied.
JobHadoopCopyFiles.Fields.DestinationFileFolder.Tooltip=Enter here the destination folder to write to.\n If you selected a file as source, you can define a file as destination.
JobHadoopCopyFiles.Previous.Tooltip=Check this to pass the results of the previous entry to the arguments of this entry.\nBe careful, arguments must be in the same order\!\n ie \: (1) source folder/file, (2) destination folder/file, (3) wildcard
JobHadoopCopyFiles.Settings.Label=Settings
JobHadoopCopyFiles.Log.FolderCopiedInfos=Folder copied
JobHadoopCopyFiles.Name.Label=Entry name\:
JobHadoopCopyFiles.Error.Exception.UnableLoadXML=Unable to load job entry of type ''copyfiles'' from XML node
JobHadoopCopyFiles.Name.Default=HDFS Copy files
JobHadoopCopyFiles.Log.FolderExists=Folder [{0}] exists\!
JobHadoopCopyFiles.Previous.Label=Copy previous results to args
JobHadoopCopyFiles.Log.FileExists=file [{0}] exists\!
JobHadoopCopyFiles.Tab.AddResultFilesName.Label=Result files name
JobHadoopCopyFiles.Error.Exception.UnableLoadRep=Unable to load job entry of type ''copyfiles'' from the repository for id_jobentry\=
JobHadoopCopyFiles.Fields.SourceFileFolder.Tooltip=Enter here the file or folder to copy.\n If it's a folder, PDI will fetch subfolders only if ''Include subfolders'' is checked\!
JobHadoopCopyFiles.Log.IgnoringRow=Ignoring row where source or destination is NULL. Source file/folder \: [{0}], destination file/folder \: [{1}], wildcard \: [{2}]
JobHadoopCopyFiles.DestinationFileFolder.Label=File/Folder destination
JobHadoopCopyFiles.Log.FileFolderRemovedInfos=File/folder deleted
JobHadoopCopyFiles.BrowseFolders.Label=Folder...
JobHadoopCopyFiles.Log.ResultFilesName=Result filesname
JobHadoopCopyFiles.IncludeSubfolders.Tooltip=Check this if you also want to fetch sub folders.\nThis option will work only when the source is a folder.
JobHadoopCopyFiles.Wildcard.Label=Wildcard (RegExp)
JobHadoopCopyFiles.SourceFileFolder.Label=File/Folder source
JobHadoopCopyFiles.AddFileToResult.Tooltip=Add destination files to result files name.\nIt is helpful if you want to attach these files to an email using the send mail job entry.
JobHadoopCopyFiles.Fields.SourceFileFolder.Label=File/Folder source
JobHadoopCopyFiles.Connection.Error.title=Unable to Connect
JobHadoopCopyFiles.Connection.error=You don''t seem to be getting a connection to the Hadoop Cluster. Check the cluster configuration you''re using.

================================================
FILE: kettle-plugins/hdfs/core/src/main/resources/org/pentaho/big/data/kettle/plugins/hdfs/job/messages/messages_ko_KR.properties
================================================

JobCopyFiles.Browse.Label=\uCC3E\uC544\uBCF4\uAE30...
================================================
FILE: kettle-plugins/hdfs/core/src/main/resources/org/pentaho/big/data/kettle/plugins/hdfs/job/messages/messages_ko_KR.properties
================================================
JobCopyFiles.Browse.Label=\uCC3E\uC544\uBCF4\uAE30...
JobHadoopCopyFiles.AddFileToResult.Label=\uD30C\uC77C\uC744 \uACB0\uACFC \uD30C\uC77C \uC774\uB984\uC5D0 \uCD94\uAC00
JobHadoopCopyFiles.BrowseFiles.Label =\uD30C\uC77C...
JobHadoopCopyFiles.BrowseFolders.Label =\uD3F4\uB354...
JobHadoopCopyFiles.CopyEmptyFolders.Label=\uBE48 \uD3F4\uB354 \uBCF5\uC0AC
JobHadoopCopyFiles.CreateDestinationFolder.Label=\uB300\uC0C1 \uD3F4\uB354 \uC0DD\uC131
JobHadoopCopyFiles.DestinationFileFolder.Label=\uB300\uC0C1 \uD30C\uC77C/\uD3F4\uB354
JobHadoopCopyFiles.DestinationIsAFile.Label=\uB300\uC0C1\uC740 \uD30C\uC77C
JobHadoopCopyFiles.Error.CanNotRemoveFile=\uD30C\uC77C\uC744 \uC0AD\uC81C\uD560 \uC218 \uC5C6\uC2B5\uB2C8\uB2E4
JobHadoopCopyFiles.Fields.DestinationFileFolder.Label=\uB300\uC0C1 \uD30C\uC77C/\uD3F4\uB354
JobHadoopCopyFiles.Fields.Label =\uD30C\uC77C/\uD3F4\uB354:
JobHadoopCopyFiles.Fields.SourceFileFolder.Label=\uD30C\uC77C/\uD3F4\uB354 \uC18C\uC2A4
JobHadoopCopyFiles.Fields.Wildcard.Label=\uC640\uC77C\uB4DC\uCE74\uB4DC (\uC815\uADDC\uD45C\uD604\uC2DD)
JobHadoopCopyFiles.FileResult.Group.Label=\uACB0\uACFC \uD30C\uC77C \uC774\uB984
JobHadoopCopyFiles.FilenameAdd.Button =\uCD94\uAC00(&A)
JobHadoopCopyFiles.FilenameDelete.Button=\uC0AD\uC81C(&D)
JobHadoopCopyFiles.FilenameEdit.Button =\uD3B8\uC9D1(&E)
JobHadoopCopyFiles.Filetype.All =\uBAA8\uB4E0 \uD30C\uC77C
JobHadoopCopyFiles.Log.Error =\uC624\uB958
JobHadoopCopyFiles.Log.Starting =\uC2DC\uC791 ...
JobHadoopCopyFiles.Name.Label =Job \uC5D4\uD2B8\uB9AC \uC774\uB984:
JobHadoopCopyFiles.Settings.Label =\uC124\uC815
JobHadoopCopyFiles.SourceFileFolder.Label=\uD30C\uC77C/\uD3F4\uB354 \uC18C\uC2A4
JobHadoopCopyFiles.Tab.General.Label =\uC77C\uBC18
JobHadoopCopyFiles.Wildcard.Label =\uC640\uC77C\uB4DC\uCE74\uB4DC (\uC815\uADDC\uD45C\uD604\uC2DD)
================================================
FILE: kettle-plugins/hdfs/core/src/main/resources/org/pentaho/big/data/kettle/plugins/hdfs/trans/messages/messages_en_US.properties
================================================
HadoopFileOutputPlugin.Name=Hadoop file output
HadoopFileOutputPlugin.Description=Create files in an HDFS location
HadoopFileOutputDialog.DialogTitle=Hadoop file output
HadoopFileOutput.MethodNotSupportedException.Message=Method not supported
HadoopFileOutputDialog.Filename.Label=Folder/File
HadoopFileOutputDialog.Connection.Error.title=Unable to Connect
HadoopFileOutputDialog.Connection.error=You don''t seem to be getting a connection to the Hadoop Cluster. Check the cluster configuration you''re using.
HadoopFileInputPlugin.Name=Hadoop file input
HadoopFileInputPlugin.Description=Process files from an HDFS location
HadoopFileInputDialog.DialogTitle=Hadoop file input
HadoopFileInputDialog.Environment=Environment
HadoopFileInputDialog.FileFolderColumn.Column=File/Folder
HadoopFileInputDialog.Connection.Error.title=Unable to Connect
HadoopFileInputDialog.Connection.error=You don''t seem to be getting a connection to the Hadoop Cluster. Check the cluster configuration you''re using.
#File Tab
HadoopFileInput.Injection.ENVIRONMENT=The environment of the selected file/folder.
HadoopFileOutput.Injection.FILENAME=The name of the file to write to.
HadoopFileOutput.Injection.CREATE_PARENT_FOLDER=This option indicates whether a parent folder should be created for the file when it''s created.
HadoopFileOutput.Injection.DO_NOT_CREATE_FILE_AT_STARTUP=This option will not write empty files if no rows are processed.
HadoopFileOutput.Injection.FILENAME_IN_FIELD=This option allows you to specify the file name(s) in a field in the input stream.
HadoopFileOutput.Injection.FILENAME_FIELD=When "Accept File name from field?" is enabled, this option lets you specify the field that contains the file name(s).
HadoopFileOutput.Injection.EXTENSION=This option allows you to specify the extension at the end of the file name (.txt).
HadoopFileOutput.Injection.INC_STEPNR_IN_FILENAME=This option will include the copy number before the extension if you run multiple copies of the step (_0).
HadoopFileOutput.Injection.INC_PARTNR_IN_FILENAME=This option will include the data partition number in the file name.
HadoopFileOutput.Injection.INC_DATE_IN_FILENAME=This option will include the system date in the file name.
HadoopFileOutput.Injection.INC_TIME_IN_FILENAME=This option will include the system time in the file name.
HadoopFileOutput.Injection.SPECIFY_DATE_FORMAT=This option will allow you to specify the date and time format for the file name.
HadoopFileOutput.Injection.DATE_FORMAT=Specify which date & time format you want to go into each file name.
HadoopFileOutput.Injection.ADD_TO_RESULT=This option will let you add the file name to the internal file result set.
#Content Tab
HadoopFileOutput.Injection.APPEND=This option will cause lines to be appended to the specified file.
HadoopFileOutput.Injection.SEPARATOR=This option will let you specify the character that will separate the fields in a single line of text.
HadoopFileOutput.Injection.ENCLOSURE=This is an optional property that will let you specify the character that will enclose fields.
HadoopFileOutput.Injection.FORCE_ENCLOSURE=This option forces all field names to be enclosed with the character specified in the Enclosure property.
HadoopFileOutput.Injection.DISABLE_ENCLOSURE_FIX=This option enables backwards compatibility for a bug fix around Date formats that affected enclosures.
HadoopFileOutput.Injection.HEADER=This option will include a header row in the text file.
HadoopFileOutput.Injection.FOOTER=This option will include a footer row in the text file.
HadoopFileOutput.Injection.FORMAT=Specify the line terminator format (DOS/UNIX/CR/None).
HadoopFileOutput.Injection.COMPRESSION=This option will let you specify the type of compression to use on the file output.
HadoopFileOutput.Injection.ENCODING=This option will let you specify the text file encoding.
HadoopFileOutput.Injection.RIGHT_PAD_FIELDS=This option will pad fields to their specified length.
HadoopFileOutput.Injection.FAST_DATA_DUMP=This option lets you improve the performance by not including any formatting information.
HadoopFileOutput.Injection.SPLIT_EVERY=Split the data every (x) rows into additional output files.
HadoopFileOutput.Injection.ADD_ENDING_LINE=This option lets you specify an ending line to the output file.
#Fields Tab
HadoopFileOutput.Injection.OUTPUT_FIELDS=The fields to include in the output.
HadoopFileOutput.Injection.OUTPUT_FIELDNAME=The name of the field.
HadoopFileOutput.Injection.OUTPUT_TYPE=This option will let you specify the type of field (string, date, number).
HadoopFileOutput.Injection.OUTPUT_FORMAT=The format mask to convert with.
HadoopFileOutput.Injection.OUTPUT_LENGTH=This option indicates the length of the field.
HadoopFileOutput.Injection.OUTPUT_PRECISION=This option indicates the amount of precision to use on numeric values.
HadoopFileOutput.Injection.OUTPUT_CURRENCY=The currency symbol that is used.
HadoopFileOutput.Injection.OUTPUT_DECIMAL=The decimal symbol that is used.
HadoopFileOutput.Injection.OUTPUT_GROUP=The grouping symbol that is used.
HadoopFileOutput.Injection.OUTPUT_TRIM=The trimming method to apply to the string (none, left, both, right).
HadoopFileOutput.Injection.OUTPUT_NULL=The string to insert into the text file if the value of the field is null.
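Aside (editorial note, not part of the repository files): the Injection.* entries above are the option descriptions PDI surfaces for Hadoop File Output metadata injection. A minimal sketch follows of how keys in this bundle are typically resolved through PDI's BaseMessages i18n helper; the wrapper class and main method are invented for illustration, and the choice of HadoopFileOutputMeta as the PKG anchor is an assumption based on the bundle's package path shown above.

import org.pentaho.di.i18n.BaseMessages;

public class InjectionDescriptionLookupExample {
  // BaseMessages resolves keys against <package of PKG>/messages/messages_<locale>.properties,
  // so a class in org.pentaho.big.data.kettle.plugins.hdfs.trans points at this bundle (assumption).
  private static final Class<?> PKG =
    org.pentaho.big.data.kettle.plugins.hdfs.trans.HadoopFileOutputMeta.class;

  public static void main( String[] args ) {
    // Expected en_US value:
    // "This option allows you to specify the extension at the end of the file name (.txt)."
    String description = BaseMessages.getString( PKG, "HadoopFileOutput.Injection.EXTENSION" );
    System.out.println( description );
  }
}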
================================================
FILE: kettle-plugins/hdfs/core/src/main/resources/org/pentaho/big/data/kettle/plugins/hdfs/trans/messages/messages_ko_KR.properties
================================================
HadoopFileOutput.MethodNotSupportedException.Message=\uBA54\uC18C\uB4DC\uB97C \uC9C0\uC6D0\uD558\uC9C0 \uC54A\uC2B5\uB2C8\uB2E4
================================================
FILE: kettle-plugins/hdfs/core/src/main/resources/org/pentaho/big/data/kettle/plugins/hdfs/vfs/messages/messages_en_US.properties
================================================
HadoopVfsFileChooserDialog.openFile=Open File
HadoopVfsFileChooserDialog.SaveAs=Save as
HadoopVfsFileChooserDialog.FileSystemChoice.Label=Look in
HadoopVfsFileChooserDialog.FileSystemChoice.Hadoop.Label=Hadoop
HadoopVfsFileChooserDialog.FileSystemChoice.Local.Label=Local
HadoopVfsFileChooserDialog.ConnectionGroup.Label=Connection
HadoopVfsFileChooserDialog.URL.Label=Server:
HadoopVfsFileChooserDialog.Port.Label=Port:
HadoopVfsFileChooserDialog.UserID.Label=User ID:
HadoopVfsFileChooserDialog.Password.Label=Password:
HadoopVfsFileChooserDialog.ConnectionButton.Label=Connect
HadoopVfsFileChooserDialog.warning=Warning
HadoopVfsFileChooserDialog.noWriteSupport=This file system does not support write operations.
HadoopVfsFileChooserDialog.error=Error
HadoopVfsFileChooserDialog.FileSystem.error=A file system error occurred. See log for details.
HadoopVfsFileChooserDialog.Connection.Error.title=Unable to Connect
HadoopVfsFileChooserDialog.Connection.error=You don''t seem to be getting a connection to the Hadoop Cluster. Check the cluster configuration you''re using.
HadoopVfsFileChooserDialog.Connection.schemeError=The file system scheme is not supported by the {0} Hadoop configuration.
================================================
FILE: kettle-plugins/hdfs/core/src/main/resources/org/pentaho/big/data/kettle/plugins/hdfs/vfs/messages/messages_ko_KR.properties
================================================
HadoopVfsFileChooserDialog.Connection.error=HDFS \uC11C\uBC84\uC5D0 \uC5F0\uACB0\uD560 \uC218 \uC5C6\uC2B5\uB2C8\uB2E4.
HadoopVfsFileChooserDialog.ConnectionButton.Label=\uC5F0\uACB0
HadoopVfsFileChooserDialog.ConnectionGroup.Label=\uC5F0\uACB0
HadoopVfsFileChooserDialog.FileSystem.error=\uD30C\uC77C \uC2DC\uC2A4\uD15C \uC624\uB958\uAC00 \uBC1C\uC0DD\uD558\uC600\uC2B5\uB2C8\uB2E4. \uC790\uC138\uD55C \uB0B4\uC6A9\uC740 \uB85C\uADF8\uB97C \uCC38\uACE0\uD558\uC2ED\uC2DC\uC624.
HadoopVfsFileChooserDialog.FileSystemChoice.Local.Label=\uB85C\uCEEC
HadoopVfsFileChooserDialog.Password.Label =\uC554\uD638:
HadoopVfsFileChooserDialog.Port.Label =\uD3EC\uD2B8:
HadoopVfsFileChooserDialog.SaveAs =\uB2E4\uB978 \uC774\uB984\uC73C\uB85C \uC800\uC7A5
HadoopVfsFileChooserDialog.URL.Label =\uC11C\uBC84:
HadoopVfsFileChooserDialog.UserID.Label =\uC0AC\uC6A9\uC790 ID:
HadoopVfsFileChooserDialog.error =\uC624\uB958
HadoopVfsFileChooserDialog.noWriteSupport =\uD30C\uC77C \uC2DC\uC2A4\uD15C\uC774 \uC4F0\uAE30 \uC5F0\uC0B0\uC744 \uC9C0\uC6D0\uD558\uC9C0 \uC54A\uC2B5\uB2C8\uB2E4.
HadoopVfsFileChooserDialog.openFile =\uD30C\uC77C \uC5F4\uAE30 HadoopVfsFileChooserDialog.warning =\uACBD\uACE0 ================================================ FILE: kettle-plugins/hdfs/core/src/test/java/org/pentaho/big/data/kettle/plugins/hdfs/job/JobEntryHadoopCopyFilesLoadSaveTest.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.hdfs.job; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; import java.util.Arrays; import java.util.HashMap; import java.util.List; import java.util.Map; import org.junit.Before; import org.junit.Test; import org.pentaho.hadoop.shim.api.cluster.NamedClusterService; import org.pentaho.hadoop.shim.api.cluster.NamedClusterServiceLocator; import org.pentaho.hadoop.shim.api.cluster.ClusterInitializationException; import org.pentaho.big.data.impl.cluster.NamedClusterImpl; import org.pentaho.di.core.exception.KettleException; import org.pentaho.di.job.entry.loadSave.LoadSaveTester; import org.pentaho.runtime.test.RuntimeTester; import org.pentaho.runtime.test.action.RuntimeTestActionService; public class JobEntryHadoopCopyFilesLoadSaveTest { private NamedClusterService namedClusterService; private RuntimeTestActionService runtimeTestActionService; private RuntimeTester runtimeTester; @Before public void setup() throws ClusterInitializationException { namedClusterService = mock( NamedClusterService.class ); when( namedClusterService.getClusterTemplate() ).thenReturn( new NamedClusterImpl() ); mock( NamedClusterServiceLocator.class ); runtimeTester = mock( RuntimeTester.class ); runtimeTestActionService = mock( RuntimeTestActionService.class ); new JobEntryHadoopCopyFiles( namedClusterService, runtimeTestActionService, runtimeTester ); } @Test public void testLoadSave() throws KettleException { List commonAttributes = Arrays.asList( "copy_empty_folders", "arg_from_previous", "overwrite_files", "include_subfolders", "remove_source_files", "add_result_filesname", "destination_is_a_file", "create_destination_folder" ); Map getterMap = new HashMap(); getterMap.put( "copy_empty_folders", "isCopyEmptyFolders" ); getterMap.put( "arg_from_previous", "isArgFromPrevious" ); getterMap.put( "overwrite_files", "isoverwrite_files" ); getterMap.put( "include_subfolders", "isIncludeSubfolders" ); getterMap.put( "remove_source_files", "isRemoveSourceFiles" ); getterMap.put( "add_result_filesname", "isAddresultfilesname" ); getterMap.put( "destination_is_a_file", "isDestinationIsAFile" ); getterMap.put( "create_destination_folder", "isCreateDestinationFolder" ); Map setterMap = new HashMap(); setterMap.put( "copy_empty_folders", "setCopyEmptyFolders" ); setterMap.put( "arg_from_previous", "setArgFromPrevious" ); setterMap.put( "overwrite_files", "setoverwrite_files" ); setterMap.put( "include_subfolders", "setIncludeSubfolders" ); setterMap.put( "remove_source_files", "setRemoveSourceFiles" ); setterMap.put( "add_result_filesname", "setAddresultfilesname" ); setterMap.put( "destination_is_a_file", "setDestinationIsAFile" ); setterMap.put( "create_destination_folder", "setCreateDestinationFolder" ); LoadSaveTester tester = new 
LoadSaveTester( JobEntryHadoopCopyFiles.class, commonAttributes, getterMap, setterMap ) { @Override public JobEntryHadoopCopyFiles createMeta() { return new JobEntryHadoopCopyFiles( namedClusterService, runtimeTestActionService, runtimeTester ); } }; tester.testSerialization(); } } ================================================ FILE: kettle-plugins/hdfs/core/src/test/java/org/pentaho/big/data/kettle/plugins/hdfs/job/JobEntryHadoopCopyFilesTest.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.hdfs.job; import org.junit.Before; import org.junit.Test; import org.pentaho.di.job.entries.copyfiles.JobEntryCopyFiles; import org.pentaho.hadoop.shim.api.cluster.NamedCluster; import org.pentaho.hadoop.shim.api.cluster.NamedClusterService; import org.pentaho.di.core.hadoop.HadoopSpoonPlugin; import org.pentaho.metastore.api.IMetaStore; import org.pentaho.runtime.test.RuntimeTester; import org.pentaho.runtime.test.action.RuntimeTestActionService; import java.util.Map; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNull; import static org.junit.Assert.assertNotNull; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.verifyNoMoreInteractions; import static org.mockito.Mockito.when; /** * Created by bryan on 11/23/15. */ public class JobEntryHadoopCopyFilesTest { private JobEntryHadoopCopyFiles jobEntryHadoopCopyFiles; private String testName; private NamedClusterService namedClusterManager; private String testUrl; private String testNcName; private IMetaStore metaStore; private Map mappings; private NamedCluster namedCluster; @Before public void setup() { testName = "testName"; namedClusterManager = mock( NamedClusterService.class ); jobEntryHadoopCopyFiles = new JobEntryHadoopCopyFiles( namedClusterManager, mock( RuntimeTestActionService.class ), mock( RuntimeTester.class ) ); jobEntryHadoopCopyFiles.setName( testName ); testUrl = "testUrl"; testNcName = "testNcName"; metaStore = mock( IMetaStore.class ); mappings = mock( Map.class ); namedCluster = mock( NamedCluster.class ); } @Test public void testLoadUrlNullNcName() { when( namedClusterManager.getNamedClusterByName( testNcName, metaStore ) ).thenReturn( null ); String loadURL = jobEntryHadoopCopyFiles.loadURL( testUrl, null, metaStore, mappings ); assertNotNull( loadURL ); verifyNoMoreInteractions( mappings ); } @Test public void testLoadUrlNull() { when( namedClusterManager.getNamedClusterByName( testNcName, metaStore ) ).thenReturn( null ); String loadURL = jobEntryHadoopCopyFiles.loadURL( null, null, metaStore, mappings ); assertNull( loadURL ); verifyNoMoreInteractions( mappings ); } @Test public void testLoadUrlNotNullForNotCluster() { testNcName = "LOCAL-SOURCE-FILE-1"; when( namedClusterManager.getNamedClusterByName( testNcName, metaStore ) ).thenReturn( null ); String loadURL = jobEntryHadoopCopyFiles.loadURL( testUrl, testNcName, metaStore, mappings ); assertNotNull( loadURL ); assertEquals( testUrl, loadURL ); verify( mappings ).put( testUrl, testNcName ); } @Test public void testLoadUrlMapRNull() { when( 
namedClusterManager.getNamedClusterByName( testNcName, metaStore ) ).thenReturn( namedCluster ); when( namedCluster.isMapr() ).thenReturn( true ); assertNull( jobEntryHadoopCopyFiles.loadURL( testUrl, testNcName, metaStore, mappings ) ); verifyNoMoreInteractions( mappings ); } @Test public void testLoadUrlMapRNotNullNoPrefix() { when( namedClusterManager.getNamedClusterByName( testNcName, metaStore ) ).thenReturn( namedCluster ); when( namedCluster.isMapr() ).thenReturn( true ); String testNewUrl = "testNewUrl"; when( namedCluster.processURLsubstitution( testUrl, metaStore, jobEntryHadoopCopyFiles.getVariables() ) ) .thenReturn( testNewUrl ); assertEquals( testNewUrl, jobEntryHadoopCopyFiles.loadURL( testUrl, testNcName, metaStore, mappings ) ); verify( mappings ).put( testNewUrl, testNcName ); assertEquals( testUrl, jobEntryHadoopCopyFiles.fileFolderUrlMappings.get( testNewUrl ) ); } @Test public void testLoadUrlMapRNotNullPrefix() { when( namedClusterManager.getNamedClusterByName( testNcName, metaStore ) ).thenReturn( namedCluster ); when( namedCluster.isMapr() ).thenReturn( true ); String testNewUrl = HadoopSpoonPlugin.MAPRFS_SCHEME + "://" + "testNewUrl"; when( namedCluster.processURLsubstitution( testUrl, metaStore, jobEntryHadoopCopyFiles.getVariables() ) ) .thenReturn( testNewUrl ); assertEquals( testNewUrl, jobEntryHadoopCopyFiles.loadURL( testUrl, testNcName, metaStore, mappings ) ); verify( mappings ).put( testNewUrl, testNcName ); assertEquals( testUrl, jobEntryHadoopCopyFiles.fileFolderUrlMappings.get( testNewUrl ) ); } @Test public void testLoadUrlNotMapR() { when( namedClusterManager.getNamedClusterByName( testNcName, metaStore ) ).thenReturn( namedCluster ); when( namedCluster.isMapr() ).thenReturn( false ); String testNewUrl = HadoopSpoonPlugin.HDFS_SCHEME + "://" + "testNewUrl"; when( namedCluster.processURLsubstitution( testUrl, metaStore, jobEntryHadoopCopyFiles.getVariables() ) ) .thenReturn( testNewUrl ); assertEquals( testNewUrl, jobEntryHadoopCopyFiles.loadURL( testUrl, testNcName, metaStore, mappings ) ); verify( mappings ).put( testNewUrl, testNcName ); assertEquals( testUrl, jobEntryHadoopCopyFiles.fileFolderUrlMappings.get( testNewUrl ) ); } @Test public void testLoadUrlHdfsEMPTY_SOURCE_URL() { when( namedClusterManager.getNamedClusterByName( testNcName, metaStore ) ).thenReturn( namedCluster ); when( namedCluster.isMapr() ).thenReturn( false ); String testNewUrl = HadoopSpoonPlugin.HDFS_SCHEME + "://" + "testNewUrl"; when( namedCluster.processURLsubstitution( testUrl, metaStore, jobEntryHadoopCopyFiles.getVariables() ) ) .thenReturn( testNewUrl ); String prefixUrlSource = JobEntryCopyFiles.SOURCE_URL + 8 + "-"; String testPrefixSourceUrl = prefixUrlSource + testUrl; String expectedPrefixSourceLoadUrl = prefixUrlSource + testNewUrl; assertEquals( expectedPrefixSourceLoadUrl, jobEntryHadoopCopyFiles.loadURL( testPrefixSourceUrl, testNcName, metaStore, mappings ) ); verify( mappings ).put( expectedPrefixSourceLoadUrl, testNcName ); assertEquals( testPrefixSourceUrl, jobEntryHadoopCopyFiles.fileFolderUrlMappings.get( expectedPrefixSourceLoadUrl ) ); } @Test public void testLoadUrlHdfsEMPTY_DEST_URL() { when( namedClusterManager.getNamedClusterByName( testNcName, metaStore ) ).thenReturn( namedCluster ); when( namedCluster.isMapr() ).thenReturn( false ); String testNewUrl = HadoopSpoonPlugin.HDFS_SCHEME + "://" + "testNewUrl"; when( namedCluster.processURLsubstitution( testUrl, metaStore, jobEntryHadoopCopyFiles.getVariables() ) ) .thenReturn( testNewUrl ); String 
prefixUrlDest = JobEntryCopyFiles.DEST_URL + 5 + "-"; String testPrefixDestUrl = prefixUrlDest + testUrl; String expectedPrefixDestLoadUrl = prefixUrlDest + testNewUrl; assertEquals( expectedPrefixDestLoadUrl, jobEntryHadoopCopyFiles.loadURL( testPrefixDestUrl, testNcName, metaStore, mappings ) ); verify( mappings ).put( expectedPrefixDestLoadUrl, testNcName ); assertEquals( testPrefixDestUrl, jobEntryHadoopCopyFiles.fileFolderUrlMappings.get( expectedPrefixDestLoadUrl ) ); } @Test public void testSaveUrlMappingsKeyMisses() { String testUrl = "/src/path/"; jobEntryHadoopCopyFiles.fileFolderUrlMappings.clear(); // populating with other values jobEntryHadoopCopyFiles.fileFolderUrlMappings.put( "KeyA", "ValueA" ); jobEntryHadoopCopyFiles.fileFolderUrlMappings.put( "KeyB", "ValueB" ); jobEntryHadoopCopyFiles.fileFolderUrlMappings.put( "/src", "ValueC" ); jobEntryHadoopCopyFiles.fileFolderUrlMappings.put( "/src/path/anotherPath", "ValueD" ); assertEquals( testUrl, jobEntryHadoopCopyFiles.saveURL( testUrl, testNcName, metaStore, mappings ) ); assertNull( testUrl, jobEntryHadoopCopyFiles.saveURL( null, testNcName, metaStore, mappings ) ); } @Test public void testSaveUrlMappingsKeyHits() { String testUrl = "/src/path/"; String testUrlSubstituted = "hdfs://someHostname/src/path"; // populating with other values jobEntryHadoopCopyFiles.fileFolderUrlMappings.put( "KeyA", "ValueA" ); jobEntryHadoopCopyFiles.fileFolderUrlMappings.put( "KeyB", "ValueB" ); jobEntryHadoopCopyFiles.fileFolderUrlMappings.put( "/src", "ValueC" ); jobEntryHadoopCopyFiles.fileFolderUrlMappings.put( "/src/path/anotherPath", "ValueD" ); jobEntryHadoopCopyFiles.fileFolderUrlMappings.put( testUrlSubstituted, testUrl ); assertEquals( testUrl, jobEntryHadoopCopyFiles.saveURL( testUrl, testNcName, metaStore, mappings ) ); } } ================================================ FILE: kettle-plugins/hdfs/core/src/test/java/org/pentaho/big/data/kettle/plugins/hdfs/trans/HadoopFileInputDialogTest.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. 
* * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.hdfs.trans; import org.junit.Test; import static org.junit.Assert.assertEquals; import static org.mockito.ArgumentMatchers.any; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; public class HadoopFileInputDialogTest { @Test public void getFriendlyURIsUnsecure() { HadoopFileInputDialog dialog = mock( HadoopFileInputDialog.class ); when( dialog.getFriendlyURIs( any() ) ).thenCallRealMethod(); String[] files = new String[] { "hdfs://clouderaserver01.pentaho.com:8020/wordcount/parse/weblogsA.txt", "hdfs://clouderaserver02.pentaho.com:8020/wordcount/parse/weblogsB.txt", "hdfs://clouderaserver03.pentaho.com:8020/wordcount/parse/weblogsC.txt" }; String[] friendly = dialog.getFriendlyURIs( files ); assertEquals( files[0], friendly[0] ); assertEquals( files[1], friendly[1] ); assertEquals( files[2], friendly[2] ); } @Test public void getFriendlyURIsSecure() { HadoopFileInputDialog dialog = mock( HadoopFileInputDialog.class ); when( dialog.getFriendlyURIs( any() ) ).thenCallRealMethod(); String[] files = new String[] { "hdfs://user01:pwd01@clouderaserver01.pentaho.com:8020/wordcount/parse/weblogsA.txt", "hdfs://user02@clouderaserver02.pentaho.com:8020/wordcount/parse/weblogsB.txt", "hdfs://user03:pwd03@clouderaserver03.pentaho.com:8020/wordcount/parse/weblogsC.txt" }; String[] friendly = dialog.getFriendlyURIs( files ); assertEquals( "hdfs://user01:***@clouderaserver01.pentaho.com:8020/wordcount/parse/weblogsA.txt", friendly[0] ); assertEquals( files[1], friendly[1] ); assertEquals( "hdfs://user03:***@clouderaserver03.pentaho.com:8020/wordcount/parse/weblogsC.txt", friendly[2] ); } } ================================================ FILE: kettle-plugins/hdfs/core/src/test/java/org/pentaho/big/data/kettle/plugins/hdfs/trans/HadoopFileInputMetaTest.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. 
* * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.hdfs.trans; import org.apache.commons.vfs2.provider.URLFileName; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.jdom.Document; import org.jdom.Element; import org.jdom.input.SAXBuilder; import org.junit.Assert; import org.junit.Before; import org.junit.Test; import org.mockito.Mockito; import org.pentaho.di.core.KettleEnvironment; import org.pentaho.di.core.fileinput.FileInputList; import org.pentaho.di.core.logging.KettleLogStore; import org.pentaho.di.core.variables.VariableSpace; import org.pentaho.di.core.variables.Variables; import org.pentaho.di.core.xml.XMLHandler; import org.pentaho.di.repository.Repository; import org.pentaho.di.trans.TransMeta; import org.pentaho.di.trans.step.StepMeta; import org.pentaho.di.trans.steps.file.BaseFileField; import org.pentaho.di.trans.steps.fileinput.text.TextFileFilter; import org.pentaho.di.trans.steps.named.cluster.NamedClusterEmbedManager; import org.pentaho.hadoop.shim.api.cluster.ClusterInitializationException; import org.pentaho.hadoop.shim.api.cluster.NamedCluster; import org.pentaho.hadoop.shim.api.cluster.NamedClusterService; import org.pentaho.hadoop.shim.api.hdfs.HadoopFileSystemLocator; import org.pentaho.metastore.api.IMetaStore; import org.w3c.dom.Node; import java.net.URI; import java.net.URL; import java.util.Collections; import java.util.HashMap; import java.util.Locale; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.eq; import static org.mockito.Mockito.anyInt; import static org.mockito.ArgumentMatchers.anyString; import static org.mockito.ArgumentMatchers.nullable; import static org.mockito.Mockito.doCallRealMethod; import static org.mockito.Mockito.doReturn; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; /** * @author Vasilina Terehova */ public class HadoopFileInputMetaTest { public static final String TEST_CLUSTER_NAME = "TEST-CLUSTER-NAME"; public static final String SAMPLE_HADOOP_FILE_INPUT_STEP = "sample-hadoop-file-input-step.xml"; public static final String TEST_FILE_NAME = "test-file-name"; public static final String TEST_FOLDER_NAME = "test-folder-name"; private static Logger logger = LogManager.getLogger( HadoopFileInputMetaTest.class ); // for message resolution private NamedClusterService namedClusterService; private HadoopFileSystemLocator hadoopFileSystemLocator; @Before public void setUp() throws Exception { namedClusterService = mock( NamedClusterService.class ); hadoopFileSystemLocator = mock( HadoopFileSystemLocator.class ); } /** * BACKLOG-7972 - Hadoop File Output: Hadoop Clusters dropdown doesn't preserve selected cluster after reopen a * transformation after changing signature of loadSource in , saveSource in HadoopFileOutputMeta wasn't called * * @throws Exception */ @Test public void testSaveSourceCalledFromGetXml() throws Exception { HadoopFileInputMeta hadoopFileInputMeta = new HadoopFileInputMeta( namedClusterService, hadoopFileSystemLocator ); hadoopFileInputMeta.allocateFiles( 1 ); //create spy to check whether saveSource now is called HadoopFileInputMeta spy = initHadoopMetaInput( hadoopFileInputMeta ); HashMap 
mappings = new HashMap<>(); mappings.put( TEST_FILE_NAME, HadoopFileOutputMetaTest.TEST_CLUSTER_NAME ); spy.setNamedClusterURLMapping( mappings ); StepMeta parentStepMeta = mock( StepMeta.class ); TransMeta parentTransMeta = mock( TransMeta.class ); when( parentStepMeta.getParentTransMeta() ).thenReturn( parentTransMeta ); NamedClusterEmbedManager embedManager = mock( NamedClusterEmbedManager.class ); when( parentTransMeta.getNamedClusterEmbedManager() ).thenReturn( embedManager ); spy.setParentStepMeta( parentStepMeta ); String xml = spy.getXML(); Document hadoopOutputMetaStep = HadoopFileOutputMetaTest.getDocumentFromString( xml, new SAXBuilder() ); Element fileElement = HadoopFileOutputMetaTest.getChildElementByTagName( hadoopOutputMetaStep.getRootElement(), "file" ); //getting from file node cluster attribute value Element clusterNameElement = HadoopFileOutputMetaTest.getChildElementByTagName( fileElement, HadoopFileInputMeta.SOURCE_CONFIGURATION_NAME ); assertEquals( TEST_CLUSTER_NAME, clusterNameElement.getValue() ); //check that saveSource is called from TextFileOutputMeta verify( spy, times( 1 ) ).saveSource( any( StringBuilder.class ), any( String.class ) ); verify( embedManager ).registerUrl( "test-file-name" ); } private HadoopFileInputMeta initHadoopMetaInput( HadoopFileInputMeta hadoopFileInputMeta ) { HadoopFileInputMeta spy = Mockito.spy( hadoopFileInputMeta ); when( spy.getFileName() ).thenReturn( new String[] {} ); spy.setFileName( new String[] { TEST_FILE_NAME } ); spy.setFilter( new TextFileFilter[] {} ); spy.inputFields = new BaseFileField[] {}; spy.inputFiles.fileMask = new String[] { TEST_FILE_NAME }; spy.inputFiles.fileRequired = new String[] { TEST_FILE_NAME }; spy.inputFiles.includeSubFolders = new String[] { TEST_FOLDER_NAME }; spy.content.dateFormatLocale = Locale.getDefault(); return spy; } public Node loadNodeFromXml( String fileName ) throws Exception { URL resource = getClass().getClassLoader().getResource( fileName ); if ( resource == null ) { logger.error( "no file " + fileName + " found in resources" ); throw new IllegalArgumentException( "no file " + fileName + " found in resources" ); } else { return XMLHandler.getSubNode( XMLHandler.loadXMLFile( resource ), "entry" ); } } @Test public void testLoadSourceCalledFromLoadXml() throws Exception { HadoopFileInputMeta hadoopFileInputMeta = new HadoopFileInputMeta( namedClusterService, hadoopFileSystemLocator ); //set required data for step - empty HadoopFileInputMeta spy = Mockito.spy( hadoopFileInputMeta ); Node node = loadNodeFromXml( SAMPLE_HADOOP_FILE_INPUT_STEP ); //create spy to check whether saveSource now is called IMetaStore metaStore = mock( IMetaStore.class ); spy.loadXML( node, Collections.emptyList(), metaStore ); assertEquals( TEST_CLUSTER_NAME, hadoopFileInputMeta.getNamedClusterURLMapping().get( TEST_FILE_NAME ) ); verify( spy, times( 1 ) ).loadSource( any( Node.class ), any( Node.class ), anyInt(), any( IMetaStore.class ) ); } @Test public void testLoadSourceRepForUrlRefresh() throws Exception { final String URL_FROM_CLUSTER = "urlFromCluster"; IMetaStore mockMetaStore = mock( IMetaStore.class ); NamedCluster mockNamedCluster = mock( NamedCluster.class ); when( mockNamedCluster.processURLsubstitution( any(), eq( mockMetaStore ), any() ) ).thenReturn( URL_FROM_CLUSTER ); when( namedClusterService.getNamedClusterByName( TEST_CLUSTER_NAME, mockMetaStore ) ).thenReturn( mockNamedCluster ); Repository mockRep = mock( Repository.class ); when( mockRep.getJobEntryAttributeString( any(), eq( 0 ), eq( 
"source_configuration_name" ) ) ).thenReturn( TEST_CLUSTER_NAME ); HadoopFileInputMeta hadoopFileInputMeta = new HadoopFileInputMeta( namedClusterService, hadoopFileSystemLocator ); when( mockRep.getStepAttributeString( any(), eq( 0 ), eq( "file_name" ) ) ).thenReturn( URL_FROM_CLUSTER ); assertEquals( URL_FROM_CLUSTER, hadoopFileInputMeta.loadSourceRep( mockRep, null, 0, mockMetaStore ) ); } @Test public void testGetFileInputList() { KettleLogStore.init(); final String URL_FROM_CLUSTER = "urlFromCluster"; StepMeta parentStepMeta = mock( StepMeta.class ); IMetaStore mockMetaStore = mock( IMetaStore.class ); NamedCluster mockNamedCluster = mock( NamedCluster.class ); TransMeta parentTransMeta = mock( TransMeta.class ); when( parentStepMeta.getParentTransMeta() ).thenReturn( parentTransMeta ); when( parentTransMeta.getMetaStore() ).thenReturn( mockMetaStore ); when( mockNamedCluster.processURLsubstitution( any(), eq( mockMetaStore ), any() ) ).thenReturn( URL_FROM_CLUSTER ); when( namedClusterService.getNamedClusterByName( TEST_CLUSTER_NAME, mockMetaStore ) ).thenReturn( mockNamedCluster ); HadoopFileInputMeta hadoopFileInputMetaSpy = initHadoopMetaInput( new HadoopFileInputMeta( namedClusterService, hadoopFileSystemLocator ) ); hadoopFileInputMetaSpy.environment = new String[] { TEST_CLUSTER_NAME }; hadoopFileInputMetaSpy.setParentStepMeta( parentStepMeta ); doReturn( new FileInputList() ).when( hadoopFileInputMetaSpy ).createFileList( any( VariableSpace.class ) ); hadoopFileInputMetaSpy.getFileInputList( new Variables() ); assertEquals( "urlFromCluster", hadoopFileInputMetaSpy.inputFiles.fileName[0] ); } @Test public void testGetUrl() { final HadoopFileInputMeta meta = Mockito.mock( HadoopFileInputMeta.class ); final URLFileName mockFileName = Mockito.mock( URLFileName.class ); final String scheme = "hdfs"; final String hostName = "svqxbdcn6cdh512n1.pentahoqa.com"; final String rootUrl = scheme + "://" + hostName + ":8020/"; final String path = "wordcount/input"; final String url = rootUrl + path; Mockito.doReturn( hostName ).when( mockFileName ).getHostName(); Mockito.doReturn( scheme ).when( mockFileName ).getScheme(); Mockito.doReturn( mockFileName ).when( meta ).getUrlFileName( url ); Mockito.doReturn( rootUrl ).when( mockFileName ).getRootURI(); Mockito.doCallRealMethod().when( meta ).getUrlHostName( url ); Mockito.doCallRealMethod().when( meta ).getUrlPath( url ); Assert.assertEquals( hostName, meta.getUrlHostName( url ) ); Assert.assertEquals( "/" + path, meta.getUrlPath( url ) ); } @Test public void testEncryption() throws Exception { KettleEnvironment.init(); HadoopFileInputMeta meta = new HadoopFileInputMeta(); String url = "hdfs://user:password@myhost:8020/myfile"; String encrypted = meta.encryptDecryptPassword( url, HadoopFileInputMeta.EncryptDirection.ENCRYPT ); assertTrue( !encrypted.contains( "password" ) ); assertEquals( url, meta.encryptDecryptPassword( encrypted, HadoopFileInputMeta.EncryptDirection.DECRYPT ) ); } @Test public void testNoPassword() throws Exception { KettleEnvironment.init(); HadoopFileInputMeta meta = new HadoopFileInputMeta(); String url = "hdfs://user@myhost:8020/myfile"; String encrypted = meta.encryptDecryptPassword( url, HadoopFileInputMeta.EncryptDirection.ENCRYPT ); assertEquals( url, meta.encryptDecryptPassword( encrypted, HadoopFileInputMeta.EncryptDirection.DECRYPT ) ); } } ================================================ FILE: kettle-plugins/hdfs/core/src/test/java/org/pentaho/big/data/kettle/plugins/hdfs/trans/HadoopFileOutputDialogTest.java 
================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.hdfs.trans; import org.apache.commons.vfs2.VFS; import org.apache.commons.vfs2.impl.StandardFileSystemManager; import org.apache.commons.vfs2.provider.UriParser; import org.eclipse.swt.custom.CCombo; import org.junit.After; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import org.mockito.MockedStatic; import org.mockito.Mockito; import org.mockito.junit.MockitoJUnitRunner; import org.mockito.stubbing.Answer; import org.pentaho.di.core.Const; import static org.junit.Assert.assertEquals; import static org.mockito.Mockito.doCallRealMethod; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.any; import static org.mockito.internal.verification.VerificationModeFactory.times; import static org.mockito.ArgumentMatchers.eq; /** * Created by bryan on 11/23/15. */ @RunWith( MockitoJUnitRunner.class ) public class HadoopFileOutputDialogTest { private static final String HDFS_PREFIX = "hdfs"; private static final String MY_HOST_URL = "//myhost:8020"; private StandardFileSystemManager fsm; private MockedStatic uriParserMockedStatic; private MockedStatic vfsMockedStatic; @Before public void setUp() throws Exception { uriParserMockedStatic = Mockito.mockStatic( UriParser.class ); vfsMockedStatic = Mockito.mockStatic( VFS.class ); fsm = mock( StandardFileSystemManager.class ); vfsMockedStatic.when( VFS::getManager ).thenReturn( fsm ); } @After public void cleanup() { vfsMockedStatic.close(); uriParserMockedStatic.close(); Mockito.validateMockitoUsage(); } @Test public void testGetUrlPathHdfsPrefix() { String prefix = HDFS_PREFIX; String pathBase = MY_HOST_URL; String expected = "/path/to/file"; String fullPath = prefix + ":" + pathBase + expected; buildExtractSchemeMocks( prefix, fullPath, pathBase + expected ); assertEquals( expected, HadoopFileOutputDialog.getUrlPath( fullPath ) ); } @Test public void testGetUrlPathMapRPRefix() { String prefix = "maprfs"; String pathBase = "//"; String expected = "/path/to/file"; String fullPath = prefix + ":" + pathBase + expected; buildExtractSchemeMocks( prefix, fullPath, pathBase + expected ); assertEquals( expected, HadoopFileOutputDialog.getUrlPath( fullPath ) ); } @Test public void testGetUrlPathSpecialPrefix() { String prefix = "mySpecialPrefix"; String pathBase = "//host"; String expected = "/path/to/file"; String fullPath = prefix + ":" + pathBase + expected; buildExtractSchemeMocks( prefix, fullPath, pathBase + expected ); assertEquals( expected, HadoopFileOutputDialog.getUrlPath( fullPath ) ); } @Test public void testGetUrlPathNoPrefix() { String expected = "/path/to/file"; assertEquals( expected, HadoopFileOutputDialog.getUrlPath( expected ) ); } @Test public void testGetUrlPathVariablePrefix() { String expected = "${myTestVar}"; assertEquals( expected, HadoopFileOutputDialog.getUrlPath( expected ) ); } @Test public void testGetUrlPathRootPath() { String prefix = HDFS_PREFIX; String pathBase = MY_HOST_URL; String expected = "/"; String fullPath = prefix + ":" + pathBase + expected; 
buildExtractSchemeMocks( prefix, fullPath, pathBase + expected ); assertEquals( expected, HadoopFileOutputDialog.getUrlPath( fullPath ) ); } @Test public void testGetUrlPathRootPathWithoutSlash() { String prefix = HDFS_PREFIX; String pathBase = MY_HOST_URL; String expected = "/"; String fullPath = prefix + ":" + pathBase; buildExtractSchemeMocks( prefix, fullPath, pathBase ); assertEquals( expected, HadoopFileOutputDialog.getUrlPath( fullPath ) ); } @Test public void testFillWithSupportedDateFormats() { HadoopFileOutputDialog dialog = mock( HadoopFileOutputDialog.class ); CCombo combo = mock( CCombo.class ); String[] dates = Const.getDateFormats(); assertEquals( 20, dates.length ); // currently there are 20 date formats, 10 of which contain ':' characters which are illegal in hadoop filenames // if the formats returned change, the numbers on this test should be adjusted doCallRealMethod().when( dialog ).fillWithSupportedDateFormats( any(), any() ); dialog.fillWithSupportedDateFormats( combo, dates ); verify( combo, times( 10 ) ).add( any() ); } private Answer buildSchemeAnswer( String prefix, String buildPath ) { return invocation -> { Object[] args = invocation.getArguments(); ( (StringBuilder) args[2] ).append( buildPath ); return prefix; }; } private void buildExtractSchemeMocks( String prefix, String fullPath, String pathWithoutPrefix ) { uriParserMockedStatic.when( () -> UriParser.extractScheme( any( String[].class ), eq( fullPath ) ) ).thenReturn( prefix ); uriParserMockedStatic.when( () -> UriParser.extractScheme( any( String[].class ), eq( fullPath ), any( StringBuilder.class ) ) ).thenAnswer( buildSchemeAnswer( prefix, pathWithoutPrefix ) ); } } ================================================ FILE: kettle-plugins/hdfs/core/src/test/java/org/pentaho/big/data/kettle/plugins/hdfs/trans/HadoopFileOutputMetaTest.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. 
* * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.hdfs.trans; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.jdom.Document; import org.jdom.Element; import org.jdom.JDOMException; import org.jdom.filter.ElementFilter; import org.jdom.input.SAXBuilder; import org.junit.Before; import org.junit.Test; import org.mockito.Mockito; import org.pentaho.hadoop.shim.api.cluster.NamedCluster; import org.pentaho.hadoop.shim.api.cluster.NamedClusterService; import org.pentaho.di.core.xml.XMLHandler; import org.pentaho.di.metastore.MetaStoreConst; import org.pentaho.di.repository.Repository; import org.pentaho.di.trans.steps.textfileoutput.TextFileField; import org.pentaho.metastore.api.IMetaStore; import org.pentaho.runtime.test.RuntimeTester; import org.pentaho.runtime.test.action.RuntimeTestActionService; import org.w3c.dom.Node; import java.io.ByteArrayInputStream; import java.io.IOException; import java.net.URL; import static org.junit.Assert.assertTrue; import static org.junit.Assert.assertEquals; import static org.mockito.ArgumentMatchers.isNull; import static org.mockito.AdditionalMatchers.or; import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.eq; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.times; /** * Created by bryan on 11/23/15. */ public class HadoopFileOutputMetaTest { public static final String TEST_CLUSTER_NAME = "TEST-CLUSTER-NAME"; public static final String SAMPLE_HADOOP_FILE_OUTPUT_STEP = "sample-hadoop-file-output-step.xml"; public static final String ENTRY_TAG_NAME = "entry"; public static final String EMBEDDED_XML = "embed"; public static final String NAMED_CLUSTER_TAG = "NamedCluster"; private static final Logger logger = LogManager.getLogger( HadoopFileOutputMetaTest.class ); // for message resolution private NamedClusterService namedClusterService; private RuntimeTestActionService runtimeTestActionService; private RuntimeTester runtimeTester; @Before public void setUp() throws Exception { namedClusterService = mock( NamedClusterService.class ); runtimeTestActionService = mock( RuntimeTestActionService.class ); runtimeTester = mock( RuntimeTester.class ); MetaStoreConst.disableMetaStore = false; } @Test public void testProcessedUrl() { String sourceConfigurationName = "scName"; String desiredUrl = "desiredUrl"; String url = "url"; HadoopFileOutputMeta hadoopFileOutputMeta = new HadoopFileOutputMeta( namedClusterService, runtimeTestActionService, runtimeTester ); IMetaStore metaStore = mock( IMetaStore.class ); assertTrue( null == hadoopFileOutputMeta.getProcessedUrl( metaStore, null ) ); hadoopFileOutputMeta.setSourceConfigurationName( sourceConfigurationName ); NamedCluster nc = mock( NamedCluster.class ); when( namedClusterService.getNamedClusterByName( eq( sourceConfigurationName ), any()) ) .thenReturn( null ); assertEquals( url, hadoopFileOutputMeta.getProcessedUrl( metaStore, url ) ); when( namedClusterService.getNamedClusterByName( eq( sourceConfigurationName ), any()) ) .thenReturn( nc ); when( nc.processURLsubstitution( eq( url ), any(), any()) ) .thenReturn( desiredUrl ); assertEquals( desiredUrl, hadoopFileOutputMeta.getProcessedUrl( metaStore, url ) ); } @Test public void testProcessedUrlUsingEmbeddedCluster() { String desiredUrl = "desiredUrl"; String url = "url"; 
HadoopFileOutputMeta hadoopFileOutputMeta = new HadoopFileOutputMeta( namedClusterService, runtimeTestActionService, runtimeTester ); NamedCluster nc = mock( NamedCluster.class ); NamedCluster nc2 = mock( NamedCluster.class ); MetaStoreConst.disableMetaStore = true; when( namedClusterService.getClusterTemplate() ).thenReturn( nc ); when( nc.fromXmlForEmbed( any() ) ).thenReturn( nc2 ); when( nc2.processURLsubstitution( eq( url ), any(), any() ) ).thenReturn( desiredUrl ); assertEquals( desiredUrl, hadoopFileOutputMeta.getProcessedUrl( null, url ) ); } /** * BACKLOG-7972 - Hadoop File Output: Hadoop Clusters dropdown doesn't preserve selected cluster after reopen a * transformation after changing signature of loadSource in , saveSource in HadoopFileOutputMeta wasn't called * * @throws Exception */ @Test public void testSaveSourceCalledFromGetXml() throws Exception { HadoopFileOutputMeta hadoopFileOutputMeta = new HadoopFileOutputMeta( namedClusterService, runtimeTestActionService, runtimeTester ); hadoopFileOutputMeta.setSourceConfigurationName( TEST_CLUSTER_NAME ); //set required data for step - empty hadoopFileOutputMeta.setOutputFields( new TextFileField[] {} ); //create spy to check whether saveSource now is called HadoopFileOutputMeta spy = Mockito.spy( hadoopFileOutputMeta ); //getting from structure file node Document hadoopOutputMetaStep = getDocumentFromString( spy.getXML(), new SAXBuilder() ); Element fileElement = getChildElementByTagName( hadoopOutputMetaStep.getRootElement(), "file" ); //getting from file node cluster attribute value Element clusterNameElement = getChildElementByTagName( fileElement, HadoopFileInputMeta.SOURCE_CONFIGURATION_NAME ); assertEquals( TEST_CLUSTER_NAME, clusterNameElement.getValue() ); //check that saveSource is called from TextFileOutputMeta verify( spy, times( 1 ) ).saveSource( any( StringBuilder.class ), or( any( String.class ), isNull() ) ); } public Node getChildElementByTagName( String fileName ) throws Exception { URL resource = getClass().getClassLoader().getResource( fileName ); if ( resource == null ) { logger.error( "no file " + fileName + " found in resources" ); throw new IllegalArgumentException( "no file " + fileName + " found in resources" ); } else { return XMLHandler.getSubNode( XMLHandler.loadXMLFile( resource ), "entry" ); } } public static Element getChildElementByTagName( Element element, String tagName ) { return (Element) element.getContent( new ElementFilter( tagName ) ).get( 0 ); } @Test public void testLoadSourceCalledFromReadData() throws Exception { HadoopFileOutputMeta hadoopFileOutputMeta = new HadoopFileOutputMeta( namedClusterService, runtimeTestActionService, runtimeTester ); hadoopFileOutputMeta.setSourceConfigurationName( TEST_CLUSTER_NAME ); //set required data for step - empty hadoopFileOutputMeta.setOutputFields( new TextFileField[] {} ); HadoopFileOutputMeta spy = Mockito.spy( hadoopFileOutputMeta ); Node node = getChildElementByTagName( SAMPLE_HADOOP_FILE_OUTPUT_STEP ); //create spy to check whether saveSource now is called from readData spy.readData( node ); assertEquals( TEST_CLUSTER_NAME, hadoopFileOutputMeta.getSourceConfigurationName() ); verify( spy, times( 1 ) ).loadSource( any( Node.class ), or( any( IMetaStore.class ), isNull() ) ); } @Test public void testLoadSourceRepForUrlRefresh() throws Exception { final String URL_FROM_CLUSTER = "urlFromCluster"; IMetaStore mockMetaStore = mock( IMetaStore.class ); NamedCluster mockNamedCluster = mock( NamedCluster.class ); when( 
mockNamedCluster.processURLsubstitution( any(), eq( mockMetaStore ), any() ) ).thenReturn( URL_FROM_CLUSTER ); when( namedClusterService.getNamedClusterByName( TEST_CLUSTER_NAME, mockMetaStore ) ) .thenReturn( mockNamedCluster ); Repository mockRep = mock( Repository.class ); when( mockRep.getStepAttributeString( any(), eq( "source_configuration_name" ) ) ).thenReturn( TEST_CLUSTER_NAME ); HadoopFileOutputMeta hadoopFileOutputMeta = new HadoopFileOutputMeta( namedClusterService, runtimeTestActionService, runtimeTester ); hadoopFileOutputMeta.setSourceConfigurationName( TEST_CLUSTER_NAME ); when( mockRep.getStepAttributeString( any(), eq( "file_name" ) ) ).thenReturn( "Bad Url In Repo" ); assertEquals( URL_FROM_CLUSTER, hadoopFileOutputMeta.loadSourceRep( mockRep, null, mockMetaStore ) ); } @Test public void testSaveSourceCalledFromGetXmlWithEmbeddedCluster() throws Exception { HadoopFileOutputMeta hadoopFileOutputMeta = new HadoopFileOutputMeta( namedClusterService, runtimeTestActionService, runtimeTester ); hadoopFileOutputMeta.setSourceConfigurationName( TEST_CLUSTER_NAME ); // set required data for step - empty hadoopFileOutputMeta.setOutputFields( new TextFileField[] {} ); // create spy to check whether saveSource now is called HadoopFileOutputMeta spy = Mockito.spy( hadoopFileOutputMeta ); // getting from structure file node NamedCluster mockNamedCluster = mock( NamedCluster.class ); when( namedClusterService.getNamedClusterByName( eq( TEST_CLUSTER_NAME ), any() ) ).thenReturn( mockNamedCluster ); when( mockNamedCluster.toXmlForEmbed( NAMED_CLUSTER_TAG ) ).thenReturn( "<" + NAMED_CLUSTER_TAG + ">" + EMBEDDED_XML + "" ); Document hadoopOutputMetaStep = getDocumentFromString( spy.getXML(), new SAXBuilder() ); Element clusterElement = getChildElementByTagName( hadoopOutputMetaStep.getRootElement(), NAMED_CLUSTER_TAG ); // getting from file node cluster attribute value assertEquals( EMBEDDED_XML, clusterElement.getValue() ); // check that saveSource is called from TextFileOutputMeta verify( spy, times( 1 ) ).saveSource( any( StringBuilder.class ), or( any( String.class ), isNull() ) ); } public static Document getDocumentFromString( String xmlStep, SAXBuilder jdomBuilder ) throws JDOMException, IOException { String xml = XMLHandler.openTag( ENTRY_TAG_NAME ) + xmlStep + XMLHandler.closeTag( ENTRY_TAG_NAME ); return jdomBuilder.build( new ByteArrayInputStream( xml.getBytes() ) ); } } ================================================ FILE: kettle-plugins/hdfs/core/src/test/java/org/pentaho/big/data/kettle/plugins/hdfs/trans/analyzer/HadoopBaseStepAnalyzerTest.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. 
* * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.hdfs.trans.analyzer; import org.junit.Before; import org.junit.Ignore; import org.junit.Test; import org.mockito.Mock; import org.pentaho.big.data.kettle.plugins.hdfs.trans.HadoopFileMeta; import org.pentaho.di.core.bowl.DefaultBowl; import org.pentaho.di.trans.step.BaseStepMeta; import org.pentaho.di.trans.step.StepMeta; import org.pentaho.di.trans.steps.file.BaseFileMeta; import org.pentaho.di.trans.TransMeta; import org.pentaho.dictionary.DictionaryConst; import org.pentaho.metaverse.api.IComponentDescriptor; import org.pentaho.metaverse.api.IMetaverseBuilder; import org.pentaho.metaverse.api.IMetaverseNode; import org.pentaho.metaverse.api.INamespace; import org.pentaho.metaverse.api.MetaverseComponentDescriptor; import org.pentaho.metaverse.api.MetaverseObjectFactory; import org.pentaho.metaverse.api.model.IExternalResourceInfo; import java.util.Set; import static org.junit.Assert.*; import static org.mockito.Mockito.*; public abstract class HadoopBaseStepAnalyzerTest { protected A analyzer; @Mock private INamespace mockNamespace; private IComponentDescriptor descriptor; private M meta; @Mock private TransMeta transMeta; @Before public void setUp() throws Exception { // commented out since testCreateResourceNode is now in ignore state, fix may be related to service // when( mockNamespace.getParentNamespace() ).thenReturn( mockNamespace ); descriptor = new MetaverseComponentDescriptor( "test", DictionaryConst.NODE_TYPE_TRANS_STEP, mockNamespace ); analyzer = spy( getAnalyzer() ); analyzer.setDescriptor( descriptor ); IMetaverseBuilder builder = mock( IMetaverseBuilder.class ); analyzer.setMetaverseBuilder( builder ); analyzer.setObjectFactory( new MetaverseObjectFactory() ); meta = getMetaMock(); StepMeta mockStepMeta = mock( StepMeta.class ); lenient().when( meta.getParentStepMeta() ).thenReturn( mockStepMeta ); lenient().when( transMeta.getBowl() ).thenReturn( DefaultBowl.getInstance() ); lenient().when( mockStepMeta.getParentTransMeta() ).thenReturn( transMeta ); } protected abstract A getAnalyzer(); protected abstract M getMetaMock(); @Test public void testGetUsedFields() throws Exception { assertNull( analyzer.getUsedFields( getMetaMock() ) ); } @Test public void testGetResourceInputNodeType() throws Exception { assertEquals( DictionaryConst.NODE_TYPE_FILE_FIELD, analyzer.getResourceInputNodeType() ); } @Test public void testGetResourceOutputNodeType() throws Exception { assertEquals( DictionaryConst.NODE_TYPE_FILE_FIELD, analyzer.getResourceOutputNodeType() ); } @Test public void testGetSupportedSteps() { Set> types = analyzer.getSupportedSteps(); assertNotNull( types ); assertEquals( types.size(), 1 ); assertTrue( types.contains( getMetaClass() ) ); } protected abstract Class getMetaClass(); @Ignore @Test public void testCreateResourceNode() throws Exception { // local IExternalResourceInfo localResource = mock( IExternalResourceInfo.class ); when( localResource.getName() ).thenReturn( "file:///Users/home/tmp/xyz.ktr" ); analyzer.validateState( descriptor, getMetaMock() ); IMetaverseNode resourceNode = analyzer.createResourceNode( getMetaMock(), localResource ); assertNotNull( resourceNode ); assertEquals( DictionaryConst.NODE_TYPE_FILE, resourceNode.getType() ); // remote final HadoopFileMeta hMeta = ( HadoopFileMeta ) getMetaMock(); IExternalResourceInfo remoteResource = mock( IExternalResourceInfo.class ); final String 
hostName = "foo.com"; final String filePath = "hdfs://" + hostName + "/file.csv"; when( remoteResource.getName() ).thenReturn( filePath ); when( hMeta.getUrlHostName( filePath ) ).thenReturn( hostName ); resourceNode = analyzer.createResourceNode( getMetaMock(), remoteResource ); assertNotNull( resourceNode ); assertEquals( DictionaryConst.NODE_TYPE_FILE, resourceNode.getType() ); } } ================================================ FILE: kettle-plugins/hdfs/core/src/test/java/org/pentaho/big/data/kettle/plugins/hdfs/trans/analyzer/HadoopFileInputStepAnalyzerTest.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.hdfs.trans.analyzer; import org.junit.Test; import org.junit.runner.RunWith; import org.mockito.Mock; import org.mockito.junit.MockitoJUnitRunner; import org.pentaho.big.data.kettle.plugins.hdfs.trans.HadoopFileInputMeta; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; @RunWith(MockitoJUnitRunner.Silent.class) public class HadoopFileInputStepAnalyzerTest extends HadoopBaseStepAnalyzerTest { @Mock private HadoopFileInputMeta meta; @Override protected HadoopFileInputStepAnalyzer getAnalyzer() { return new HadoopFileInputStepAnalyzer(); } @Override protected HadoopFileInputMeta getMetaMock() { return meta; } @Override protected Class getMetaClass() { return HadoopFileInputMeta.class; } @Test public void testIsOutput() { assertFalse( analyzer.isOutput() ); } @Test public void testIsInput() { assertTrue( analyzer.isInput() ); } } ================================================ FILE: kettle-plugins/hdfs/core/src/test/java/org/pentaho/big/data/kettle/plugins/hdfs/trans/analyzer/HadoopFileOutputStepAnalyzerTest.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. 
* * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.hdfs.trans.analyzer; import org.junit.Test; import org.junit.runner.RunWith; import org.mockito.Mock; import org.mockito.junit.MockitoJUnitRunner; import org.pentaho.big.data.kettle.plugins.hdfs.trans.HadoopFileOutputMeta; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; @RunWith(MockitoJUnitRunner.Silent.class) public class HadoopFileOutputStepAnalyzerTest extends HadoopBaseStepAnalyzerTest { @Mock private HadoopFileOutputMeta meta; @Override protected HadoopFileOutputStepAnalyzer getAnalyzer() { return new HadoopFileOutputStepAnalyzer(); } @Override protected HadoopFileOutputMeta getMetaMock() { return meta; } @Override protected Class getMetaClass() { return HadoopFileOutputMeta.class; } @Test public void testIsOutput() { assertTrue( analyzer.isOutput() ); } @Test public void testIsInput() { assertFalse( analyzer.isInput() ); } } ================================================ FILE: kettle-plugins/hdfs/core/src/test/java/org/pentaho/big/data/kettle/plugins/hdfs/vfs/HadoopVfsConnectionTest.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.hdfs.vfs; import org.junit.Test; import org.pentaho.hadoop.shim.api.cluster.NamedCluster; import org.pentaho.di.core.Props; import org.pentaho.di.core.exception.KettleFileException; import org.pentaho.di.core.variables.Variables; import static org.junit.Assert.assertEquals; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; /** * Created by bryan on 11/23/15. 
*/ public class HadoopVfsConnectionTest { /** * */ private static final String DEFAULT_VALUE = "default"; /** * */ private static final String EXPECTED_URL = "hdfs://testUser:testPassword@testHost:testPort"; private static final String TEST_PASSWORD = "testPassword"; private static final String TEST_USER = "testUser"; private static final String TEST_PORT = "testPort"; private static final String TEST_HOST = "testHost"; private static final String EMPTY = ""; @Test public void testDefaultConstructor() { HadoopVfsConnection hdfsConnection = new HadoopVfsConnection(); assertEquals( EMPTY, hdfsConnection.getHostname() ); assertEquals( EMPTY, hdfsConnection.getPassword() ); assertEquals( EMPTY, hdfsConnection.getPort() ); assertEquals( EMPTY, hdfsConnection.getUsername() ); } @Test public void testConstructorWithParameters() { HadoopVfsConnection hdfsConnection = new HadoopVfsConnection( TEST_HOST, TEST_PORT, TEST_USER, TEST_PASSWORD ); assertEquals( TEST_HOST, hdfsConnection.getHostname() ); assertEquals( TEST_PORT, hdfsConnection.getPort() ); assertEquals( TEST_USER, hdfsConnection.getUsername() ); assertEquals( TEST_PASSWORD, hdfsConnection.getPassword() ); } @Test public void testConstructorWithNamedClusterAsParameter() { HadoopVfsConnection hdfsConnection = new HadoopVfsConnection( getTestNamedCluster(), new Variables() ); assertEquals( TEST_HOST, hdfsConnection.getHostname() ); assertEquals( TEST_PORT, hdfsConnection.getPort() ); assertEquals( TEST_USER, hdfsConnection.getUsername() ); assertEquals( TEST_PASSWORD, hdfsConnection.getPassword() ); } @Test public void testConstructorWithNamedClusterAsParameter_HostNameNull() { NamedCluster testNamedCluster = mock( NamedCluster.class ); when( testNamedCluster.getHdfsHost() ).thenReturn( null ); when( testNamedCluster.getHdfsPort() ).thenReturn( TEST_PORT ); when( testNamedCluster.getHdfsUsername() ).thenReturn( TEST_USER ); when( testNamedCluster.getHdfsPassword() ).thenReturn( TEST_PASSWORD ); when( testNamedCluster.decodePassword( TEST_PASSWORD ) ).thenReturn( TEST_PASSWORD ); HadoopVfsConnection hdfsConnection = new HadoopVfsConnection( testNamedCluster, new Variables() ); assertEquals( EMPTY, hdfsConnection.getHostname() ); assertEquals( TEST_PORT, hdfsConnection.getPort() ); assertEquals( TEST_USER, hdfsConnection.getUsername() ); assertEquals( TEST_PASSWORD, hdfsConnection.getPassword() ); } @Test public void testConstructorWithNamedClusterAsParameter_PortNull() { NamedCluster testNamedCluster = mock( NamedCluster.class ); when( testNamedCluster.getHdfsHost() ).thenReturn( TEST_HOST ); when( testNamedCluster.getHdfsPort() ).thenReturn( null ); when( testNamedCluster.getHdfsUsername() ).thenReturn( TEST_USER ); when( testNamedCluster.getHdfsPassword() ).thenReturn( TEST_PASSWORD ); when( testNamedCluster.decodePassword( TEST_PASSWORD ) ).thenReturn( TEST_PASSWORD ); HadoopVfsConnection hdfsConnection = new HadoopVfsConnection( testNamedCluster, new Variables() ); assertEquals( TEST_HOST, hdfsConnection.getHostname() ); assertEquals( EMPTY, hdfsConnection.getPort() ); assertEquals( TEST_USER, hdfsConnection.getUsername() ); assertEquals( TEST_PASSWORD, hdfsConnection.getPassword() ); } @Test public void testConstructorWithNamedClusterAsParameter_UserNull() { NamedCluster testNamedCluster = mock( NamedCluster.class ); when( testNamedCluster.getHdfsHost() ).thenReturn( TEST_HOST ); when( testNamedCluster.getHdfsPort() ).thenReturn( TEST_PORT ); when( testNamedCluster.getHdfsUsername() ).thenReturn( null ); when( 
testNamedCluster.getHdfsPassword() ).thenReturn( TEST_PASSWORD ); when( testNamedCluster.decodePassword( TEST_PASSWORD ) ).thenReturn( TEST_PASSWORD ); HadoopVfsConnection hdfsConnection = new HadoopVfsConnection( testNamedCluster, new Variables() ); assertEquals( TEST_HOST, hdfsConnection.getHostname() ); assertEquals( TEST_PORT, hdfsConnection.getPort() ); assertEquals( EMPTY, hdfsConnection.getUsername() ); assertEquals( TEST_PASSWORD, hdfsConnection.getPassword() ); } @Test public void testConstructorWithNamedClusterAsParameter_PasswordNull() { NamedCluster testNamedCluster = mock( NamedCluster.class ); when( testNamedCluster.getHdfsHost() ).thenReturn( TEST_HOST ); when( testNamedCluster.getHdfsPort() ).thenReturn( TEST_PORT ); when( testNamedCluster.getHdfsUsername() ).thenReturn( TEST_USER ); when( testNamedCluster.getHdfsPassword() ).thenReturn( null ); HadoopVfsConnection hdfsConnection = new HadoopVfsConnection( testNamedCluster, new Variables() ); assertEquals( TEST_HOST, hdfsConnection.getHostname() ); assertEquals( TEST_PORT, hdfsConnection.getPort() ); assertEquals( TEST_USER, hdfsConnection.getUsername() ); assertEquals( EMPTY, hdfsConnection.getPassword() ); } @Test public void testConstructorWithNamedClusterNullAsParameter() { HadoopVfsConnection hdfsConnection = new HadoopVfsConnection( null, new Variables() ); assertEquals( EMPTY, hdfsConnection.getHostname() ); assertEquals( EMPTY, hdfsConnection.getPort() ); assertEquals( EMPTY, hdfsConnection.getUsername() ); assertEquals( EMPTY, hdfsConnection.getPassword() ); } @Test public void testGetConnectionStringForHDFSScheme() { HadoopVfsConnection hdfsConnection = new HadoopVfsConnection( getTestNamedCluster(), new Variables() ); assertEquals( EXPECTED_URL, hdfsConnection.getConnectionString( "hdfs" ) ); } @Test public void testGetConnectionStringForNullInputScheme() { HadoopVfsConnection hdfsConnection = new HadoopVfsConnection( getTestNamedCluster(), new Variables() ); assertEquals( EXPECTED_URL, hdfsConnection.getConnectionString( null ) ); } @Test public void testGetConnectionStringForEmptyInputScheme() { HadoopVfsConnection hdfsConnection = new HadoopVfsConnection( getTestNamedCluster(), new Variables() ); assertEquals( EXPECTED_URL, hdfsConnection.getConnectionString( EMPTY ) ); } private NamedCluster getTestNamedCluster() { NamedCluster nCluster = mock( NamedCluster.class ); when( nCluster.getHdfsHost() ).thenReturn( TEST_HOST ); when( nCluster.getHdfsPort() ).thenReturn( TEST_PORT ); when( nCluster.getHdfsUsername() ).thenReturn( TEST_USER ); when( nCluster.getHdfsPassword() ).thenReturn( TEST_PASSWORD ); when( nCluster.decodePassword( TEST_PASSWORD ) ).thenReturn( TEST_PASSWORD ); return nCluster; } @Test public void testSetCustomParameters() throws KettleFileException { Props.init( 0 ); HadoopVfsConnection hdfsConnection = new HadoopVfsConnection( getTestNamedCluster(), new Variables() ); hdfsConnection.setCustomParameters( Props.getInstance() ); assertEquals( TEST_HOST, Props.getInstance().getCustomParameter( "HadoopVfsFileChooserDialog.host", DEFAULT_VALUE ) ); assertEquals( TEST_PORT, Props.getInstance().getCustomParameter( "HadoopVfsFileChooserDialog.port", DEFAULT_VALUE ) ); assertEquals( TEST_USER, Props.getInstance().getCustomParameter( "HadoopVfsFileChooserDialog.user", DEFAULT_VALUE ) ); assertEquals( TEST_PASSWORD, Props.getInstance().getCustomParameter( "HadoopVfsFileChooserDialog.password", DEFAULT_VALUE ) ); } } ================================================ FILE: 
kettle-plugins/hdfs/core/src/test/java/org/pentaho/big/data/kettle/plugins/hdfs/vfs/HadoopVfsFileChooserDialogTest.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.hdfs.vfs; import org.eclipse.swt.widgets.Combo; import org.eclipse.swt.widgets.Composite; import org.eclipse.swt.widgets.Tree; import org.junit.After; import org.junit.Before; import org.junit.Test; import org.pentaho.hadoop.shim.api.cluster.NamedClusterService; import org.pentaho.big.data.plugins.common.ui.NamedClusterWidgetImpl; import org.pentaho.runtime.test.RuntimeTester; import org.pentaho.runtime.test.action.RuntimeTestActionService; import org.pentaho.vfs.ui.VfsBrowser; import org.pentaho.vfs.ui.VfsFileChooserDialog; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; import static org.mockito.Mockito.doNothing; import static org.mockito.Mockito.doCallRealMethod; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.times; import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.anyString; public class HadoopVfsFileChooserDialogTest { private HadoopVfsFileChooserDialog hadoopVfsFileChooserDialog = null; private static final Integer SELECTED_INDEX = -1; private static final String[] NAMED_CLUSTER_NAMES = {"name1", "name2", "name3"}; @Before public void Initialization() { hadoopVfsFileChooserDialog = mock( HadoopVfsFileChooserDialog.class ); } @After public void finalize() { hadoopVfsFileChooserDialog = null; } @Test public void testActivate() { doCallRealMethod().when( hadoopVfsFileChooserDialog ).activate(); VfsFileChooserDialog vfsFileChooserDialog = mock( VfsFileChooserDialog.class ); Combo combo = mock( Combo.class ); Tree tree = mock( Tree.class ); VfsBrowser vfsBrowser = mock( VfsBrowser.class ); doNothing().when( combo ).setText( anyString() ); vfsFileChooserDialog.openFileCombo = combo; doNothing().when( tree ).removeAll(); vfsBrowser.fileSystemTree = tree; vfsFileChooserDialog.vfsBrowser = vfsBrowser; doCallRealMethod().when( vfsFileChooserDialog ).setRootFile( null ); doCallRealMethod().when( vfsFileChooserDialog ).setInitialFile( null ); hadoopVfsFileChooserDialog.vfsFileChooserDialog = vfsFileChooserDialog; NamedClusterWidgetImplExtend namedClusterWidgetImpl = mock( NamedClusterWidgetImplExtend.class ); Combo namedClusterCombo = mock( Combo.class ); when( namedClusterCombo.getSelectionIndex() ).thenReturn( SELECTED_INDEX ); doNothing().when( namedClusterCombo ).removeAll(); doNothing().when( namedClusterCombo ).setItems( any() ); doNothing().when( namedClusterCombo ).select( SELECTED_INDEX ); when( namedClusterWidgetImpl.getNameClusterCombo() ).thenReturn( namedClusterCombo ); when( namedClusterWidgetImpl.getNamedClusterNames() ).thenReturn( NAMED_CLUSTER_NAMES ); doCallRealMethod().when( namedClusterWidgetImpl ).initiate(); doNothing().when( namedClusterWidgetImpl ).setSelectedNamedCluster( anyString() ); when( hadoopVfsFileChooserDialog.getNamedClusterWidget() ).thenReturn( namedClusterWidgetImpl ); hadoopVfsFileChooserDialog.activate(); verify( namedClusterWidgetImpl, times( 1 ) ).initiate(); } private 
class NamedClusterWidgetImplExtend extends NamedClusterWidgetImpl { public NamedClusterWidgetImplExtend( Composite parent, boolean showLabel, NamedClusterService namedClusterService, RuntimeTestActionService runtimeTestActionService, RuntimeTester clusterTester ) { super( parent, showLabel, namedClusterService, runtimeTestActionService, clusterTester, false ); } /*Overriding for visibility change only*/ @Override public String[] getNamedClusterNames() { return super.getNamedClusterNames(); } } } ================================================ FILE: kettle-plugins/hdfs/core/src/test/resources/graph.properties ================================================ blueprints.graph=com.tinkerpop.blueprints.impls.tg.TinkerGraph ================================================ FILE: kettle-plugins/hdfs/core/src/test/resources/sample-hadoop-file-input-step.xml ================================================ [Hadoop File Input step definition; the XML markup was not preserved in this extract. Recoverable values: file name "test-file-name", named cluster "TEST-CLUSTER-NAME", file mask "test-file-mask", format "None", encoding "en_US", plus assorted Y/N flags and zero counters.]
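A minimal sketch (not part of the repository sources) of reading a sample step XML like the one above with PDI's XMLHandler utilities. The tag names used here ("file", "name", "source_configuration_name") are assumptions for illustration only; the real tags are whatever HadoopFileInputMeta serializes.

import org.pentaho.di.core.xml.XMLHandler;
import org.w3c.dom.Document;
import org.w3c.dom.Node;

public class SampleHadoopInputStepXmlReader {
  public static void main( String[] args ) throws Exception {
    // Parse the sample resource; the path is assumed to be resolvable from the working directory.
    Document doc = XMLHandler.loadXMLFile( "sample-hadoop-file-input-step.xml" );
    Node step = doc.getDocumentElement();
    // Hypothetical tag names - adjust to whatever HadoopFileInputMeta actually writes in getXML().
    Node file = XMLHandler.getSubNode( step, "file" );
    System.out.println( "file name     : " + XMLHandler.getTagValue( file, "name" ) );
    System.out.println( "named cluster : " + XMLHandler.getTagValue( step, "source_configuration_name" ) );
  }
}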
================================================ FILE: kettle-plugins/hdfs/core/src/test/resources/sample-hadoop-file-output-step.xml ================================================ [Hadoop File Output step definition; the XML markup was not preserved in this extract. Recoverable values: named cluster "TEST-CLUSTER-NAME", one Y flag, a series of N flags, and a trailing 0.]
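A minimal usage sketch (not part of the repository sources) of the HadoopVfsConnection class exercised by HadoopVfsConnectionTest above. Per those tests, getConnectionString() produces URLs of the form scheme://user:password@host:port; the host, port, and credentials below are made-up illustration values.

import org.pentaho.big.data.kettle.plugins.hdfs.vfs.HadoopVfsConnection;

public class HadoopVfsConnectionExample {
  public static void main( String[] args ) {
    // Constructor and getConnectionString() are used exactly as in HadoopVfsConnectionTest.
    HadoopVfsConnection connection = new HadoopVfsConnection( "namenode.example.com", "8020", "devuser", "secret" );
    // Expected output, following the tests above: hdfs://devuser:secret@namenode.example.com:8020
    System.out.println( connection.getConnectionString( "hdfs" ) );
  }
}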
================================================ FILE: kettle-plugins/hdfs/pom.xml ================================================ 4.0.0 pentaho-big-data-kettle-plugins pentaho 11.1.0.0-SNAPSHOT pentaho-big-data-kettle-plugins-hdfs 11.1.0.0-SNAPSHOT pom Pentaho Community Edition Project: ${project.artifactId} a Pentaho open source project http://www.pentaho.com Apache License, Version 2.0 https://www.apache.org/licenses/LICENSE-2.0.txt repo A business-friendly OSS license assemblies core ================================================ FILE: kettle-plugins/hive/assemblies/plugin/pom.xml ================================================ 4.0.0 hive-assemblies pentaho 11.1.0.0-SNAPSHOT pdi-hive-plugin pom PDI Hive Plugin Distribution ${project.basedir}/src/main/resources ${project.build.directory}/assembly pentaho pdi-hive-core ${project.version} ================================================ FILE: kettle-plugins/hive/assemblies/plugin/src/assembly/assembly.xml ================================================ zip zip ${resources.directory} . true ${assembly.dir} . . pentaho:pdi-hive-core:jar false runtime . false false pentaho:pdi-hive-core:jar runtime false lib pentaho:pdi-hive-core:* org.apache.hive:hive ================================================ FILE: kettle-plugins/hive/assemblies/plugin/src/main/resources/version.xml ================================================ ${project.version} ================================================ FILE: kettle-plugins/hive/assemblies/pom.xml ================================================ 4.0.0 pentaho pentaho-big-data-kettle-plugins-hive 11.1.0.0-SNAPSHOT hive-assemblies pom PDI Hive Plugin Assemblies plugin ================================================ FILE: kettle-plugins/hive/core/pom.xml ================================================ 4.0.0 pentaho pentaho-big-data-kettle-plugins-hive 11.1.0.0-SNAPSHOT pdi-hive-core PDI Hive Core src/main/resources false src/main/resources-filtered true site org.slf4j slf4j-api provided org.pentaho commons-database-model ${commons-database.version} provided org.pentaho shim-api ${pentaho-hadoop-shims.version} org.pentaho.di.plugins pentaho-metastore-locator-api ${pdi.version} org.pentaho shim-api ${pentaho-hadoop-shims.version} pentaho-kettle kettle-core ${pdi.version} provided pentaho pentaho-platform-core ${pdi.version} provided pentaho pentaho-big-data-impl-cluster ${project.version} provided org.osgi osgi.core org.pentaho pentaho-hadoop-shims-common-services-api ${pentaho-hadoop-shims.version} org.hamcrest java-hamcrest 2.0.0.0 test org.mockito mockito-core ${mockito.version} test ================================================ FILE: kettle-plugins/hive/core/src/main/java/org/apache/hadoop/hive/jdbc/HiveDriver.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.apache.hadoop.hive.jdbc; import org.pentaho.big.data.kettle.plugins.hive.DummyDriver; /** * DummyDriver implementation to avoid CNF exception * when Hive2DatabaseMeta is loaded. See DummyDriver. 
*/ public class HiveDriver extends DummyDriver { } ================================================ FILE: kettle-plugins/hive/core/src/main/java/org/apache/hive/jdbc/HiveDriver.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.apache.hive.jdbc; import org.pentaho.big.data.kettle.plugins.hive.DummyDriver; /** * DummyDriver implementation to avoid CNF exception * when HiveDatabaseMeta is loaded. See DummyDriver. */ public class HiveDriver extends DummyDriver { } ================================================ FILE: kettle-plugins/hive/core/src/main/java/org/apache/hive/jdbc/HiveSimbaDriver.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.apache.hive.jdbc; import org.pentaho.big.data.kettle.plugins.hive.DummyDriver; /** * DummyDriver implementation to avoid CNF exception * when HiveSimbaDatabaseMeta is loaded. See DummyDriver. */ public class HiveSimbaDriver extends DummyDriver { } ================================================ FILE: kettle-plugins/hive/core/src/main/java/org/apache/hive/jdbc/ImpalaDriver.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.apache.hive.jdbc; import org.pentaho.big.data.kettle.plugins.hive.DummyDriver; /** * DummyDriver implementation to avoid CNF exception * when ImpalaDatabaseMeta is loaded. See DummyDriver. */ public class ImpalaDriver extends DummyDriver { } ================================================ FILE: kettle-plugins/hive/core/src/main/java/org/apache/hive/jdbc/ImpalaSimbaDriver.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.apache.hive.jdbc; import org.pentaho.big.data.kettle.plugins.hive.DummyDriver; /** * DummyDriver implementation to avoid CNF exception * when ImpalaSimbaDatabaseMeta is loaded. See DummyDriver. */ public class ImpalaSimbaDriver extends DummyDriver { } ================================================ FILE: kettle-plugins/hive/core/src/main/java/org/apache/hive/jdbc/SparkSqlSimbaDriver.java ================================================ /*! 
****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.apache.hive.jdbc; import org.pentaho.big.data.kettle.plugins.hive.DummyDriver; /** * DummyDriver implementation to avoid CNF exception * when SparkSqlSimbaDriver is loaded. See DummyDriver. */ public class SparkSqlSimbaDriver extends DummyDriver { } ================================================ FILE: kettle-plugins/hive/core/src/main/java/org/pentaho/big/data/kettle/plugins/hive/Activator.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.hive; import org.osgi.framework.BundleActivator; import org.osgi.framework.BundleContext; import org.osgi.framework.ServiceRegistration; import org.pentaho.database.IDatabaseDialect; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.List; import java.util.function.Supplier; import java.util.stream.Collectors; /** * Created by bryan on 5/6/16. */ public class Activator implements BundleActivator { private static final String I_DATABASE_DIALECT_CANONICAL_NAME = IDatabaseDialect.class.getCanonicalName(); private final List serviceRegistrations = new ArrayList<>(); private final List> databaseDialectSuppliers = Collections.unmodifiableList( Arrays .asList( Hive2DatabaseDialect::new, ImpalaDatabaseDialect::new, ImpalaSimbaDatabaseDialect::new, SparkSimbaDatabaseDialect::new ) ); @Override public void start( BundleContext context ) throws Exception { serviceRegistrations.addAll( databaseDialectSuppliers.stream() .map( supplier -> (ServiceRegistration) context .registerService( I_DATABASE_DIALECT_CANONICAL_NAME, supplier.get(), null ) ) .collect( Collectors.toList() ) ); } @Override public void stop( BundleContext context ) throws Exception { serviceRegistrations.forEach( ServiceRegistration::unregister ); serviceRegistrations.clear(); } } ================================================ FILE: kettle-plugins/hive/core/src/main/java/org/pentaho/big/data/kettle/plugins/hive/BaseSimbaDatabaseMeta.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. 
* * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.hive; import com.google.common.annotations.VisibleForTesting; import org.pentaho.hadoop.shim.api.cluster.NamedClusterService; import org.pentaho.hadoop.shim.api.jdbc.DriverLocator; import org.pentaho.di.core.database.DatabaseMeta; import org.pentaho.di.core.row.ValueMetaInterface; import org.pentaho.metastore.locator.api.MetastoreLocator; import static com.google.common.base.Strings.isNullOrEmpty; import static org.pentaho.big.data.kettle.plugins.hive.SimbaUrl.KRB_HOST_FQDN; import static org.pentaho.big.data.kettle.plugins.hive.SimbaUrl.KRB_SERVICE_NAME; abstract class BaseSimbaDatabaseMeta extends Hive2DatabaseMeta { @VisibleForTesting static final String URL_IS_CONFIGURED_THROUGH_JNDI = "Url is configured through JNDI"; BaseSimbaDatabaseMeta( DriverLocator driverLocator, NamedClusterService namedClusterService, MetastoreLocator metastoreLocator ) { super( driverLocator, namedClusterService, metastoreLocator ); } BaseSimbaDatabaseMeta( DriverLocator driverLocator, NamedClusterService namedClusterService ) { super( driverLocator, namedClusterService ); } protected abstract String getJdbcPrefix(); @Override public abstract String getDriverClass(); @Override public int[] getAccessTypeList() { return new int[] { DatabaseMeta.TYPE_ACCESS_NATIVE, DatabaseMeta.TYPE_ACCESS_JNDI }; } @Override public String getURL( String hostname, String port, String databaseName ) { return SimbaUrl.Builder.create() .withAccessType( getAccessType() ) .withDatabaseName( databaseName ) .withPort( port ) .withDefaultPort( getDefaultDatabasePort() ) .withHostname( hostname ) .withJdbcPrefix( getJdbcPrefix() ) .withUsername( getUsername() ) .withPassword( getPassword() ) .withIsKerberos( isKerberos() ) .build() .getURL(); } private String getExtraProperty( String key ) { return getAttributes().getProperty( ATTRIBUTE_PREFIX_EXTRA_OPTION + getPluginId() + "." + key ); } private String getProperty( String key ) { return getAttributes().getProperty( key ); } /** * This method assumes that Hive has no concept of primary and technical keys and auto increment columns. We are * ignoring the tk, pk and useAutoinc parameters. */ @Override public String getFieldDefinition( ValueMetaInterface v, String tk, String pk, boolean useAutoinc, boolean addFieldname, boolean addCr ) { StringBuilder retval = new StringBuilder(); String fieldname = v.getName(); int length = v.getLength(); int precision = v.getPrecision(); if ( addFieldname ) { retval.append( fieldname ).append( ' ' ); } int type = v.getType(); switch ( type ) { case ValueMetaInterface.TYPE_BOOLEAN: retval.append( "BOOLEAN" ); break; case ValueMetaInterface.TYPE_DATE: retval.append( "DATE" ); break; case ValueMetaInterface.TYPE_TIMESTAMP: retval.append( "TIMESTAMP" ); break; case ValueMetaInterface.TYPE_STRING: retval.append( "VARCHAR" ); break; case ValueMetaInterface.TYPE_NUMBER: case ValueMetaInterface.TYPE_INTEGER: case ValueMetaInterface.TYPE_BIGNUMBER: // Integer values... if ( precision == 0 ) { if ( length > 9 ) { if ( length < 19 ) { // can hold signed values between -9223372036854775808 and 9223372036854775807 // 18 significant digits retval.append( "BIGINT" ); } else { retval.append( "FLOAT" ); } } else { retval.append( "INT" ); } } else { // Floating point values... if ( length > 15 ) { retval.append( "FLOAT" ); } else { // A double-precision floating-point number is accurate to approximately 15 decimal places. 
// http://mysql.mirrors-r-us.net/doc/refman/5.1/en/numeric-type-overview.html retval.append( "DOUBLE" ); } } break; } return retval.toString(); } /** * Assume kerberos if any of the kerb props have been set. */ private boolean isKerberos() { return !( isNullOrEmpty( getProperty( KRB_HOST_FQDN ) ) && isNullOrEmpty( getExtraProperty( KRB_HOST_FQDN ) ) && isNullOrEmpty( getProperty( KRB_SERVICE_NAME ) ) && isNullOrEmpty( getExtraProperty( KRB_SERVICE_NAME ) ) ); } } ================================================ FILE: kettle-plugins/hive/core/src/main/java/org/pentaho/big/data/kettle/plugins/hive/DatabaseMetaWithVersion.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.hive; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.pentaho.hadoop.shim.api.jdbc.DriverLocator; import org.pentaho.di.core.database.BaseDatabaseMeta; import org.pentaho.di.core.database.DatabaseMeta; import org.pentaho.platform.api.data.DBDatasourceServiceException; import org.pentaho.platform.api.data.IDBDatasourceService; import org.pentaho.platform.engine.core.system.PentahoSystem; import javax.sql.DataSource; import java.sql.Connection; import java.sql.DatabaseMetaData; import java.sql.Driver; import java.sql.SQLException; /** * Created by bryan on 4/14/16. */ public abstract class DatabaseMetaWithVersion extends BaseDatabaseMeta { private static final Logger logger = LogManager.getLogger( DatabaseMetaWithVersion.class ); private final DriverLocator driverLocator; protected DatabaseMetaWithVersion( DriverLocator driverLocator ) { this.driverLocator = driverLocator; } @Override public abstract String getURL( String hostname, String port, String databaseName ); /** * Check that the version of the driver being used is at least the driver you want. 
If you do not care about the minor * version, pass in a 0 (The assumption being that the minor version will ALWAYS be 0 or greater) * * @return true: the version being used is equal to or newer than the one you requested false: the version being used * is older than the one you requested */ protected boolean isDriverVersion( int majorVersion, int minorVersion ) { int driverMajorVersion; int driverMinorVersion; // If it is a JNDI connection if ( getAccessType() == DatabaseMeta.TYPE_ACCESS_JNDI ) { IDBDatasourceService dss = PentahoSystem.get( IDBDatasourceService.class ); DataSource dataSource = null; try { dataSource = dss.getDataSource( this.getDatabaseName() ); } catch ( DBDatasourceServiceException e ) { logger.error( e.getMessage(), e ); } DatabaseMetaData meta = null; try ( Connection connection = dataSource.getConnection() ) { meta = connection.getMetaData(); } catch ( SQLException e ) { logger.error( e.getMessage(), e ); } driverMajorVersion = meta.getDriverMajorVersion(); driverMinorVersion = meta.getDriverMinorVersion(); // if it is a JDBC or ODBC connection } else { String url = getURL( "localhost", "10000", "default" ); Driver driver = driverLocator.getDriver( url ); driverMajorVersion = driver.getMajorVersion(); driverMinorVersion = driver.getMinorVersion(); } return driverMajorVersion > majorVersion || ( driverMajorVersion == majorVersion && driverMinorVersion >= minorVersion ); } } ================================================ FILE: kettle-plugins/hive/core/src/main/java/org/pentaho/big/data/kettle/plugins/hive/DummyDriver.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.hive; import java.sql.Connection; import java.sql.Driver; import java.sql.DriverPropertyInfo; import java.sql.SQLException; import java.sql.SQLFeatureNotSupportedException; import java.util.Properties; import java.util.logging.Logger; /** * DummyDriver is a bare Driver implementation used as a way * to avoid ClassNotFoundException when kettle attempts to load * the class associated with each meta. * * The classes which extend DummyDriver have the same unique * names as the name exposed by .getDriverClass() in the * DatabaseMeta implementation. * * Created by bryan on 3/30/16. */ public class DummyDriver implements Driver { @Override public Connection connect( String url, Properties info ) throws SQLException { return null; } @Override public boolean acceptsURL( String url ) throws SQLException { return false; } @Override public DriverPropertyInfo[] getPropertyInfo( String url, Properties info ) throws SQLException { return new DriverPropertyInfo[ 0 ]; } @Override public int getMajorVersion() { return 0; } @Override public int getMinorVersion() { return 0; } @Override public boolean jdbcCompliant() { return false; } @Override public Logger getParentLogger() throws SQLFeatureNotSupportedException { return null; } } ================================================ FILE: kettle-plugins/hive/core/src/main/java/org/pentaho/big/data/kettle/plugins/hive/Hive2DatabaseDialect.java ================================================ /*! 
****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.hive; import org.pentaho.database.DatabaseDialectException; import org.pentaho.database.IValueMeta; import org.pentaho.database.dialect.AbstractDatabaseDialect; import org.pentaho.database.model.DatabaseAccessType; import org.pentaho.database.model.DatabaseType; import org.pentaho.database.model.IDatabaseConnection; import org.pentaho.database.model.IDatabaseType; public class Hive2DatabaseDialect extends AbstractDatabaseDialect { public Hive2DatabaseDialect() { super(); } /** * UID for serialization */ private static final long serialVersionUID = -8456961348836455937L; protected static final int DEFAULT_PORT = 10000; private static final IDatabaseType DBTYPE = new DatabaseType( "Hadoop Hive 2", "HIVE2", DatabaseAccessType.getList( DatabaseAccessType.NATIVE, DatabaseAccessType.JNDI ), DEFAULT_PORT, "http://www.cloudera.com/content/support/en/documentation/cloudera-impala/cloudera-impala-documentation-v1" + "-latest.html" ); public IDatabaseType getDatabaseType() { return DBTYPE; } @Override public String getNativeDriver() { return "org.apache.hive.jdbc.HiveDriver"; } @Override public String getURL( IDatabaseConnection connection ) throws DatabaseDialectException { StringBuffer urlBuffer = new StringBuffer( getNativeJdbcPre() ); /* * String username = connection.getUsername(); if(username != null && !"".equals(username)) { * urlBuffer.append(username); String password = connection.getPassword(); if(password != null && * !"".equals(password)) { urlBuffer.append(":"); urlBuffer.append(password); } urlBuffer.append("@"); } */ urlBuffer.append( connection.getHostname() ); urlBuffer.append( ":" ); urlBuffer.append( connection.getDatabasePort() ); urlBuffer.append( "/" ); urlBuffer.append( connection.getDatabaseName() ); return urlBuffer.toString(); } @Override public String getNativeJdbcPre() { return "jdbc:hive2://"; } /** * Generates the SQL statement to add a column to the specified table * * @param tablename The table to add * @param v The column defined as a value * @param tk the name of the technical key field * @param use_autoinc whether or not this field uses auto increment * @param pk the name of the primary key field * @param semicolon whether or not to add a semi-colon behind the statement. * @return the SQL statement to add a column to the specified table */ @Override public String getAddColumnStatement( String tablename, IValueMeta v, String tk, boolean use_autoinc, String pk, boolean semicolon ) { return "ALTER TABLE " + tablename + " ADD " + getFieldDefinition( v, tk, pk, use_autoinc, true, false ); } /** * Generates the SQL statement to modify a column in the specified table * * @param tablename The table to add * @param v The column defined as a value * @param tk the name of the technical key field * @param use_autoinc whether or not this field uses auto increment * @param pk the name of the primary key field * @param semicolon whether or not to add a semi-colon behind the statement. 
* @return the SQL statement to modify a column in the specified table */ @Override public String getModifyColumnStatement( String tablename, IValueMeta v, String tk, boolean use_autoinc, String pk, boolean semicolon ) { return "ALTER TABLE " + tablename + " MODIFY " + getFieldDefinition( v, tk, pk, use_autoinc, true, false ); } @Override public String getFieldDefinition( IValueMeta v, String tk, String pk, boolean use_autoinc, boolean add_fieldname, boolean add_cr ) { String retval = ""; String fieldname = v.getName(); int length = v.getLength(); int precision = v.getPrecision(); if ( add_fieldname ) { retval += fieldname + " "; } int type = v.getType(); switch ( type ) { case IValueMeta.TYPE_DATE: retval += "DATETIME"; break; case IValueMeta.TYPE_BOOLEAN: if ( supportsBooleanDataType() ) { retval += "BOOLEAN"; } else { retval += "CHAR(1)"; } break; case IValueMeta.TYPE_NUMBER: case IValueMeta.TYPE_INTEGER: case IValueMeta.TYPE_BIGNUMBER: if ( fieldname.equalsIgnoreCase( tk ) || // Technical key fieldname.equalsIgnoreCase( pk ) // Primary key ) { if ( use_autoinc ) { retval += "BIGINT AUTO_INCREMENT NOT NULL PRIMARY KEY"; } else { retval += "BIGINT NOT NULL PRIMARY KEY"; } } else { // Integer values... if ( precision == 0 ) { if ( length > 9 ) { if ( length < 19 ) { // can hold signed values between -9223372036854775808 and 9223372036854775807 // 18 significant digits retval += "BIGINT"; } else { retval += "DECIMAL(" + length + ")"; } } else { retval += "INT"; } } else { // Floating point values... if ( length > 15 ) { retval += "DECIMAL(" + length; if ( precision > 0 ) { retval += ", " + precision; } retval += ")"; } else { // A double-precision floating-point number is accurate to approximately 15 decimal places. // http://mysql.mirrors-r-us.net/doc/refman/5.1/en/numeric-type-overview.html retval += "DOUBLE"; } } } break; case IValueMeta.TYPE_STRING: if ( length > 0 ) { if ( length == 1 ) { retval += "CHAR(1)"; } else if ( length < 256 ) { retval += "VARCHAR(" + length + ")"; } else if ( length < 65536 ) { retval += "TEXT"; } else if ( length < 16777215 ) { retval += "MEDIUMTEXT"; } else { retval += "LONGTEXT"; } } else { retval += "TINYTEXT"; } break; case IValueMeta.TYPE_BINARY: retval += "LONGBLOB"; break; default: retval += " UNKNOWN"; break; } if ( add_cr ) { retval += CR; } return retval; } @Override public String[] getUsedLibraries() { return new String[] { "pentaho-hadoop-hive-jdbc-shim-1.4-SNAPSHOT.jar" }; } @Override public int getDefaultDatabasePort() { return DEFAULT_PORT; } /* * (non-Javadoc) * * @see org.pentaho.database.dialect.AbstractDatabaseDialect#supportsSchemas() */ @Override public boolean supportsSchemas() { return false; } @Override public boolean initialize( String classname ) { return true; } } ================================================ FILE: kettle-plugins/hive/core/src/main/java/org/pentaho/big/data/kettle/plugins/hive/Hive2DatabaseMeta.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. 
* * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.hive; import com.google.common.annotations.VisibleForTesting; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.pentaho.big.data.api.jdbc.impl.DriverLocatorImpl; import org.pentaho.big.data.impl.cluster.NamedClusterManager; import org.pentaho.di.core.database.DatabaseMeta; import org.pentaho.di.core.plugins.DatabaseMetaPlugin; import org.pentaho.di.core.row.ValueMetaInterface; import org.pentaho.di.core.service.PluginServiceLoader; import org.pentaho.hadoop.shim.api.cluster.NamedClusterService; import org.pentaho.hadoop.shim.api.jdbc.DriverLocator; import org.pentaho.metastore.api.exceptions.MetaStoreException; import org.pentaho.metastore.locator.api.MetastoreLocator; import java.util.Collection; import java.util.List; import java.util.Map; @DatabaseMetaPlugin( type = "HIVE2", typeDescription = "Hadoop Hive 2/3" ) public class Hive2DatabaseMeta extends DatabaseMetaWithVersion { public static final String URL_PREFIX = "jdbc:hive2://"; public static final String SELECT_COUNT_1_FROM = "select count(1) from "; public static final String ALIAS_SUFFIX = "_col"; public static final String VIEW = "VIEW"; public static final String VIRTUAL_VIEW = "VIRTUAL_VIEW"; public static final String TRUNCATE_TABLE = "TRUNCATE TABLE "; public static final int[] ACCESS_TYPE_LIST = new int[] { DatabaseMeta.TYPE_ACCESS_NATIVE }; protected static final String JAR_FILE = "hive-jdbc-0.10.0-pentaho.jar"; protected static final String DRIVER_CLASS_NAME = "org.apache.hive.jdbc.HiveDriver"; protected NamedClusterService namedClusterService; protected MetastoreLocator metastoreLocator; private final Logger logger = LogManager.getLogger( Hive2DatabaseMeta.class ); public Hive2DatabaseMeta(){ this( DriverLocatorImpl.getInstance() ); } public Hive2DatabaseMeta( DriverLocator driverLocator ) { this( driverLocator, NamedClusterManager.getInstance() ); } //OSGi constructor public Hive2DatabaseMeta( DriverLocator driverLocator, NamedClusterService namedClusterService ) { super( driverLocator ); this.namedClusterService = namedClusterService; } public synchronized MetastoreLocator getMetastoreLocator() { if ( this.metastoreLocator == null ) { try { Collection metastoreLocators = PluginServiceLoader.loadServices( MetastoreLocator.class ); this.metastoreLocator = metastoreLocators.stream().findFirst().get(); } catch ( Exception e ) { logger.error( "Error getting metastore locator", e ); } } return this.metastoreLocator; } @VisibleForTesting protected Hive2DatabaseMeta( DriverLocator driverLocator, NamedClusterService namedClusterService, MetastoreLocator metastoreLocator ) { super( driverLocator ); this.namedClusterService = namedClusterService; this.metastoreLocator = metastoreLocator; } @Override public int[] getAccessTypeList() { return ACCESS_TYPE_LIST; } @Override public String getAddColumnStatement( String tablename, ValueMetaInterface v, String tk, boolean useAutoinc, String pk, boolean semicolon ) { return "ALTER TABLE " + tablename + " ADD " + getFieldDefinition( v, tk, pk, useAutoinc, true, false ); } @Override public String getDriverClass() { // !!! We will probably have to change this if we are providing our own driver, // i.e., before our code is committed to the Hadoop Hive project. return DRIVER_CLASS_NAME; } /** * This method assumes that Hive has no concept of primary and technical keys and auto increment columns. 
We are * ignoring the tk, pk and useAutoinc parameters. */ @Override public String getFieldDefinition( ValueMetaInterface v, String tk, String pk, boolean useAutoinc, boolean addFieldname, boolean addCr ) { String retval = ""; String fieldname = v.getName(); int length = v.getLength(); int precision = v.getPrecision(); if ( addFieldname ) { retval += fieldname + " "; } int type = v.getType(); switch ( type ) { case ValueMetaInterface.TYPE_BOOLEAN: retval += "BOOLEAN"; break; case ValueMetaInterface.TYPE_DATE: retval += "DATE"; break; case ValueMetaInterface.TYPE_TIMESTAMP: retval += "TIMESTAMP"; break; case ValueMetaInterface.TYPE_STRING: retval += "STRING"; break; case ValueMetaInterface.TYPE_NUMBER: case ValueMetaInterface.TYPE_INTEGER: case ValueMetaInterface.TYPE_BIGNUMBER: // Integer values... if ( precision == 0 ) { if ( length > 9 ) { if ( length < 19 ) { // can hold signed values between -9223372036854775808 and 9223372036854775807 // 18 significant digits retval += "BIGINT"; } else { retval += "FLOAT"; } } else { retval += "INT"; } } else { // Floating point values... if ( length > 15 ) { retval += "FLOAT"; } else { // A double-precision floating-point number is accurate to approximately 15 decimal places. // http://mysql.mirrors-r-us.net/doc/refman/5.1/en/numeric-type-overview.html retval += "DOUBLE"; } } break; } return retval; } @Override public String getModifyColumnStatement( String tablename, ValueMetaInterface v, String tk, boolean useAutoinc, String pk, boolean semicolon ) { return "ALTER TABLE " + tablename + " MODIFY " + getFieldDefinition( v, tk, pk, useAutoinc, true, false ); } @Override public String getURL( String hostname, String port, String databaseName ) { return URL_PREFIX + hostname + ":" + port + "/" + databaseName; } @Override public String[] getUsedLibraries() { return new String[] { JAR_FILE }; } /** * Build the SQL to count the number of rows in the passed table. * * @param tableName * @return */ @Override public String getSelectCountStatement( String tableName ) { return SELECT_COUNT_1_FROM + tableName; } @Override public String generateColumnAlias( int columnIndex, String suggestedName ) { return suggestedName; } /** * Quotes around table names are not valid Hive QL *

* return an empty string for the start quote */ public String getStartQuote() { return ""; } /** * Quotes around table names are not valid Hive QL *

* return an empty string for the end quote */ public String getEndQuote() { return ""; } /** * @return a list of table types to retrieve tables for the database */ @Override public String[] getTableTypes() { return null; } /** * @return a list of table types to retrieve views for the database */ @Override public String[] getViewTypes() { return new String[] { VIEW, VIRTUAL_VIEW }; } /** * @param tableName The table to be truncated. * @return The SQL statement to truncate a table: remove all rows from it without a transaction */ @Override public String getTruncateTableStatement( String tableName ) { return TRUNCATE_TABLE + tableName; } @Override public boolean supportsSetCharacterStream() { return false; } @Override public boolean supportsBatchUpdates() { return false; } @Override public boolean supportsTimeStampToDateConversion() { return false; } @Override public List getNamedClusterList() { try { return namedClusterService.listNames( getMetastoreLocator().getMetastore() ); } catch ( MetaStoreException e ) { e.printStackTrace(); return null; } } @Override public void putOptionalOptions( Map extraOptions ) { if ( getNamedCluster() != null && getNamedCluster().trim().length() > 0 ) { extraOptions.put( getPluginId() + ".pentahoNamedCluster", getNamedCluster() ); } } } ================================================ FILE: kettle-plugins/hive/core/src/main/java/org/pentaho/big/data/kettle/plugins/hive/Hive2SimbaDatabaseDialect.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.hive; import org.pentaho.database.DatabaseDialectException; import org.pentaho.database.model.DatabaseAccessType; import org.pentaho.database.model.DatabaseConnection; import org.pentaho.database.model.DatabaseType; import org.pentaho.database.model.IDatabaseConnection; import org.pentaho.database.model.IDatabaseType; import static com.google.common.base.Strings.isNullOrEmpty; import static org.pentaho.big.data.kettle.plugins.hive.SimbaUrl.KRB_HOST_FQDN; import static org.pentaho.big.data.kettle.plugins.hive.SimbaUrl.KRB_SERVICE_NAME; /** * User: Dzmitry Stsiapanau Date: 8/28/2015 Time: 10:23 */ public class Hive2SimbaDatabaseDialect extends Hive2DatabaseDialect { public static final String SOCKET_TIMEOUT_OPTION = "SocketTimeout"; public static final String DEFAULT_SOCKET_TIMEOUT = "10"; public Hive2SimbaDatabaseDialect() { super(); } /** * UID for serialization */ private static final long serialVersionUID = -8456961348836455937L; private static final IDatabaseType DBTYPE = new DatabaseType( "Hadoop Hive 2 (Simba)", "HIVE2SIMBA", DatabaseAccessType.getList( DatabaseAccessType.NATIVE, DatabaseAccessType.JNDI ), DEFAULT_PORT, "http://www.simba.com/connectors/apache-hadoop-hive-driver" ); public IDatabaseType getDatabaseType() { return DBTYPE; } @Override public String getNativeDriver() { return "org.apache.hive.jdbc.HiveSimbaDriver"; } @Override public String getURL( IDatabaseConnection databaseConnection ) throws DatabaseDialectException { return SimbaUrl.Builder.create() .withAccessType( databaseConnection.getAccessType().ordinal() ) .withDatabaseName( databaseConnection.getDatabaseName() ) .withPort( 
databaseConnection.getDatabasePort() ) .withDefaultPort( getDefaultDatabasePort() ) .withHostname( databaseConnection.getHostname() ) .withJdbcPrefix( getNativeJdbcPre() ) .withUsername( databaseConnection.getUsername() ) .withPassword( databaseConnection.getPassword() ) .withIsKerberos( isKerberos( databaseConnection ) ) .build() .getURL(); } private String getExtraProperty( String key, IDatabaseConnection databaseConnection ) { return databaseConnection.getAttributes() .get( DatabaseConnection.ATTRIBUTE_PREFIX_EXTRA_OPTION + getDatabaseType().getShortName() + "." + key ); } private String getProperty( String key, IDatabaseConnection databaseConnection ) { return databaseConnection.getExtraOptions().get( getDatabaseType().getShortName() + "." + key ); } @Override public String getNativeJdbcPre() { return "jdbc:hive2://"; } @Override public String[] getUsedLibraries() { return new String[] { "HiveJDBC41.jar" }; } @Override public boolean initialize( String classname ) { return true; } public boolean isKerberos( IDatabaseConnection databaseConnection ) { return !( isNullOrEmpty( getProperty( KRB_HOST_FQDN, databaseConnection ) ) && isNullOrEmpty( getExtraProperty( KRB_HOST_FQDN, databaseConnection ) ) && isNullOrEmpty( getProperty( KRB_SERVICE_NAME, databaseConnection ) ) && isNullOrEmpty( getExtraProperty( KRB_SERVICE_NAME, databaseConnection ) ) ); } } ================================================ FILE: kettle-plugins/hive/core/src/main/java/org/pentaho/big/data/kettle/plugins/hive/Hive2SimbaDatabaseMeta.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.hive; import com.google.common.annotations.VisibleForTesting; import org.pentaho.di.core.database.DatabaseMeta; import org.pentaho.hadoop.shim.api.cluster.NamedClusterService; import org.pentaho.hadoop.shim.api.jdbc.DriverLocator; // Intenionally disabled. The Simba Hive driver is currently unsupported. //@DatabaseMetaPlugin( type = "HIVE2SIMBA", typeDescription = "Hadoop Hive 2 with Simba Driver" ) public class Hive2SimbaDatabaseMeta extends BaseSimbaDatabaseMeta { @VisibleForTesting static final String JAR_FILE = "HiveJDBC41.jar"; @VisibleForTesting static final String DRIVER_CLASS_NAME = "org.apache.hive.jdbc.HiveSimbaDriver"; @VisibleForTesting static final String JDBC_URL_PREFIX = "jdbc:hive2://"; @VisibleForTesting static final int DEFAULT_PORT = 10000; public Hive2SimbaDatabaseMeta( DriverLocator driverLocator, NamedClusterService namedClusterService ) { super( driverLocator, namedClusterService ); } @Override protected String getJdbcPrefix() { return JDBC_URL_PREFIX; } @Override public String getDriverClass() { return DRIVER_CLASS_NAME; } @Override public String[] getUsedLibraries() { return new String[] { JAR_FILE }; } @Override public int getDefaultDatabasePort() { return DEFAULT_PORT; } } ================================================ FILE: kettle-plugins/hive/core/src/main/java/org/pentaho/big/data/kettle/plugins/hive/HiveDatabaseDialect.java ================================================ /*! 
****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.hive; import org.pentaho.database.DatabaseDialectException; import org.pentaho.database.IValueMeta; import org.pentaho.database.dialect.AbstractDatabaseDialect; import org.pentaho.database.model.DatabaseAccessType; import org.pentaho.database.model.DatabaseType; import org.pentaho.database.model.IDatabaseConnection; import org.pentaho.database.model.IDatabaseType; public class HiveDatabaseDialect extends AbstractDatabaseDialect { public HiveDatabaseDialect() { super(); } /** * UID for serialization */ private static final long serialVersionUID = -8456961348836455937L; private static final int DEFAULT_PORT = 10000; private static final IDatabaseType DBTYPE = new DatabaseType( "Hadoop Hive (deprecated)", "HIVE", DatabaseAccessType.getList( DatabaseAccessType.NATIVE, DatabaseAccessType.JNDI ), DEFAULT_PORT, "https://cwiki.apache.org/Hive/hiveclient.html" ); public IDatabaseType getDatabaseType() { return DBTYPE; } @Override public String getNativeDriver() { return "org.apache.hadoop.hive.jdbc.HiveDriver"; } @Override public String getURL( IDatabaseConnection connection ) throws DatabaseDialectException { StringBuffer urlBuffer = new StringBuffer( getNativeJdbcPre() ); /* * String username = connection.getUsername(); if(username != null && !"".equals(username)) { * urlBuffer.append(username); String password = connection.getPassword(); if(password != null && * !"".equals(password)) { urlBuffer.append(":"); urlBuffer.append(password); } urlBuffer.append("@"); } */ urlBuffer.append( connection.getHostname() ); urlBuffer.append( ":" ); urlBuffer.append( connection.getDatabasePort() ); urlBuffer.append( "/" ); urlBuffer.append( connection.getDatabaseName() ); return urlBuffer.toString(); } @Override public String getNativeJdbcPre() { return "jdbc:hive://"; } /** * Generates the SQL statement to add a column to the specified table * * @param tablename * The table to add * @param v * The column defined as a value * @param tk * the name of the technical key field * @param use_autoinc * whether or not this field uses auto increment * @param pk * the name of the primary key field * @param semicolon * whether or not to add a semi-colon behind the statement. * @return the SQL statement to add a column to the specified table */ @Override public String getAddColumnStatement( String tablename, IValueMeta v, String tk, boolean use_autoinc, String pk, boolean semicolon ) { return "ALTER TABLE " + tablename + " ADD " + getFieldDefinition( v, tk, pk, use_autoinc, true, false ); } /** * Generates the SQL statement to modify a column in the specified table * * @param tablename * The table to add * @param v * The column defined as a value * @param tk * the name of the technical key field * @param use_autoinc * whether or not this field uses auto increment * @param pk * the name of the primary key field * @param semicolon * whether or not to add a semi-colon behind the statement. 
* @return the SQL statement to modify a column in the specified table */ @Override public String getModifyColumnStatement( String tablename, IValueMeta v, String tk, boolean use_autoinc, String pk, boolean semicolon ) { return "ALTER TABLE " + tablename + " MODIFY " + getFieldDefinition( v, tk, pk, use_autoinc, true, false ); } @Override public String getFieldDefinition( IValueMeta v, String tk, String pk, boolean use_autoinc, boolean add_fieldname, boolean add_cr ) { String retval = ""; String fieldname = v.getName(); int length = v.getLength(); int precision = v.getPrecision(); if ( add_fieldname ) { retval += fieldname + " "; } int type = v.getType(); switch ( type ) { case IValueMeta.TYPE_DATE: retval += "DATETIME"; break; case IValueMeta.TYPE_BOOLEAN: if ( supportsBooleanDataType() ) { retval += "BOOLEAN"; } else { retval += "CHAR(1)"; } break; case IValueMeta.TYPE_NUMBER: case IValueMeta.TYPE_INTEGER: case IValueMeta.TYPE_BIGNUMBER: if ( fieldname.equalsIgnoreCase( tk ) || // Technical key fieldname.equalsIgnoreCase( pk ) // Primary key ) { if ( use_autoinc ) { retval += "BIGINT AUTO_INCREMENT NOT NULL PRIMARY KEY"; } else { retval += "BIGINT NOT NULL PRIMARY KEY"; } } else { // Integer values... if ( precision == 0 ) { if ( length > 9 ) { if ( length < 19 ) { // can hold signed values between -9223372036854775808 and 9223372036854775807 // 18 significant digits retval += "BIGINT"; } else { retval += "DECIMAL(" + length + ")"; } } else { retval += "INT"; } } else { // Floating point values... if ( length > 15 ) { retval += "DECIMAL(" + length; if ( precision > 0 ) { retval += ", " + precision; } retval += ")"; } else { // A double-precision floating-point number is accurate to approximately 15 decimal places. // http://mysql.mirrors-r-us.net/doc/refman/5.1/en/numeric-type-overview.html retval += "DOUBLE"; } } } break; case IValueMeta.TYPE_STRING: if ( length > 0 ) { if ( length == 1 ) { retval += "CHAR(1)"; } else if ( length < 256 ) { retval += "VARCHAR(" + length + ")"; } else if ( length < 65536 ) { retval += "TEXT"; } else if ( length < 16777215 ) { retval += "MEDIUMTEXT"; } else { retval += "LONGTEXT"; } } else { retval += "TINYTEXT"; } break; case IValueMeta.TYPE_BINARY: retval += "LONGBLOB"; break; default: retval += " UNKNOWN"; break; } if ( add_cr ) { retval += CR; } return retval; } @Override public String[] getUsedLibraries() { return new String[] { "pentaho-hadoop-hive-jdbc-shim-1.4-SNAPSHOT.jar" }; } @Override public int getDefaultDatabasePort() { return DEFAULT_PORT; } /* * (non-Javadoc) * * @see org.pentaho.database.dialect.AbstractDatabaseDialect#supportsSchemas() */ @Override public boolean supportsSchemas() { return false; } @Override public boolean initialize( String classname ) { return true; } } ================================================ FILE: kettle-plugins/hive/core/src/main/java/org/pentaho/big/data/kettle/plugins/hive/HiveDatabaseMeta.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. 
* * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.hive; import org.pentaho.big.data.api.jdbc.impl.DriverLocatorImpl; import org.pentaho.hadoop.shim.api.jdbc.DriverLocator; import org.pentaho.di.core.Const; import org.pentaho.di.core.database.DatabaseMeta; import org.pentaho.di.core.plugins.DatabaseMetaPlugin; import org.pentaho.di.core.row.ValueMetaInterface; @DatabaseMetaPlugin( type = "HIVE", typeDescription = "Hadoop Hive (deprecated)" ) public class HiveDatabaseMeta extends DatabaseMetaWithVersion { public static final String URL_PREFIX = "jdbc:hive://"; public static final String SELECT_COUNT_1_FROM = "select count(1) from "; public static final String VIEW = "VIEW"; public static final String VIRTUAL_VIEW = "VIRTUAL_VIEW"; public static final String TRUNCATE_TABLE = "TRUNCATE TABLE "; protected static final String JAR_FILE = "hive-jdbc-cdh4.2.0-release-pentaho.jar"; protected static final String DRIVER_CLASS_NAME = "org.apache.hadoop.hive.jdbc.HiveDriver"; protected static final int DEFAULT_PORT = 10000; public HiveDatabaseMeta() { this( DriverLocatorImpl.getInstance() ); } public HiveDatabaseMeta( DriverLocator driverLocator ) { super( driverLocator ); } @Override public int[] getAccessTypeList() { return new int[] { DatabaseMeta.TYPE_ACCESS_NATIVE }; } @Override public String getAddColumnStatement( String tablename, ValueMetaInterface v, String tk, boolean useAutoinc, String pk, boolean semicolon ) { return "ALTER TABLE " + tablename + " ADD " + getFieldDefinition( v, tk, pk, useAutoinc, true, false ); } @Override public String getDriverClass() { // !!! We will probably have to change this if we are providing our own driver, // i.e., before our code is committed to the Hadoop Hive project. return DRIVER_CLASS_NAME; } /** * This method assumes that Hive has no concept of primary and technical keys and auto increment columns. We are * ignoring the tk, pk and useAutoinc parameters. */ @Override public String getFieldDefinition( ValueMetaInterface v, String tk, String pk, boolean useAutoinc, boolean addFieldname, boolean addCr ) { String retval = ""; String fieldname = v.getName(); int length = v.getLength(); int precision = v.getPrecision(); if ( addFieldname ) { retval += fieldname + " "; } int type = v.getType(); switch ( type ) { case ValueMetaInterface.TYPE_BOOLEAN: retval += "BOOLEAN"; break; // Hive does not support DATE until 0.12 case ValueMetaInterface.TYPE_DATE: if ( isDriverVersion( 0, 12 ) ) { retval += "DATE"; } else { throw new IllegalArgumentException( "Date types not supported in this version of Hive" ); } break; // Hive does not support DATE until 0.8 case ValueMetaInterface.TYPE_TIMESTAMP: if ( isDriverVersion( 0, 8 ) ) { retval += "TIMESTAMP"; } else { throw new IllegalArgumentException( "Timestamp types not supported in this version of Hive" ); } break; case ValueMetaInterface.TYPE_STRING: retval += "STRING"; break; case ValueMetaInterface.TYPE_NUMBER: case ValueMetaInterface.TYPE_INTEGER: case ValueMetaInterface.TYPE_BIGNUMBER: // Integer values... if ( precision == 0 ) { if ( length > 9 ) { if ( length < 19 ) { // can hold signed values between -9223372036854775808 and 9223372036854775807 // 18 significant digits retval += "BIGINT"; } else { retval += "FLOAT"; } } else { retval += "INT"; } } else { // Floating point values... 
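        /*
         * Illustrative expectations only (values are hypothetical; this note is not part of the
         * upstream class): the integer branch above and the floating-point branch below map
         * length/precision to Hive types roughly as follows for a ValueMetaNumber n:
         *
         *   n.setPrecision( 0 ); n.setLength( 9 );   ->  "INT"
         *   n.setPrecision( 0 ); n.setLength( 18 );  ->  "BIGINT"
         *   n.setPrecision( 0 ); n.setLength( 19 );  ->  "FLOAT"
         *   n.setPrecision( 2 ); n.setLength( 16 );  ->  "FLOAT"
         *   n.setPrecision( 2 ); n.setLength( 15 );  ->  "DOUBLE"
         *
         * The same thresholds are exercised by testGetFieldDefinitionNumber() in
         * Hive2DatabaseMetaTest further down in this module.
         */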
if ( length > 15 ) { retval += "FLOAT"; } else { // A double-precision floating-point number is accurate to approximately 15 decimal places. // http://mysql.mirrors-r-us.net/doc/refman/5.1/en/numeric-type-overview.html retval += "DOUBLE"; } } break; } return retval; } @Override public String getModifyColumnStatement( String tablename, ValueMetaInterface v, String tk, boolean useAutoinc, String pk, boolean semicolon ) { return "ALTER TABLE " + tablename + " MODIFY " + getFieldDefinition( v, tk, pk, useAutoinc, true, false ); } @Override public String getURL( String hostname, String port, String databaseName ) { if ( Const.isEmpty( port ) ) { port = Integer.toString( getDefaultDatabasePort() ); } return URL_PREFIX + hostname + ":" + port + "/" + databaseName; } @Override public String[] getUsedLibraries() { return new String[] { JAR_FILE }; } /** * Build the SQL to count the number of rows in the passed table. * * @param tableName * @return */ @Override public String getSelectCountStatement( String tableName ) { return SELECT_COUNT_1_FROM + tableName; } @Override public String generateColumnAlias( int columnIndex, String suggestedName ) { if ( isDriverVersion( 0, 6 ) ) { return suggestedName; } else { // For version 0.5 and prior: // Column aliases are currently not supported in Hive. The default column alias // generated is in the format '_col##' where ## = column index. Use this format // so the result can be mapped back correctly. return "_col" + String.valueOf( columnIndex ); //$NON-NLS-1$ } } /** * Quotes around table names are not valid Hive QL *
* return an empty string for the start quote */ public String getStartQuote() { return ""; } /** * Quotes around table names are not valid Hive QL *
* return an empty string for the end quote */ public String getEndQuote() { return ""; } @Override public int getDefaultDatabasePort() { return DEFAULT_PORT; } /** * @return a list of table types to retrieve tables for the database */ @Override public String[] getTableTypes() { return null; } /** * @return a list of table types to retrieve views for the database */ @Override public String[] getViewTypes() { return new String[] { VIEW, VIRTUAL_VIEW }; } /** * @param tableName The table to be truncated. * @return The SQL statement to truncate a table: remove all rows from it without a transaction */ @Override public String getTruncateTableStatement( String tableName ) { if ( isDriverVersion( 0, 11 ) ) { return TRUNCATE_TABLE + tableName; } return null; } @Override public boolean supportsSetCharacterStream() { return false; } @Override public boolean supportsBatchUpdates() { return false; } @Override public boolean supportsTimeStampToDateConversion() { return false; } } ================================================ FILE: kettle-plugins/hive/core/src/main/java/org/pentaho/big/data/kettle/plugins/hive/HiveWarehouseDatabaseMeta.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.hive; import org.pentaho.big.data.api.jdbc.impl.DriverLocatorImpl; import org.pentaho.big.data.impl.cluster.NamedClusterManager; import org.pentaho.di.core.plugins.DatabaseMetaPlugin; import org.pentaho.hadoop.shim.api.cluster.NamedClusterService; import org.pentaho.hadoop.shim.api.jdbc.DriverLocator; @DatabaseMetaPlugin( type = "HIVEWAREHOUSE", typeDescription = "Hive Warehouse Connector" ) public class HiveWarehouseDatabaseMeta extends Hive2DatabaseMeta { public HiveWarehouseDatabaseMeta() { this( DriverLocatorImpl.getInstance(), NamedClusterManager.getInstance() ); } public HiveWarehouseDatabaseMeta( DriverLocator driverLocator, NamedClusterService namedClusterService ) { super( driverLocator, namedClusterService ); } } ================================================ FILE: kettle-plugins/hive/core/src/main/java/org/pentaho/big/data/kettle/plugins/hive/ImpalaDatabaseDialect.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. 
* * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.hive; import org.pentaho.database.DatabaseDialectException; import org.pentaho.database.model.DatabaseAccessType; import org.pentaho.database.model.DatabaseConnection; import org.pentaho.database.model.DatabaseType; import org.pentaho.database.model.IDatabaseConnection; import org.pentaho.database.model.IDatabaseType; public class ImpalaDatabaseDialect extends Hive2DatabaseDialect { public ImpalaDatabaseDialect() { super(); } /** * UID for serialization */ private static final long serialVersionUID = -6685869374136347923L; private static final int DEFAULT_PORT = 21050; private static final IDatabaseType DBTYPE = new DatabaseType( "Impala", "IMPALA", DatabaseAccessType.getList( DatabaseAccessType.NATIVE, DatabaseAccessType.JNDI ), DEFAULT_PORT, "http://www.cloudera.com/content/support/en/documentation/cloudera-impala/cloudera-impala-documentation-v1" + "-latest.html" ); public IDatabaseType getDatabaseType() { return DBTYPE; } @Override public String getNativeDriver() { return "org.apache.hive.jdbc.ImpalaDriver"; } @Override public String getURL( IDatabaseConnection connection ) throws DatabaseDialectException { StringBuffer urlBuffer = new StringBuffer( getNativeJdbcPre() ); /* * String username = connection.getUsername(); if(username != null && !"".equals(username)) { * urlBuffer.append(username); String password = connection.getPassword(); if(password != null && * !"".equals(password)) { urlBuffer.append(":"); urlBuffer.append(password); } urlBuffer.append("@"); } */ urlBuffer.append( connection.getHostname() ); urlBuffer.append( ":" ); urlBuffer.append( connection.getDatabasePort() ); urlBuffer.append( "/" ); urlBuffer.append( connection.getDatabaseName() ); String principalPropertyName = getDatabaseType().getShortName() + ".principal"; String principal = connection.getExtraOptions().get( principalPropertyName ); String extraPrincipal = connection.getAttributes().get( DatabaseConnection.ATTRIBUTE_PREFIX_EXTRA_OPTION + principalPropertyName ); urlBuffer.append( ";impala_db=true" ); if ( principal != null || extraPrincipal != null ) { return urlBuffer.toString(); } urlBuffer.append( ";auth=noSasl" ); return urlBuffer.toString(); } @Override public String getNativeJdbcPre() { return "jdbc:hive2://"; } @Override public String[] getUsedLibraries() { return new String[] { "pentaho-hadoop-hive-jdbc-shim-1.4-SNAPSHOT.jar" }; } @Override public int getDefaultDatabasePort() { return DEFAULT_PORT; } @Override public boolean initialize( String classname ) { return true; } } ================================================ FILE: kettle-plugins/hive/core/src/main/java/org/pentaho/big/data/kettle/plugins/hive/ImpalaDatabaseMeta.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. 
* * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.hive; import com.google.common.annotations.VisibleForTesting; import org.pentaho.big.data.api.jdbc.impl.DriverLocatorImpl; import org.pentaho.big.data.impl.cluster.NamedClusterManager; import org.pentaho.hadoop.shim.api.cluster.NamedClusterService; import org.pentaho.hadoop.shim.api.jdbc.DriverLocator; import org.pentaho.di.core.Const; import org.pentaho.di.core.database.DatabaseInterface; import org.pentaho.di.core.database.DatabaseMeta; import org.pentaho.di.core.plugins.DatabaseMetaPlugin; import org.pentaho.di.core.row.ValueMetaInterface; import org.pentaho.metastore.api.exceptions.MetaStoreException; import org.pentaho.metastore.locator.api.MetastoreLocator; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import java.util.Collections; import java.util.List; import java.util.Map; @DatabaseMetaPlugin( type = "IMPALA", typeDescription = "Impala" ) public class ImpalaDatabaseMeta extends Hive2DatabaseMeta implements DatabaseInterface { public static final String AUTH_NO_SASL = ";auth=noSasl"; protected static final String JAR_FILE = "hive-jdbc-cdh4.2.0-release-pentaho.jar"; protected static final String DRIVER_CLASS_NAME = "org.apache.hive.jdbc.HiveDriver"; protected static final int DEFAULT_PORT = 21050; private static final Logger logChannel = LogManager.getLogger( ImpalaDatabaseMeta.class ); @VisibleForTesting ImpalaDatabaseMeta( DriverLocator driverLocator, NamedClusterService namedClusterService, MetastoreLocator metastoreLocator ) { super( driverLocator, namedClusterService, metastoreLocator ); } public ImpalaDatabaseMeta() { this( DriverLocatorImpl.getInstance(), NamedClusterManager.getInstance() ); } // OSGi constructor public ImpalaDatabaseMeta( DriverLocator driverLocator, NamedClusterService namedClusterService ) { super( driverLocator, namedClusterService ); } @Override public int[] getAccessTypeList() { return new int[] { DatabaseMeta.TYPE_ACCESS_NATIVE }; } @Override public String getDriverClass() { // !!! We will probably have to change this if we are providing our own driver, // i.e., before our code is committed to the Hadoop Hive project. return DRIVER_CLASS_NAME; } /** * This method assumes that Hive has no concept of primary and technical keys and auto increment columns. We are * ignoring the tk, pk and useAutoinc parameters. */ @Override public String getFieldDefinition( ValueMetaInterface v, String tk, String pk, boolean useAutoinc, boolean addFieldname, boolean addCr ) { String retval = ""; String fieldname = v.getName(); int length = v.getLength(); int precision = v.getPrecision(); if ( addFieldname ) { retval += fieldname + " "; } int type = v.getType(); switch ( type ) { case ValueMetaInterface.TYPE_BOOLEAN: retval += "BOOLEAN"; break; // Hive does not support DATE until 0.12 - check Impala version against Hive case ValueMetaInterface.TYPE_DATE: case ValueMetaInterface.TYPE_TIMESTAMP: if ( isDriverVersion( 0, 8 ) ) { retval += "TIMESTAMP"; } else { throw new IllegalArgumentException( "Timestamp types not supported in this version of Impala" ); } break; case ValueMetaInterface.TYPE_STRING: retval += "STRING"; break; case ValueMetaInterface.TYPE_NUMBER: case ValueMetaInterface.TYPE_INTEGER: case ValueMetaInterface.TYPE_BIGNUMBER: // Integer values... 
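        /*
         * Reading aid (hypothetical values, not upstream code): the numeric branches below reuse
         * the same length/precision thresholds as HiveDatabaseMeta.getFieldDefinition() earlier in
         * this module (length <= 9 -> INT, 10..18 -> BIGINT, >= 19 -> FLOAT; precision > 0 with
         * length > 15 -> FLOAT, otherwise DOUBLE). Unlike the Hive metas, TYPE_DATE is mapped to
         * "TIMESTAMP" in the case above rather than to "DATE".
         */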
if ( precision == 0 ) { if ( length > 9 ) { if ( length < 19 ) { // can hold signed values between -9223372036854775808 and 9223372036854775807 // 18 significant digits retval += "BIGINT"; } else { retval += "FLOAT"; } } else { retval += "INT"; } } else { // Floating point values... if ( length > 15 ) { retval += "FLOAT"; } else { // A double-precision floating-point number is accurate to approximately 15 decimal places. // http://mysql.mirrors-r-us.net/doc/refman/5.1/en/numeric-type-overview.html retval += "DOUBLE"; } } break; } return retval; } @Override public String getURL( String hostname, String port, String databaseName ) { StringBuilder urlBuffer = new StringBuilder(); if ( Const.isEmpty( port ) ) { port = Integer.toString( getDefaultDatabasePort() ); } String principal = getAttributes().getProperty( "principal" ); String extraPrincipal = getAttributes().getProperty( ATTRIBUTE_PREFIX_EXTRA_OPTION + getPluginId() + ".principal" ); urlBuffer.append( "jdbc:hive2://" ).append( hostname ).append( ":" ).append( port ).append( "/" ) .append( databaseName ); if ( principal == null && extraPrincipal == null ) { urlBuffer.append( AUTH_NO_SASL ); } urlBuffer.append( ";impala_db=true" ); return urlBuffer.toString(); } @Override public String[] getUsedLibraries() { return new String[] { JAR_FILE }; } @Override public int getDefaultDatabasePort() { return DEFAULT_PORT; } @Override public List getNamedClusterList() { try { return namedClusterService.listNames( metastoreLocator.getMetastore() ); } catch ( MetaStoreException e ) { logChannel.error( e.getMessage(), e ); return Collections.emptyList(); } } @Override public void putOptionalOptions( Map extraOptions ) { if ( getNamedCluster() != null && getNamedCluster().trim().length() > 0 ) { extraOptions.put( getPluginId() + ".pentahoNamedCluster", getNamedCluster() ); } } } ================================================ FILE: kettle-plugins/hive/core/src/main/java/org/pentaho/big/data/kettle/plugins/hive/ImpalaSimbaDatabaseDialect.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.hive; import com.google.common.base.Joiner; import com.google.common.collect.ImmutableMap; import org.pentaho.database.model.DatabaseAccessType; import org.pentaho.database.model.DatabaseType; import org.pentaho.database.model.IDatabaseType; /** * User: Dzmitry Stsiapanau Date: 8/28/2015 Time: 10:23 */ public class ImpalaSimbaDatabaseDialect extends Hive2SimbaDatabaseDialect { public static final String DB_TYPE_NAME_SHORT = "IMPALASIMBA"; public ImpalaSimbaDatabaseDialect() { super(); } /** * UID for serialization */ private static final long serialVersionUID = -8456961348836455937L; protected static final int DEFAULT_PORT = 21050; protected static final String JDBC_URL_TEMPLATE = "jdbc:impala://%s:%s/%s;AuthMech=%d%s"; private static final IDatabaseType DBTYPE = new DatabaseType( "Cloudera Impala", DB_TYPE_NAME_SHORT, DatabaseAccessType.getList( DatabaseAccessType.NATIVE, DatabaseAccessType.JNDI ), DEFAULT_PORT, "http://go.cloudera.com/odbc-driver-hive-impala.html", "", ImmutableMap.builder().put( Joiner.on( "." 
).join( DB_TYPE_NAME_SHORT, SOCKET_TIMEOUT_OPTION ), DEFAULT_SOCKET_TIMEOUT ).build() ); public IDatabaseType getDatabaseType() { return DBTYPE; } @Override public String getNativeDriver() { return "org.apache.hive.jdbc.ImpalaSimbaDriver"; } @Override public String getNativeJdbcPre() { return "jdbc:impala://"; } @Override public int getDefaultDatabasePort() { return DEFAULT_PORT; } @Override public String[] getUsedLibraries() { return new String[] { "ImpalaJDBC41.jar" }; } @Override public boolean initialize( String classname ) { return true; } } ================================================ FILE: kettle-plugins/hive/core/src/main/java/org/pentaho/big/data/kettle/plugins/hive/ImpalaSimbaDatabaseMeta.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.hive; import org.pentaho.big.data.api.jdbc.impl.DriverLocatorImpl; import org.pentaho.big.data.impl.cluster.NamedClusterManager; import org.pentaho.di.core.database.DatabaseMeta; import org.pentaho.di.core.plugins.DatabaseMetaPlugin; import org.pentaho.hadoop.shim.api.cluster.NamedClusterService; import org.pentaho.hadoop.shim.api.jdbc.DriverLocator; import java.util.HashMap; import java.util.Map; @DatabaseMetaPlugin( type = "IMPALASIMBA", typeDescription = "Cloudera Impala" ) public class ImpalaSimbaDatabaseMeta extends BaseSimbaDatabaseMeta { protected static final String JAR_FILE = "ImpalaJDBC41.jar"; protected static final String JDBC_URL_PREFIX = "jdbc:impala://"; protected static final String DRIVER_CLASS_NAME = "com.cloudera.impala.jdbc41.Driver"; protected static final int DEFAULT_PORT = 21050; protected static final String SOCKET_TIMEOUT_OPTION = "SocketTimeout"; public ImpalaSimbaDatabaseMeta() { this( DriverLocatorImpl.getInstance(), NamedClusterManager.getInstance() ); } public ImpalaSimbaDatabaseMeta( DriverLocator driverLocator, NamedClusterService namedClusterService ) { super( driverLocator, namedClusterService ); } @Override protected String getJdbcPrefix() { return JDBC_URL_PREFIX; } @Override public String getDriverClass() { return DRIVER_CLASS_NAME; } @Override public String[] getUsedLibraries() { return new String[] { JAR_FILE }; } @Override public int getDefaultDatabasePort() { return DEFAULT_PORT; } @Override public Map getDefaultOptions() { HashMap options = new HashMap<>(); options.put( String.format( "%s.%s", getPluginId(), SOCKET_TIMEOUT_OPTION ), "10" ); return options; } } ================================================ FILE: kettle-plugins/hive/core/src/main/java/org/pentaho/big/data/kettle/plugins/hive/SimbaUrl.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. 
* * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.hive; import com.google.common.annotations.VisibleForTesting; import org.pentaho.di.core.database.DatabaseMeta; import static com.google.common.base.Strings.isNullOrEmpty; public class SimbaUrl { @VisibleForTesting static final String KRB_HOST_FQDN = "KrbHostFQDN"; @VisibleForTesting static final String KRB_SERVICE_NAME = "KrbServiceName"; @VisibleForTesting static final String URL_IS_CONFIGURED_THROUGH_JNDI = "Url is configured through JNDI"; final String jdbcPrefix; private String username; private String password; private boolean isKerberos; private int accessType; private int defaultPort; private String port; private String hostname; private String databaseName; private String jdbcUrlTemplate; private static final String DEFAULT_DB = "default"; public SimbaUrl( Builder builder ) { this.jdbcPrefix = builder.jdbcPrefix; this.username = builder.username; this.password = builder.password; this.isKerberos = builder.isKerberos; this.accessType = builder.accessType; this.defaultPort = builder.defaultPort; this.port = builder.port; this.hostname = builder.hostname; this.databaseName = builder.databaseName; this.jdbcUrlTemplate = jdbcPrefix + "%s:%d/%s;AuthMech=%d%s"; } public String getURL() { Integer portNumber; if ( isNullOrEmpty( port ) ) { portNumber = defaultPort; } else { portNumber = Integer.valueOf( port ); } if ( isNullOrEmpty( databaseName ) ) { databaseName = DEFAULT_DB; } switch ( accessType ) { case DatabaseMeta.TYPE_ACCESS_JNDI: { return URL_IS_CONFIGURED_THROUGH_JNDI; } case DatabaseMeta.TYPE_ACCESS_NATIVE: default: { Integer authMethod = 0; StringBuilder additional = new StringBuilder(); String userName = username; String password = this.password; if ( isKerberos ) { authMethod = 1; } else if ( !isNullOrEmpty( userName ) ) { additional.append( ";UID=" ); additional.append( userName ); if ( !isNullOrEmpty( password ) ) { authMethod = 3; additional.append( ";PWD=" ); additional.append( password ); } else { authMethod = 2; } } return String.format( jdbcUrlTemplate, hostname, portNumber, databaseName, authMethod, additional ); } } } public static final class Builder { private String jdbcPrefix; private int accessType; private String databaseName; private int defaultPort; private String hostname; private boolean isKerberos; private String password; private String port; private String username; private Builder() { } public static Builder create() { return new Builder(); } public Builder withAccessType( int accessType ) { this.accessType = accessType; return this; } public Builder withDatabaseName( String databaseName ) { this.databaseName = databaseName; return this; } public Builder withDefaultPort( int defaultPort ) { this.defaultPort = defaultPort; return this; } public Builder withHostname( String hostname ) { this.hostname = hostname; return this; } public Builder withIsKerberos( boolean isKerberos ) { this.isKerberos = isKerberos; return this; } public Builder withJdbcPrefix( String jdbcPrefix ) { this.jdbcPrefix = jdbcPrefix; return this; } public Builder withPassword( String password ) { this.password = password; return this; } public Builder withPort( String port ) { this.port = port; return this; } public Builder withUsername( String username ) { this.username = username; return this; } public SimbaUrl build() { SimbaUrl simbaUrl = new SimbaUrl( this ); return simbaUrl; } } } ================================================ 
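For orientation, a minimal usage sketch of the SimbaUrl builder above. The host name, credentials and class name SimbaUrlExample are hypothetical; the expected output simply follows the jdbcUrlTemplate and getURL() logic shown in SimbaUrl (empty port falls back to the default port, empty database name falls back to "default", username plus password selects AuthMech=3).

import org.pentaho.big.data.kettle.plugins.hive.SimbaUrl;
import org.pentaho.di.core.database.DatabaseMeta;

public class SimbaUrlExample {
  public static void main( String[] args ) {
    SimbaUrl url = SimbaUrl.Builder.create()
      .withJdbcPrefix( "jdbc:hive2://" )
      .withAccessType( DatabaseMeta.TYPE_ACCESS_NATIVE )
      .withHostname( "node1.example.com" )   // hypothetical host
      .withPort( "" )                        // empty -> defaultPort is used
      .withDefaultPort( 10000 )
      .withDatabaseName( "" )                // empty -> "default" is used
      .withUsername( "joe" )
      .withPassword( "secret" )              // username + password -> AuthMech=3
      .withIsKerberos( false )
      .build();

    // Expected: jdbc:hive2://node1.example.com:10000/default;AuthMech=3;UID=joe;PWD=secret
    System.out.println( url.getURL() );
  }
}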
FILE: kettle-plugins/hive/core/src/main/java/org/pentaho/big/data/kettle/plugins/hive/SparkSimbaDatabaseDialect.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.hive; import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Joiner; import com.google.common.collect.ImmutableMap; import org.pentaho.database.model.DatabaseAccessType; import org.pentaho.database.model.DatabaseType; import org.pentaho.database.model.IDatabaseType; public class SparkSimbaDatabaseDialect extends Hive2SimbaDatabaseDialect { public static final String DB_TYPE_NAME_SHORT = "SPARKSIMBA"; public SparkSimbaDatabaseDialect() { super(); } private static final long serialVersionUID = 5665821298486490578L; @VisibleForTesting static final IDatabaseType DBTYPE = new DatabaseType( "SparkSQL", DB_TYPE_NAME_SHORT, DatabaseAccessType.getList( DatabaseAccessType.NATIVE, DatabaseAccessType.JNDI ), SparkSimbaDatabaseMeta.DEFAULT_PORT, "http://www.simba.com/drivers/spark-jdbc-odbc/", "", ImmutableMap.builder().put( Joiner.on( "." ).join( DB_TYPE_NAME_SHORT, SOCKET_TIMEOUT_OPTION ), DEFAULT_SOCKET_TIMEOUT ).build() ); public IDatabaseType getDatabaseType() { return DBTYPE; } @Override public String getNativeDriver() { return SparkSimbaDatabaseMeta.DRIVER_CLASS_NAME; } @Override public String getNativeJdbcPre() { return SparkSimbaDatabaseMeta.JDBC_URL_PREFIX; } @Override public int getDefaultDatabasePort() { return DBTYPE.getDefaultDatabasePort(); } @Override public String[] getUsedLibraries() { return new String[] { SparkSimbaDatabaseMeta.JAR_FILE }; } } ================================================ FILE: kettle-plugins/hive/core/src/main/java/org/pentaho/big/data/kettle/plugins/hive/SparkSimbaDatabaseMeta.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. 
* * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.hive; import com.google.common.annotations.VisibleForTesting; import org.pentaho.big.data.api.jdbc.impl.DriverLocatorImpl; import org.pentaho.big.data.impl.cluster.NamedClusterManager; import org.pentaho.di.core.database.DatabaseMeta; import org.pentaho.di.core.plugins.DatabaseMetaPlugin; import org.pentaho.di.core.row.ValueMetaInterface; import org.pentaho.hadoop.shim.api.cluster.NamedClusterService; import org.pentaho.hadoop.shim.api.jdbc.DriverLocator; import java.util.HashMap; import java.util.Map; @DatabaseMetaPlugin( type = "SPARKSIMBA", typeDescription = "SparkSQL" ) public class SparkSimbaDatabaseMeta extends BaseSimbaDatabaseMeta { @VisibleForTesting static final String JDBC_URL_PREFIX = "jdbc:spark://"; @VisibleForTesting static final String DRIVER_CLASS_NAME = "org.apache.hive.jdbc.SparkSqlSimbaDriver"; @VisibleForTesting static final String JAR_FILE = "SparkJDBC41.jar"; @VisibleForTesting static final int DEFAULT_PORT = 10015; @VisibleForTesting static final String SOCKET_TIMEOUT_OPTION = "SocketTimeout"; private final String LIMIT_1 = " LIMIT 1"; public SparkSimbaDatabaseMeta() { this( DriverLocatorImpl.getInstance(), NamedClusterManager.getInstance() ); } public SparkSimbaDatabaseMeta( DriverLocator driverLocator, NamedClusterService namedClusterService ) { super( driverLocator, namedClusterService ); } @Override public int[] getAccessTypeList() { return new int[] { DatabaseMeta.TYPE_ACCESS_NATIVE, DatabaseMeta.TYPE_ACCESS_JNDI }; } @Override protected String getJdbcPrefix() { return JDBC_URL_PREFIX; } @Override public String getDriverClass() { return DRIVER_CLASS_NAME; } @Override public String getSQLQueryFields( String tableName ) { return "SELECT * FROM " + tableName + LIMIT_1; } @Override public String getStartQuote() { return "`"; } @Override public String getEndQuote() { return "`"; } @Override public String getSQLTableExists( String tablename ) { return "SELECT 1 FROM " + tablename + LIMIT_1; } @Override public String getTruncateTableStatement( String tableName ) { return "TRUNCATE TABLE " + tableName; } @Override public String getSQLColumnExists( String columnname, String tablename ) { return "SELECT " + columnname + " FROM " + tablename + LIMIT_1; } @Override public String getLimitClause( int nrRows ) { return " LIMIT " + nrRows; } @Override public String getSelectCountStatement( String tableName ) { return SELECT_COUNT_STATEMENT + " " + tableName; } @Override public String getDropColumnStatement( String tablename, ValueMetaInterface v, String tk, boolean use_autoinc, String pk, boolean semicolon ) { return ""; } @Override public String getAddColumnStatement( String tablename, ValueMetaInterface v, String tk, boolean useAutoinc, String pk, boolean semicolon ) { return ""; } @Override public String getModifyColumnStatement( String tablename, ValueMetaInterface v, String tk, boolean useAutoinc, String pk, boolean semicolon ) { return ""; } @Override public String[] getUsedLibraries() { return new String[] { JAR_FILE }; } @Override public int getDefaultDatabasePort() { return DEFAULT_PORT; } @Override public Map getDefaultOptions() { HashMap options = new HashMap<>(); options.put( String.format( "%s.%s", getPluginId(), SOCKET_TIMEOUT_OPTION ), "10" ); return options; } } ================================================ FILE: kettle-plugins/hive/core/src/test/java/org/apache/hadoop/hive/jdbc/HiveDriverTest.java 
================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.apache.hadoop.hive.jdbc; import org.junit.Test; import org.pentaho.big.data.kettle.plugins.hive.DummyDriver; /** * Created by bryan on 4/14/16. */ public class HiveDriverTest { @Test public void testSubclass() { DummyDriver.class.isInstance( new HiveDriver() ); } } ================================================ FILE: kettle-plugins/hive/core/src/test/java/org/apache/hive/jdbc/HiveDriverTest.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.apache.hive.jdbc; import org.junit.Test; import org.pentaho.big.data.kettle.plugins.hive.DummyDriver; import static org.junit.Assert.assertTrue; /** * Created by bryan on 4/14/16. */ public class HiveDriverTest { @Test public void testIsInstance() { assertTrue( DummyDriver.class.isInstance( new HiveDriver() ) ); } } ================================================ FILE: kettle-plugins/hive/core/src/test/java/org/apache/hive/jdbc/HiveSimbaDriverTest.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.apache.hive.jdbc; import org.junit.Test; import org.pentaho.big.data.kettle.plugins.hive.DummyDriver; import static org.junit.Assert.assertTrue; /** * Created by bryan on 4/14/16. */ public class HiveSimbaDriverTest { @Test public void testIsInstance() { assertTrue( DummyDriver.class.isInstance( new HiveSimbaDriver() ) ); } } ================================================ FILE: kettle-plugins/hive/core/src/test/java/org/apache/hive/jdbc/ImpalaDriverTest.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.apache.hive.jdbc; import org.junit.Test; import org.pentaho.big.data.kettle.plugins.hive.DummyDriver; import static org.junit.Assert.assertTrue; /** * Created by bryan on 4/14/16. 
*/ public class ImpalaDriverTest { @Test public void testIsInstance() { assertTrue( DummyDriver.class.isInstance( new ImpalaDriver() ) ); } } ================================================ FILE: kettle-plugins/hive/core/src/test/java/org/apache/hive/jdbc/ImpalaSimbaDriverTest.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.apache.hive.jdbc; import org.junit.Test; import org.pentaho.big.data.kettle.plugins.hive.DummyDriver; import static org.junit.Assert.assertTrue; /** * Created by bryan on 4/14/16. */ public class ImpalaSimbaDriverTest { @Test public void testIsInstance() { assertTrue( DummyDriver.class.isInstance( new ImpalaSimbaDriver() ) ); } } ================================================ FILE: kettle-plugins/hive/core/src/test/java/org/pentaho/big/data/kettle/plugins/hive/BaseSimbaDatabaseMetaTest.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.hive; import org.junit.Before; import org.junit.BeforeClass; import org.junit.Test; import org.junit.runner.RunWith; import org.mockito.Mock; import org.mockito.junit.MockitoJUnitRunner; import org.pentaho.di.core.logging.KettleLogStore; import org.pentaho.hadoop.shim.api.jdbc.DriverLocator; import org.pentaho.di.core.database.DatabaseMeta; import org.pentaho.di.core.exception.KettleDatabaseException; import org.pentaho.di.core.row.ValueMetaInterface; import org.pentaho.di.core.row.value.ValueMetaBigNumber; import org.pentaho.di.core.row.value.ValueMetaBoolean; import org.pentaho.di.core.row.value.ValueMetaDate; import org.pentaho.di.core.row.value.ValueMetaInteger; import org.pentaho.di.core.row.value.ValueMetaInternetAddress; import org.pentaho.di.core.row.value.ValueMetaNumber; import org.pentaho.di.core.row.value.ValueMetaString; import org.pentaho.di.core.row.value.ValueMetaTimestamp; import java.net.MalformedURLException; import java.net.URL; import java.sql.Driver; import static org.junit.Assert.assertTrue; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertArrayEquals; import static org.mockito.Mockito.when; import static org.pentaho.big.data.kettle.plugins.hive.SimbaUrl.KRB_HOST_FQDN; import static org.pentaho.big.data.kettle.plugins.hive.SimbaUrl.KRB_SERVICE_NAME; @RunWith( MockitoJUnitRunner.Silent.class ) public class BaseSimbaDatabaseMetaTest { private static final String LOCALHOST = "localhost"; private static final String PORT = "10000"; private static final String DEFAULT = "default"; @Mock private DriverLocator driverLocator; @Mock private Driver driver; private BaseSimbaDatabaseMeta baseSimbaDatabaseMeta; private String driverClassname = "driverClassname"; private String jdbcPrefix = "jdbc:prefix://"; @BeforeClass public static void 
initLogs() { KettleLogStore.init(); } @Before public void setup() throws Throwable { baseSimbaDatabaseMeta = new BaseSimbaDatabaseMeta( driverLocator, null, null ) { @Override protected String getJdbcPrefix() { return jdbcPrefix; } @Override public String getDriverClass() { return driverClassname; } }; String baseSimbaDatabaseMetaURL = baseSimbaDatabaseMeta.getURL( LOCALHOST, PORT, DEFAULT ); when( driverLocator.getDriver( baseSimbaDatabaseMetaURL ) ).thenReturn( driver ); } @Test public void testVersionConstructor() throws Throwable { int majorVersion = 22; int minorVersion = 33; when( driver.getMajorVersion() ).thenReturn( majorVersion ); when( driver.getMinorVersion() ).thenReturn( minorVersion ); assertTrue( baseSimbaDatabaseMeta.isDriverVersion( majorVersion, minorVersion ) ); assertFalse( baseSimbaDatabaseMeta.isDriverVersion( majorVersion, minorVersion + 1 ) ); assertFalse( baseSimbaDatabaseMeta.isDriverVersion( majorVersion + 1, minorVersion ) ); } @Test public void testGetAccessTypeList() { assertArrayEquals( new int[] { DatabaseMeta.TYPE_ACCESS_NATIVE, DatabaseMeta.TYPE_ACCESS_JNDI }, baseSimbaDatabaseMeta.getAccessTypeList() ); } @Test public void testGetDriverClassOther() { assertEquals( driverClassname, baseSimbaDatabaseMeta.getDriverClass() ); } @Test public void testGetUrlDefaults() throws KettleDatabaseException, MalformedURLException { String testHost = "testHost"; String urlString = baseSimbaDatabaseMeta.getURL( testHost, "", "" ); assertTrue( urlString.startsWith( jdbcPrefix ) ); URL url = new URL( "http://" + urlString.substring( ImpalaSimbaDatabaseMeta.JDBC_URL_PREFIX.length() ) ); assertEquals( testHost, url.getHost() ); assertEquals( baseSimbaDatabaseMeta.getDefaultDatabasePort(), url.getPort() ); assertEquals( "/default;AuthMech=0", url.getPath() ); } @Test public void testGetUrlJndi() throws KettleDatabaseException { baseSimbaDatabaseMeta.setAccessType( DatabaseMeta.TYPE_ACCESS_JNDI ); assertEquals( Hive2SimbaDatabaseMeta.URL_IS_CONFIGURED_THROUGH_JNDI, baseSimbaDatabaseMeta.getURL( "", "", "" ) ); } @Test public void testGetUrlKerb() throws Throwable { String testHost = "testHost"; String testPort = "1111"; String testDb = "testDb"; // Regular properties baseSimbaDatabaseMeta.getAttributes().put( KRB_HOST_FQDN, "fqdn" ); baseSimbaDatabaseMeta.getAttributes().put( KRB_SERVICE_NAME, "service" ); String urlString = baseSimbaDatabaseMeta.getURL( testHost, testPort, testDb ); assertTrue( urlString.startsWith( jdbcPrefix ) ); URL url = new URL( "http://" + urlString.substring( ImpalaSimbaDatabaseMeta.JDBC_URL_PREFIX.length() ) ); assertEquals( testHost, url.getHost() ); assertEquals( Integer.valueOf( testPort ).intValue(), url.getPort() ); assertEquals( "/" + testDb + ";AuthMech=1", url.getPath() ); // Extra properties baseSimbaDatabaseMeta = new ImpalaSimbaDatabaseMeta( driverLocator, null ); baseSimbaDatabaseMeta.getAttributes().put( Hive2SimbaDatabaseMeta.ATTRIBUTE_PREFIX_EXTRA_OPTION + baseSimbaDatabaseMeta.getPluginId() + "." + KRB_HOST_FQDN, "fqdn" ); baseSimbaDatabaseMeta.getAttributes() .put( Hive2SimbaDatabaseMeta.ATTRIBUTE_PREFIX_EXTRA_OPTION + baseSimbaDatabaseMeta.getPluginId() + "." 
+ KRB_SERVICE_NAME, "service" ); urlString = baseSimbaDatabaseMeta.getURL( testHost, testPort, testDb ); assertTrue( urlString.startsWith( ImpalaSimbaDatabaseMeta.JDBC_URL_PREFIX ) ); url = new URL( "http://" + urlString.substring( ImpalaSimbaDatabaseMeta.JDBC_URL_PREFIX.length() ) ); assertEquals( testHost, url.getHost() ); assertEquals( Integer.valueOf( testPort ).intValue(), url.getPort() ); assertEquals( "/" + testDb + ";AuthMech=1", url.getPath() ); } @Test public void testGetUrlUsername() throws KettleDatabaseException, MalformedURLException { String testUsername = "testUsername"; baseSimbaDatabaseMeta.setUsername( testUsername ); String testHost = "testHost"; String testPort = "1111"; String testDb = "testDb"; String urlString = baseSimbaDatabaseMeta.getURL( testHost, testPort, testDb ); assertTrue( urlString.startsWith( jdbcPrefix ) ); URL url = new URL( "http://" + urlString.substring( ImpalaSimbaDatabaseMeta.JDBC_URL_PREFIX.length() ) ); assertEquals( testHost, url.getHost() ); assertEquals( Integer.valueOf( testPort ).intValue(), url.getPort() ); assertEquals( "/" + testDb + ";AuthMech=2;UID=" + testUsername, url.getPath() ); } @Test public void testGetUrlPassword() throws KettleDatabaseException, MalformedURLException { String testUsername = "testUsername"; String testPassword = "testPassword"; baseSimbaDatabaseMeta.setUsername( testUsername ); baseSimbaDatabaseMeta.setPassword( testPassword ); String testHost = "testHost"; String testPort = "1111"; String testDb = "testDb"; String urlString = baseSimbaDatabaseMeta.getURL( testHost, testPort, testDb ); assertTrue( urlString.startsWith( jdbcPrefix ) ); URL url = new URL( "http://" + urlString.substring( ImpalaSimbaDatabaseMeta.JDBC_URL_PREFIX.length() ) ); assertEquals( testHost, url.getHost() ); assertEquals( Integer.valueOf( testPort ).intValue(), url.getPort() ); assertEquals( "/" + testDb + ";AuthMech=3;UID=" + testUsername + ";PWD=" + testPassword, url.getPath() ); } @Test public void testGetFieldDefinitionBoolean() { assertGetFieldDefinition( new ValueMetaBoolean(), "boolName", "BOOLEAN" ); } @Test public void testGetFieldDefinitionDate() { when( driver.getMajorVersion() ).thenReturn( 0 ); when( driver.getMinorVersion() ).thenReturn( 12 ); assertGetFieldDefinition( new ValueMetaDate(), "dateName", "DATE" ); } @Test public void testGetFieldDefinitionTimestamp() { when( driver.getMajorVersion() ).thenReturn( 0 ); when( driver.getMinorVersion() ).thenReturn( 8 ); assertGetFieldDefinition( new ValueMetaTimestamp(), "timestampName", "TIMESTAMP" ); } @Test public void testGetFieldDefinitionStringVarchar() { when( driver.getMajorVersion() ).thenReturn( 0 ); when( driver.getMinorVersion() ).thenReturn( 12 ); assertGetFieldDefinition( new ValueMetaString(), "stringName", "VARCHAR" ); } @Test public void testGetFieldDefinitionNumber() { String numberName = "numberName"; ValueMetaInterface valueMetaInterface = new ValueMetaNumber(); valueMetaInterface.setName( numberName ); valueMetaInterface.setPrecision( 0 ); valueMetaInterface.setLength( 9 ); assertGetFieldDefinition( valueMetaInterface, "INT" ); valueMetaInterface.setLength( 18 ); assertGetFieldDefinition( valueMetaInterface, "BIGINT" ); valueMetaInterface.setLength( 19 ); assertGetFieldDefinition( valueMetaInterface, "FLOAT" ); valueMetaInterface.setPrecision( 10 ); valueMetaInterface.setLength( 16 ); assertGetFieldDefinition( valueMetaInterface, "FLOAT" ); valueMetaInterface.setLength( 15 ); assertGetFieldDefinition( valueMetaInterface, "DOUBLE" ); } @Test public void 
testGetFieldDefinitionInteger() { String integerName = "integerName"; ValueMetaInterface valueMetaInterface = new ValueMetaInteger(); valueMetaInterface.setName( integerName ); valueMetaInterface.setPrecision( 0 ); valueMetaInterface.setLength( 9 ); assertGetFieldDefinition( valueMetaInterface, "INT" ); valueMetaInterface.setLength( 18 ); assertGetFieldDefinition( valueMetaInterface, "BIGINT" ); valueMetaInterface.setLength( 19 ); assertGetFieldDefinition( valueMetaInterface, "FLOAT" ); } @Test public void testGetFieldDefinitionBigNumber() { String bigNumberName = "bigNumberName"; ValueMetaInterface valueMetaInterface = new ValueMetaBigNumber(); valueMetaInterface.setName( bigNumberName ); valueMetaInterface.setPrecision( 0 ); valueMetaInterface.setLength( 9 ); assertGetFieldDefinition( valueMetaInterface, "INT" ); valueMetaInterface.setLength( 18 ); assertGetFieldDefinition( valueMetaInterface, "BIGINT" ); valueMetaInterface.setLength( 19 ); assertGetFieldDefinition( valueMetaInterface, "FLOAT" ); valueMetaInterface.setPrecision( 10 ); valueMetaInterface.setLength( 16 ); assertGetFieldDefinition( valueMetaInterface, "FLOAT" ); valueMetaInterface.setLength( 15 ); assertGetFieldDefinition( valueMetaInterface, "DOUBLE" ); } @Test public void testGetFieldDefinition() { assertGetFieldDefinition( new ValueMetaInternetAddress(), "internetAddressName", "" ); } private void assertGetFieldDefinition( ValueMetaInterface valueMetaInterface, String name, String expectedType ) { valueMetaInterface = valueMetaInterface.clone(); valueMetaInterface.setName( name ); assertGetFieldDefinition( valueMetaInterface, expectedType ); } private void assertGetFieldDefinition( ValueMetaInterface valueMetaInterface, String expectedType ) { assertEquals( expectedType, baseSimbaDatabaseMeta.getFieldDefinition( valueMetaInterface, null, null, false, false, false ) ); assertEquals( valueMetaInterface.getName() + " " + expectedType, baseSimbaDatabaseMeta.getFieldDefinition( valueMetaInterface, null, null, false, true, false ) ); } } ================================================ FILE: kettle-plugins/hive/core/src/test/java/org/pentaho/big/data/kettle/plugins/hive/Hive2DatabaseDialectTest.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. 
* * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.hive; import junit.framework.Assert; import org.junit.Test; import org.pentaho.database.model.DatabaseAccessType; import org.pentaho.database.model.DatabaseConnection; import org.pentaho.database.model.IDatabaseType; public class Hive2DatabaseDialectTest { private Hive2DatabaseDialect dialect; public Hive2DatabaseDialectTest() { this.dialect = new Hive2DatabaseDialect(); } @Test public void testGetNativeDriver() { Assert.assertEquals( dialect.getNativeDriver(), "org.apache.hive.jdbc.HiveDriver" ); } @Test public void testGetURL() throws Exception { DatabaseConnection conn = new DatabaseConnection(); conn.setAccessType( DatabaseAccessType.NATIVE ); Assert.assertEquals( dialect.getURL( conn ), "jdbc:hive2://null:null/null" ); } @Test public void testGetUsedLibraries() { Assert.assertEquals( dialect.getUsedLibraries()[0], "pentaho-hadoop-hive-jdbc-shim-1.4-SNAPSHOT.jar" ); } @Test public void testGetNativeJdbcPre() { Assert.assertEquals( dialect.getNativeJdbcPre(), "jdbc:hive2://" ); } @Test public void testGetDatabaseType() { IDatabaseType dbType = dialect.getDatabaseType(); Assert.assertEquals( dbType.getName(), "Hadoop Hive 2" ); } @Test public void testSupportsSchemas() { Assert.assertFalse( dialect.supportsSchemas() ); } @Test public void testGetDefaultDatabasePort() { Assert.assertEquals( dialect.getDefaultDatabasePort(), 10000 ); } } ================================================ FILE: kettle-plugins/hive/core/src/test/java/org/pentaho/big/data/kettle/plugins/hive/Hive2DatabaseMetaTest.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. 
* * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.hive; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import org.mockito.ArgumentCaptor; import org.mockito.Mock; import org.mockito.junit.MockitoJUnitRunner; import org.pentaho.di.core.exception.KettleDatabaseException; import org.pentaho.di.core.row.ValueMetaInterface; import org.pentaho.di.core.row.value.ValueMetaBigNumber; import org.pentaho.di.core.row.value.ValueMetaBoolean; import org.pentaho.di.core.row.value.ValueMetaDate; import org.pentaho.di.core.row.value.ValueMetaInteger; import org.pentaho.di.core.row.value.ValueMetaInternetAddress; import org.pentaho.di.core.row.value.ValueMetaNumber; import org.pentaho.di.core.row.value.ValueMetaString; import org.pentaho.di.core.row.value.ValueMetaTimestamp; import org.pentaho.hadoop.shim.api.cluster.NamedClusterService; import org.pentaho.hadoop.shim.api.jdbc.DriverLocator; import org.pentaho.metastore.api.IMetaStore; import org.pentaho.metastore.locator.api.MetastoreLocator; import java.net.MalformedURLException; import java.net.URL; import java.sql.Driver; import java.util.Arrays; import java.util.HashMap; import java.util.List; import java.util.Map; import static org.junit.Assert.assertArrayEquals; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNull; import static org.junit.Assert.assertTrue; import static org.mockito.ArgumentMatchers.any; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; /** * Created by bryan on 4/14/16. */ @RunWith( MockitoJUnitRunner.Silent.class ) public class Hive2DatabaseMetaTest { public static final String LOCALHOST = "localhost"; public static final String PORT = "10000"; public static final String DEFAULT = "default"; @Mock DriverLocator driverLocator; @Mock Driver driver; @Mock NamedClusterService namedClusterService; @Mock MetastoreLocator metastoreLocator; @Mock IMetaStore iMetaStore; Hive2DatabaseMeta hive2DatabaseMeta; private String hive2DatabaseMetaURL; private List namedClusterList = Arrays.asList( new String[]{ "cluster1", "cluster2" } ); ArgumentCaptor iMetaStoreCaptor = ArgumentCaptor.forClass( IMetaStore.class ); private static String CLUSTER = "cluster1"; private static String PLUGIN_ID = "hive2"; @Before public void setup() throws Throwable { hive2DatabaseMeta = new Hive2DatabaseMeta( driverLocator, namedClusterService, metastoreLocator ); hive2DatabaseMetaURL = hive2DatabaseMeta.getURL( LOCALHOST, PORT, DEFAULT ); when( driverLocator.getDriver( hive2DatabaseMetaURL ) ).thenReturn( driver ); when( metastoreLocator.getMetastore() ).thenReturn( iMetaStore ); when( namedClusterService.listNames( any() ) ).thenReturn( namedClusterList ); } @Test public void testGetAccessTypeList() { assertArrayEquals( Hive2DatabaseMeta.ACCESS_TYPE_LIST, hive2DatabaseMeta.getAccessTypeList() ); } @Test public void testGetUsedLibraries() { assertArrayEquals( new String[] { Hive2DatabaseMeta.JAR_FILE }, hive2DatabaseMeta.getUsedLibraries() ); } @Test public void testGetDriverClass() { assertEquals( Hive2DatabaseMeta.DRIVER_CLASS_NAME, hive2DatabaseMeta.getDriverClass() ); } @Test public void testGetAddColumnStatement() { String testTable = "testTable"; String booleanCol = "booleanCol"; ValueMetaInterface valueMetaInterface = new ValueMetaBoolean(); valueMetaInterface.setName( booleanCol ); String addColumnStatement = 
hive2DatabaseMeta.getAddColumnStatement( testTable, valueMetaInterface, null, false, null, false ); assertTrue( addColumnStatement.contains( "BOOLEAN" ) ); assertTrue( addColumnStatement.contains( testTable ) ); assertTrue( addColumnStatement.contains( booleanCol ) ); } @Test public void testGetFieldDefinitionBoolean() { assertGetFieldDefinition( new ValueMetaBoolean(), "boolName", "BOOLEAN" ); } @Test public void testGetFieldDefinitionDate() { when( driver.getMajorVersion() ).thenReturn( 0 ); when( driver.getMinorVersion() ).thenReturn( 12 ); assertGetFieldDefinition( new ValueMetaDate(), "dateName", "DATE" ); } @Test public void testGetFieldDefinitionDateUnsupported() { when( driver.getMajorVersion() ).thenReturn( 0 ); when( driver.getMinorVersion() ).thenReturn( 11 ); assertGetFieldDefinition( new ValueMetaDate(), "dateName", "DATE" ); } @Test public void testGetFieldDefinitionTimestamp() { when( driver.getMajorVersion() ).thenReturn( 0 ); when( driver.getMinorVersion() ).thenReturn( 8 ); assertGetFieldDefinition( new ValueMetaTimestamp(), "timestampName", "TIMESTAMP" ); } @Test public void testGetFieldDefinitionUnsupported() { when( driver.getMajorVersion() ).thenReturn( 0 ); when( driver.getMinorVersion() ).thenReturn( 7 ); assertGetFieldDefinition( new ValueMetaTimestamp(), "timestampName", "TIMESTAMP" ); } @Test public void testGetFieldDefinitionString() { assertGetFieldDefinition( new ValueMetaString(), "stringName", "STRING" ); } @Test public void testGetFieldDefinitionNumber() { String numberName = "numberName"; ValueMetaInterface valueMetaInterface = new ValueMetaNumber(); valueMetaInterface.setName( numberName ); valueMetaInterface.setPrecision( 0 ); valueMetaInterface.setLength( 9 ); assertGetFieldDefinition( valueMetaInterface, "INT" ); valueMetaInterface.setLength( 18 ); assertGetFieldDefinition( valueMetaInterface, "BIGINT" ); valueMetaInterface.setLength( 19 ); assertGetFieldDefinition( valueMetaInterface, "FLOAT" ); valueMetaInterface.setPrecision( 10 ); valueMetaInterface.setLength( 16 ); assertGetFieldDefinition( valueMetaInterface, "FLOAT" ); valueMetaInterface.setLength( 15 ); assertGetFieldDefinition( valueMetaInterface, "DOUBLE" ); } @Test public void testGetFieldDefinitionInteger() { String integerName = "integerName"; ValueMetaInterface valueMetaInterface = new ValueMetaInteger(); valueMetaInterface.setName( integerName ); valueMetaInterface.setPrecision( 0 ); valueMetaInterface.setLength( 9 ); assertGetFieldDefinition( valueMetaInterface, "INT" ); valueMetaInterface.setLength( 18 ); assertGetFieldDefinition( valueMetaInterface, "BIGINT" ); valueMetaInterface.setLength( 19 ); assertGetFieldDefinition( valueMetaInterface, "FLOAT" ); } @Test public void testGetFieldDefinitionBigNumber() { String bigNumberName = "bigNumberName"; ValueMetaInterface valueMetaInterface = new ValueMetaBigNumber(); valueMetaInterface.setName( bigNumberName ); valueMetaInterface.setPrecision( 0 ); valueMetaInterface.setLength( 9 ); assertGetFieldDefinition( valueMetaInterface, "INT" ); valueMetaInterface.setLength( 18 ); assertGetFieldDefinition( valueMetaInterface, "BIGINT" ); valueMetaInterface.setLength( 19 ); assertGetFieldDefinition( valueMetaInterface, "FLOAT" ); valueMetaInterface.setPrecision( 10 ); valueMetaInterface.setLength( 16 ); assertGetFieldDefinition( valueMetaInterface, "FLOAT" ); valueMetaInterface.setLength( 15 ); assertGetFieldDefinition( valueMetaInterface, "DOUBLE" ); } @Test public void testGetFieldDefinition() { assertGetFieldDefinition( new 
ValueMetaInternetAddress(), "internetAddressName", "" ); } @Test public void testGetModifyColumnStatement() { String testTable = "testTable"; String booleanCol = "booleanCol"; ValueMetaInterface valueMetaInterface = new ValueMetaBoolean(); valueMetaInterface.setName( booleanCol ); String addColumnStatement = hive2DatabaseMeta.getModifyColumnStatement( testTable, valueMetaInterface, null, false, null, false ); assertTrue( addColumnStatement.contains( "BOOLEAN" ) ); assertTrue( addColumnStatement.contains( testTable ) ); assertTrue( addColumnStatement.contains( booleanCol ) ); } @Test public void testGetURL() throws KettleDatabaseException, MalformedURLException { String testHostname = "testHostname"; int port = 9429; String testDbName = "testDbName"; String urlString = hive2DatabaseMeta.getURL( testHostname, "" + port, testDbName ); assertTrue( urlString.startsWith( Hive2DatabaseMeta.URL_PREFIX ) ); // Use known prefix urlString = "http://" + urlString.substring( Hive2DatabaseMeta.URL_PREFIX.length() ); URL url = new URL( urlString ); assertEquals( testHostname, url.getHost() ); assertEquals( port, url.getPort() ); assertEquals( "/" + testDbName, url.getPath() ); } @Test public void testGetSelectCountStatement() { String testTable = "testTable"; assertEquals( Hive2DatabaseMeta.SELECT_COUNT_1_FROM + testTable, hive2DatabaseMeta.getSelectCountStatement( testTable ) ); } @Test public void testGenerateColumnAlias5AndPrior() { when( driver.getMajorVersion() ).thenReturn( 0 ); when( driver.getMinorVersion() ).thenReturn( 5 ); String suggestedName = "suggestedName"; int columnIndex = 12; assertEquals( suggestedName, hive2DatabaseMeta.generateColumnAlias( columnIndex, suggestedName ) ); } @Test public void testGenerateColumnAlias6AndLater() { when( driver.getMajorVersion() ).thenReturn( 0 ); when( driver.getMinorVersion() ).thenReturn( 6 ); String suggestedName = "suggestedName"; int columnIndex = 12; assertEquals( suggestedName, hive2DatabaseMeta.generateColumnAlias( columnIndex, suggestedName ) ); } @Test public void testIsDriverVersionNull() { assertTrue( hive2DatabaseMeta.isDriverVersion( -1, -1 ) ); } @Test public void testIsDriverVersionMajorGreater() { when( driver.getMajorVersion() ).thenReturn( 6 ); when( driver.getMinorVersion() ).thenReturn( 0 ); assertTrue( hive2DatabaseMeta.isDriverVersion( 5, 5 ) ); } @Test public void testIsDriverVersionMajorSameMinorEqual() { when( driver.getMajorVersion() ).thenReturn( 5 ); when( driver.getMinorVersion() ).thenReturn( 5 ); assertTrue( hive2DatabaseMeta.isDriverVersion( 5, 5 ) ); } @Test public void testIsDriverVersionMajorSameMinorLess() { when( driver.getMajorVersion() ).thenReturn( 5 ); when( driver.getMinorVersion() ).thenReturn( 4 ); assertFalse( hive2DatabaseMeta.isDriverVersion( 5, 5 ) ); } @Test public void testIsDriverVersionMajorLess() { when( driver.getMajorVersion() ).thenReturn( 4 ); when( driver.getMinorVersion() ).thenReturn( 6 ); assertFalse( hive2DatabaseMeta.isDriverVersion( 5, 5 ) ); } @Test public void testGetStartQuote() { assertEquals( 0, hive2DatabaseMeta.getStartQuote().length() ); } @Test public void testGetEndQuote() { assertEquals( 0, hive2DatabaseMeta.getEndQuote().length() ); } @Test public void testGetTableTypesReturnsNull() { assertNull( hive2DatabaseMeta.getTableTypes() ); } @Test public void testGetViewTypes() { assertArrayEquals( new String[] { Hive2DatabaseMeta.VIEW, Hive2DatabaseMeta.VIRTUAL_VIEW }, hive2DatabaseMeta.getViewTypes() ); } @Test public void testGetTruncateTableStatement10OrPrior() { when( 
driver.getMajorVersion() ).thenReturn( 0 ); when( driver.getMinorVersion() ).thenReturn( 10 ); String testTableName = "testTableName"; assertEquals( Hive2DatabaseMeta.TRUNCATE_TABLE + testTableName, hive2DatabaseMeta.getTruncateTableStatement( testTableName ) ); } @Test public void testGetTruncateTableStatement11AndLater() { when( driver.getMajorVersion() ).thenReturn( 0 ); when( driver.getMinorVersion() ).thenReturn( 11 ); String testTableName = "testTableName"; assertEquals( Hive2DatabaseMeta.TRUNCATE_TABLE + testTableName, hive2DatabaseMeta.getTruncateTableStatement( testTableName ) ); } @Test public void testSupportsSetCharacterStream() { assertFalse( hive2DatabaseMeta.supportsSetCharacterStream() ); } @Test public void testSupportsBatchUpdates() { assertFalse( hive2DatabaseMeta.supportsBatchUpdates() ); } @Test public void testSupportsTimeStampToDateConversion() { assertFalse( hive2DatabaseMeta.supportsTimeStampToDateConversion() ); } @Test public void testGetNamedClusterList() throws Exception { assertEquals( namedClusterList, hive2DatabaseMeta.getNamedClusterList() ); verify( namedClusterService ).listNames( iMetaStoreCaptor.capture() ); } @Test public void testPutOptionalOptions() { hive2DatabaseMeta.setNamedCluster( CLUSTER ); hive2DatabaseMeta.setPluginId( PLUGIN_ID ); Map extraOptions = new HashMap(); hive2DatabaseMeta.putOptionalOptions( extraOptions ); String value = extraOptions.get( PLUGIN_ID + ".pentahoNamedCluster" ); assertEquals( CLUSTER, value ); } private void assertGetFieldDefinition( ValueMetaInterface valueMetaInterface, String name, String expectedType ) { valueMetaInterface = valueMetaInterface.clone(); valueMetaInterface.setName( name ); assertGetFieldDefinition( valueMetaInterface, expectedType ); } private void assertGetFieldDefinition( ValueMetaInterface valueMetaInterface, String expectedType ) { assertEquals( expectedType, hive2DatabaseMeta.getFieldDefinition( valueMetaInterface, null, null, false, false, false ) ); assertEquals( valueMetaInterface.getName() + " " + expectedType, hive2DatabaseMeta.getFieldDefinition( valueMetaInterface, null, null, false, true, false ) ); } } ================================================ FILE: kettle-plugins/hive/core/src/test/java/org/pentaho/big/data/kettle/plugins/hive/Hive2SimbaDatabaseDialectTest.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. 
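The assertions in Hive2DatabaseMetaTest above pin down two behaviours: how numeric ValueMeta length/precision pairs map onto Hive column types, and how isDriverVersion compares the loaded JDBC driver's version against a required minimum. The standalone sketch below restates both rules in plain Java so the expected mapping is easy to read at a glance; the class and method names are illustrative only and are not part of the plugin's API.

public class HiveMetaBehaviourSketch {
  // Version gate consistent with the testIsDriverVersion* cases above: a strictly greater
  // major version passes; an equal major version needs at least the required minor version.
  static boolean isDriverVersionAtLeast( int driverMajor, int driverMinor,
                                         int requiredMajor, int requiredMinor ) {
    return driverMajor > requiredMajor
      || ( driverMajor == requiredMajor && driverMinor >= requiredMinor );
  }

  // Numeric mapping consistent with testGetFieldDefinitionNumber/Integer/BigNumber above:
  // precision 0 -> INT up to length 9, BIGINT up to 18, FLOAT beyond;
  // non-zero precision -> DOUBLE up to length 15, FLOAT beyond.
  static String numericHiveType( int length, int precision ) {
    if ( precision == 0 ) {
      if ( length <= 9 ) { return "INT"; }
      if ( length <= 18 ) { return "BIGINT"; }
      return "FLOAT";
    }
    return length <= 15 ? "DOUBLE" : "FLOAT";
  }

  public static void main( String[] args ) {
    System.out.println( isDriverVersionAtLeast( 5, 4, 5, 5 ) );  // false
    System.out.println( isDriverVersionAtLeast( 6, 0, 5, 5 ) );  // true
    System.out.println( numericHiveType( 19, 0 ) );              // FLOAT
    System.out.println( numericHiveType( 15, 10 ) );             // DOUBLE
  }
}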
* * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.hive; import junit.framework.Assert; import org.junit.Test; import org.pentaho.database.DatabaseDialectException; import org.pentaho.database.model.DatabaseAccessType; import org.pentaho.database.model.DatabaseConnection; import org.pentaho.database.model.IDatabaseType; import static org.hamcrest.CoreMatchers.is; import static org.junit.Assert.*; public class Hive2SimbaDatabaseDialectTest { private Hive2SimbaDatabaseDialect dialect; public Hive2SimbaDatabaseDialectTest() { this.dialect = new Hive2SimbaDatabaseDialect(); } @Test public void testGetNativeDriver() { Assert.assertEquals( dialect.getNativeDriver(), "org.apache.hive.jdbc.HiveSimbaDriver" ); } @Test public void testGetURLNative() throws Exception { DatabaseConnection conn = new DatabaseConnection(); conn.setAccessType( DatabaseAccessType.NATIVE ); conn.setUsername( "joe" ); assertThat( dialect.getURL( conn ), is( "jdbc:hive2://null:10000/default;AuthMech=2;UID=joe" ) ); } @Test public void testGetURLJndi() throws DatabaseDialectException { DatabaseConnection conn = new DatabaseConnection(); conn.setAccessType( DatabaseAccessType.JNDI ); assertThat( dialect.getURL( conn ), is( SimbaUrl.URL_IS_CONFIGURED_THROUGH_JNDI ) ); } @Test public void testGetUsedLibraries() { assertEquals( dialect.getUsedLibraries()[0], "HiveJDBC41.jar" ); } @Test public void testGetNativeJdbcPre() { Assert.assertEquals( dialect.getNativeJdbcPre(), "jdbc:hive2://" ); } @Test public void testGetDatabaseType() { IDatabaseType dbType = dialect.getDatabaseType(); assertThat( dbType.getName(), is( "Hadoop Hive 2 (Simba)" ) ); } @Test public void testGetReservedWords() { assertFalse( dialect.getReservedWords().length > 0 ); } @Test public void testSupportsBitmapIndex() { assertTrue( dialect.supportsBitmapIndex() ); } @Test public void testGetTruncateTableStatement() { String tableName = "table1"; assertEquals( dialect.getTruncateTableStatement( tableName ), "TRUNCATE TABLE " + tableName ); } } ================================================ FILE: kettle-plugins/hive/core/src/test/java/org/pentaho/big/data/kettle/plugins/hive/Hive2SimbaDatabaseMetaTest.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. 
* * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.hive; import org.junit.Before; import org.junit.BeforeClass; import org.junit.Test; import org.junit.runner.RunWith; import org.mockito.InjectMocks; import org.mockito.Mock; import org.mockito.junit.MockitoJUnitRunner; import org.pentaho.di.core.database.DatabaseMeta; import org.pentaho.di.core.logging.KettleLogStore; import org.pentaho.hadoop.shim.api.jdbc.DriverLocator; import java.sql.Driver; import java.util.Arrays; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; import static org.mockito.Mockito.when; @RunWith( MockitoJUnitRunner.Silent.class ) public class Hive2SimbaDatabaseMetaTest { public static final String LOCALHOST = "localhost"; public static final String PORT = "10000"; public static final String DEFAULT = "default"; @Mock DriverLocator driverLocator; @Mock Driver driver; @InjectMocks Hive2SimbaDatabaseMeta hive2SimbaDatabaseMeta; private String hive2SimbaDatabaseMetaURL; @BeforeClass public static void initLogs() { KettleLogStore.init(); } @Before public void setup() throws Throwable { hive2SimbaDatabaseMetaURL = hive2SimbaDatabaseMeta.getURL( LOCALHOST, PORT, DEFAULT ); when( driverLocator.getDriver( hive2SimbaDatabaseMetaURL ) ).thenReturn( driver ); } @Test public void testGetDriverClassOther() { assertEquals( Hive2SimbaDatabaseMeta.DRIVER_CLASS_NAME, hive2SimbaDatabaseMeta.getDriverClass() ); } @Test public void testGetJdbcPrefix() { assertEquals( Hive2SimbaDatabaseMeta.JDBC_URL_PREFIX, hive2SimbaDatabaseMeta.getJdbcPrefix() ); } @Test public void testGetUsedLibraries() { assertTrue( Arrays.equals( hive2SimbaDatabaseMeta.getUsedLibraries(), new String[] { hive2SimbaDatabaseMeta.JAR_FILE } ) ); } @Test public void testGetDefaultDatabasePort() { assertEquals( Hive2SimbaDatabaseMeta.DEFAULT_PORT, hive2SimbaDatabaseMeta.getDefaultDatabasePort() ); } } ================================================ FILE: kettle-plugins/hive/core/src/test/java/org/pentaho/big/data/kettle/plugins/hive/HiveDatabaseDialectTest.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. 
* * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.hive; import junit.framework.Assert; import org.junit.Test; import org.pentaho.database.model.DatabaseAccessType; import org.pentaho.database.model.DatabaseConnection; import org.pentaho.database.model.IDatabaseType; public class HiveDatabaseDialectTest { private HiveDatabaseDialect dialect; public HiveDatabaseDialectTest() { this.dialect = new HiveDatabaseDialect(); } @Test public void testGetNativeDriver() { Assert.assertEquals( dialect.getNativeDriver(), "org.apache.hadoop.hive.jdbc.HiveDriver" ); } @Test public void testGetURL() throws Exception { DatabaseConnection conn = new DatabaseConnection(); conn.setAccessType( DatabaseAccessType.NATIVE ); Assert.assertEquals( dialect.getURL( conn ), "jdbc:hive://null:null/null" ); } @Test public void testGetUsedLibraries() { Assert.assertEquals( dialect.getUsedLibraries()[0], "pentaho-hadoop-hive-jdbc-shim-1.4-SNAPSHOT.jar" ); } @Test public void testGetNativeJdbcPre() { Assert.assertEquals( dialect.getNativeJdbcPre(), "jdbc:hive://" ); } @Test public void testGetDatabaseType() { IDatabaseType dbType = dialect.getDatabaseType(); Assert.assertEquals( dbType.getName(), "Hadoop Hive (deprecated)" ); } @Test public void testSupportsSchemas() { Assert.assertFalse( dialect.supportsSchemas() ); } @Test public void testGetDefaultDatabasePort() { Assert.assertEquals( dialect.getDefaultDatabasePort(), 10000 ); } } ================================================ FILE: kettle-plugins/hive/core/src/test/java/org/pentaho/big/data/kettle/plugins/hive/HiveDatabaseMetaTest.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. 
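HiveDatabaseDialectTest above exercises the deprecated Hive 1 dialect: the native prefix is jdbc:hive://, the default port is 10000, and getURL concatenates hostname, port and database name directly (hence the literal "null" segments when nothing is set on the DatabaseConnection). A minimal, hypothetical restatement of that concatenation, not the dialect's real method:

public class HiveLegacyUrlSketch {
  // Mirrors the URL shape asserted in testGetURL above.
  static String hiveUrl( String host, String port, String db ) {
    return "jdbc:hive://" + host + ":" + port + "/" + db;
  }

  public static void main( String[] args ) {
    // With no values set, the dialect produces "jdbc:hive://null:null/null".
    System.out.println( hiveUrl( null, null, null ) );
  }
}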
* * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.hive; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import org.mockito.Mock; import org.mockito.junit.MockitoJUnitRunner; import org.pentaho.hadoop.shim.api.jdbc.DriverLocator; import org.pentaho.di.core.exception.KettleDatabaseException; import org.pentaho.di.core.row.ValueMetaInterface; import org.pentaho.di.core.row.value.ValueMetaBigNumber; import org.pentaho.di.core.row.value.ValueMetaBoolean; import org.pentaho.di.core.row.value.ValueMetaDate; import org.pentaho.di.core.row.value.ValueMetaInteger; import org.pentaho.di.core.row.value.ValueMetaInternetAddress; import org.pentaho.di.core.row.value.ValueMetaNumber; import org.pentaho.di.core.row.value.ValueMetaString; import org.pentaho.di.core.row.value.ValueMetaTimestamp; import java.net.MalformedURLException; import java.net.URL; import java.sql.Driver; import static org.junit.Assert.assertArrayEquals; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNull; import static org.junit.Assert.assertTrue; import static org.mockito.Mockito.when; @RunWith( MockitoJUnitRunner.class ) public class HiveDatabaseMetaTest { public static final String LOCALHOST = "localhost"; public static final String PORT = "10000"; public static final String DEFAULT = "default"; @Mock DriverLocator driverLocator; @Mock Driver driver; private HiveDatabaseMeta hiveDatabaseMeta; private String hiveDatabaseMetaURL; @Before public void setup() throws Throwable { hiveDatabaseMeta = new HiveDatabaseMeta( driverLocator ); hiveDatabaseMetaURL = hiveDatabaseMeta.getURL( LOCALHOST, PORT, DEFAULT ); when( driverLocator.getDriver( hiveDatabaseMetaURL ) ).thenReturn( driver ); } @Test public void testColumnAlias_060_And_Later() throws Throwable { when( driver.getMajorVersion() ).thenReturn( 0 ); when( driver.getMinorVersion() ).thenReturn( 6 ); String alias = hiveDatabaseMeta.generateColumnAlias( 0, "alias" ); assertEquals( "alias", alias ); alias = hiveDatabaseMeta.generateColumnAlias( 1, "alias1" ); assertEquals( "alias1", alias ); alias = hiveDatabaseMeta.generateColumnAlias( 2, "alias2" ); assertEquals( "alias2", alias ); } @Test public void testColumnAlias_050() throws Throwable { when( driver.getMajorVersion() ).thenReturn( 0 ); when( driver.getMinorVersion() ).thenReturn( 5 ); String alias = hiveDatabaseMeta.generateColumnAlias( 0, "alias" ); assertEquals( "_col0", alias ); alias = hiveDatabaseMeta.generateColumnAlias( 1, "alias1" ); assertEquals( "_col1", alias ); alias = hiveDatabaseMeta.generateColumnAlias( 2, "alias2" ); assertEquals( "_col2", alias ); } @Test public void testGetAddColumnStatement() { String testTable = "testTable"; String booleanCol = "booleanCol"; ValueMetaInterface valueMetaInterface = new ValueMetaBoolean(); valueMetaInterface.setName( booleanCol ); String addColumnStatement = hiveDatabaseMeta.getAddColumnStatement( testTable, valueMetaInterface, null, false, null, false ); assertTrue( addColumnStatement.contains( "BOOLEAN" ) ); assertTrue( addColumnStatement.contains( testTable ) ); assertTrue( addColumnStatement.contains( booleanCol ) ); } @Test public void testGetDriverClass() { assertEquals( HiveDatabaseMeta.DRIVER_CLASS_NAME, hiveDatabaseMeta.getDriverClass() ); } @Test public void testGetFieldDefinitionBoolean() { assertGetFieldDefinition( new ValueMetaBoolean(), "boolName", 
"BOOLEAN" ); } @Test public void testGetFieldDefinitionDate() { when( driver.getMajorVersion() ).thenReturn( 0 ); when( driver.getMinorVersion() ).thenReturn( 12 ); assertGetFieldDefinition( new ValueMetaDate(), "dateName", "DATE" ); } @Test( expected = IllegalArgumentException.class ) public void testGetFieldDefinitionDateUnsupported() { when( driver.getMajorVersion() ).thenReturn( 0 ); when( driver.getMinorVersion() ).thenReturn( 11 ); assertGetFieldDefinition( new ValueMetaDate(), "dateName", "DATE" ); } @Test public void testGetFieldDefinitionTimestamp() { when( driver.getMajorVersion() ).thenReturn( 0 ); when( driver.getMinorVersion() ).thenReturn( 8 ); assertGetFieldDefinition( new ValueMetaTimestamp(), "timestampName", "TIMESTAMP" ); } @Test( expected = IllegalArgumentException.class ) public void testGetFieldDefinitionUnsupported() { when( driver.getMajorVersion() ).thenReturn( 0 ); when( driver.getMinorVersion() ).thenReturn( 7 ); assertGetFieldDefinition( new ValueMetaTimestamp(), "timestampName", "TIMESTAMP" ); } @Test public void testGetFieldDefinitionString() { assertGetFieldDefinition( new ValueMetaString(), "stringName", "STRING" ); } @Test public void testGetFieldDefinitionNumber() { String numberName = "numberName"; ValueMetaInterface valueMetaInterface = new ValueMetaNumber(); valueMetaInterface.setName( numberName ); valueMetaInterface.setPrecision( 0 ); valueMetaInterface.setLength( 9 ); assertGetFieldDefinition( valueMetaInterface, "INT" ); valueMetaInterface.setLength( 18 ); assertGetFieldDefinition( valueMetaInterface, "BIGINT" ); valueMetaInterface.setLength( 19 ); assertGetFieldDefinition( valueMetaInterface, "FLOAT" ); valueMetaInterface.setPrecision( 10 ); valueMetaInterface.setLength( 16 ); assertGetFieldDefinition( valueMetaInterface, "FLOAT" ); valueMetaInterface.setLength( 15 ); assertGetFieldDefinition( valueMetaInterface, "DOUBLE" ); } @Test public void testGetFieldDefinitionInteger() { String integerName = "integerName"; ValueMetaInterface valueMetaInterface = new ValueMetaInteger(); valueMetaInterface.setName( integerName ); valueMetaInterface.setPrecision( 0 ); valueMetaInterface.setLength( 9 ); assertGetFieldDefinition( valueMetaInterface, "INT" ); valueMetaInterface.setLength( 18 ); assertGetFieldDefinition( valueMetaInterface, "BIGINT" ); valueMetaInterface.setLength( 19 ); assertGetFieldDefinition( valueMetaInterface, "FLOAT" ); } @Test public void testGetFieldDefinitionBigNumber() { String bigNumberName = "bigNumberName"; ValueMetaInterface valueMetaInterface = new ValueMetaBigNumber(); valueMetaInterface.setName( bigNumberName ); valueMetaInterface.setPrecision( 0 ); valueMetaInterface.setLength( 9 ); assertGetFieldDefinition( valueMetaInterface, "INT" ); valueMetaInterface.setLength( 18 ); assertGetFieldDefinition( valueMetaInterface, "BIGINT" ); valueMetaInterface.setLength( 19 ); assertGetFieldDefinition( valueMetaInterface, "FLOAT" ); valueMetaInterface.setPrecision( 10 ); valueMetaInterface.setLength( 16 ); assertGetFieldDefinition( valueMetaInterface, "FLOAT" ); valueMetaInterface.setLength( 15 ); assertGetFieldDefinition( valueMetaInterface, "DOUBLE" ); } @Test public void testGetFieldDefinition() { assertGetFieldDefinition( new ValueMetaInternetAddress(), "internetAddressName", "" ); } @Test public void testGetModifyColumnStatement() { String testTable = "testTable"; String booleanCol = "booleanCol"; ValueMetaInterface valueMetaInterface = new ValueMetaBoolean(); valueMetaInterface.setName( booleanCol ); String addColumnStatement = 
hiveDatabaseMeta.getModifyColumnStatement( testTable, valueMetaInterface, null, false, null, false ); assertTrue( addColumnStatement.contains( "BOOLEAN" ) ); assertTrue( addColumnStatement.contains( testTable ) ); assertTrue( addColumnStatement.contains( booleanCol ) ); } @Test public void testGetURL() throws KettleDatabaseException, MalformedURLException { String testHostname = "testHostname"; int port = 9429; String testDbName = "testDbName"; String urlString = hiveDatabaseMeta.getURL( testHostname, "" + port, testDbName ); assertTrue( urlString.startsWith( HiveDatabaseMeta.URL_PREFIX ) ); // Use known prefix urlString = "http://" + urlString.substring( HiveDatabaseMeta.URL_PREFIX.length() ); URL url = new URL( urlString ); assertEquals( testHostname, url.getHost() ); assertEquals( port, url.getPort() ); assertEquals( "/" + testDbName, url.getPath() ); } @Test public void testGetURLEmptyPort() throws KettleDatabaseException, MalformedURLException { String testHostname = "testHostname"; String testDbName = "testDbName"; String urlString = hiveDatabaseMeta.getURL( testHostname, "", testDbName ); assertTrue( urlString.startsWith( HiveDatabaseMeta.URL_PREFIX ) ); // Use known prefix urlString = "http://" + urlString.substring( HiveDatabaseMeta.URL_PREFIX.length() ); URL url = new URL( urlString ); assertEquals( testHostname, url.getHost() ); assertEquals( hiveDatabaseMeta.getDefaultDatabasePort(), url.getPort() ); assertEquals( "/" + testDbName, url.getPath() ); } @Test public void testGetUsedLibraries() { assertArrayEquals( new String[] { HiveDatabaseMeta.JAR_FILE }, hiveDatabaseMeta.getUsedLibraries() ); } @Test public void testGetSelectCountStatement() { String tableName = "tableName"; assertEquals( HiveDatabaseMeta.SELECT_COUNT_1_FROM + tableName, hiveDatabaseMeta.getSelectCountStatement( tableName ) ); } @Test public void testIsDriverVersionNull() { assertTrue( hiveDatabaseMeta.isDriverVersion( -1, -1 ) ); } @Test public void testIsDriverVersionMajorGreater() { when( driver.getMajorVersion() ).thenReturn( 6 ); when( driver.getMinorVersion() ).thenReturn( 0 ); assertTrue( hiveDatabaseMeta.isDriverVersion( 5, 5 ) ); } @Test public void testIsDriverVersionMajorSameMinorEqual() { when( driver.getMajorVersion() ).thenReturn( 5 ); when( driver.getMinorVersion() ).thenReturn( 5 ); assertTrue( hiveDatabaseMeta.isDriverVersion( 5, 5 ) ); } @Test public void testIsDriverVersionMajorSameMinorLess() { when( driver.getMajorVersion() ).thenReturn( 5 ); when( driver.getMinorVersion() ).thenReturn( 4 ); assertFalse( hiveDatabaseMeta.isDriverVersion( 5, 5 ) ); } @Test public void testIsDriverVersionMajorLess() { when( driver.getMajorVersion() ).thenReturn( 4 ); when( driver.getMinorVersion() ).thenReturn( 6 ); assertFalse( hiveDatabaseMeta.isDriverVersion( 5, 5 ) ); } @Test public void testGetStartQuote() { assertEquals( 0, hiveDatabaseMeta.getStartQuote().length() ); } @Test public void testGetEndQuote() { assertEquals( 0, hiveDatabaseMeta.getEndQuote().length() ); } @Test public void testGetDefaultDatabasePort() { assertEquals( HiveDatabaseMeta.DEFAULT_PORT, hiveDatabaseMeta.getDefaultDatabasePort() ); } @Test public void testGetTableTypes() { assertNull( hiveDatabaseMeta.getTableTypes() ); } @Test public void testGetViewTypes() { assertArrayEquals( new String[] { HiveDatabaseMeta.VIEW, HiveDatabaseMeta.VIRTUAL_VIEW }, hiveDatabaseMeta.getViewTypes() ); } @Test public void testGetTruncateTableStatement10OrPrior() { when( driver.getMajorVersion() ).thenReturn( 0 ); when( driver.getMinorVersion() 
).thenReturn( 10 ); assertNull( hiveDatabaseMeta.getTruncateTableStatement( "testTableName" ) ); } @Test public void testGetTruncateTableStatement11AndLater() { when( driver.getMajorVersion() ).thenReturn( 0 ); when( driver.getMinorVersion() ).thenReturn( 11 ); String testTableName = "testTableName"; assertEquals( HiveDatabaseMeta.TRUNCATE_TABLE + testTableName, hiveDatabaseMeta.getTruncateTableStatement( testTableName ) ); } @Test public void testSupportsSetCharacterStream() { assertFalse( hiveDatabaseMeta.supportsSetCharacterStream() ); } @Test public void testSupportsBatchUpdates() { assertFalse( hiveDatabaseMeta.supportsBatchUpdates() ); } @Test public void testSupportsTimeStampToDateConversion() { assertFalse( hiveDatabaseMeta.supportsTimeStampToDateConversion() ); } private void assertGetFieldDefinition( ValueMetaInterface valueMetaInterface, String name, String expectedType ) { valueMetaInterface = valueMetaInterface.clone(); valueMetaInterface.setName( name ); assertGetFieldDefinition( valueMetaInterface, expectedType ); } private void assertGetFieldDefinition( ValueMetaInterface valueMetaInterface, String expectedType ) { assertEquals( expectedType, hiveDatabaseMeta.getFieldDefinition( valueMetaInterface, null, null, false, false, false ) ); assertEquals( valueMetaInterface.getName() + " " + expectedType, hiveDatabaseMeta.getFieldDefinition( valueMetaInterface, null, null, false, true, false ) ); } } ================================================ FILE: kettle-plugins/hive/core/src/test/java/org/pentaho/big/data/kettle/plugins/hive/ImpalaDatabaseDialectTest.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.hive; import org.junit.Test; import org.pentaho.database.model.DatabaseConnection; import static org.junit.Assert.assertEquals; /** * User: Dzmitry Stsiapanau Date: 10/4/14 Time: 10:55 PM */ public class ImpalaDatabaseDialectTest { @Test public void testGetURL() throws Exception { ImpalaDatabaseDialect impala = new ImpalaDatabaseDialect(); DatabaseConnection dbconn = new DatabaseConnection(); String url = impala.getURL( dbconn ); assertEquals( "noauth url", "jdbc:hive2://null:null/null;impala_db=true;auth=noSasl", url ); dbconn.addExtraOption( impala.getDatabaseType().getShortName(), "principal", "someValue" ); url = impala.getURL( dbconn ); assertEquals( "principal url", "jdbc:hive2://null:null/null;impala_db=true", url ); } } ================================================ FILE: kettle-plugins/hive/core/src/test/java/org/pentaho/big/data/kettle/plugins/hive/ImpalaDatabaseMetaTest.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. 
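HiveDatabaseMetaTest above fixes two version-dependent behaviours of the Hive 1 meta: column aliases (Hive 0.5 drivers cannot use arbitrary aliases, so positional _colN names are generated, while 0.6 and later keep the suggested name) and TRUNCATE TABLE (unavailable before Hive 0.11, where the statement is null). A small sketch of that logic under illustrative names, assuming TRUNCATE_TABLE is the literal "TRUNCATE TABLE ":

public class HiveVersionFeaturesSketch {
  // Alias rule from testColumnAlias_050 / testColumnAlias_060_And_Later above:
  // pre-0.6 drivers get positional names, later drivers keep the suggestion.
  static String columnAlias( int minorVersion, int columnIndex, String suggested ) {
    return minorVersion >= 6 ? suggested : "_col" + columnIndex;
  }

  // Truncate rule from testGetTruncateTableStatement10OrPrior / 11AndLater above:
  // no TRUNCATE support before Hive 0.11.
  static String truncateStatement( int minorVersion, String table ) {
    return minorVersion >= 11 ? "TRUNCATE TABLE " + table : null;
  }

  public static void main( String[] args ) {
    System.out.println( columnAlias( 5, 2, "alias2" ) );             // _col2
    System.out.println( columnAlias( 6, 2, "alias2" ) );             // alias2
    System.out.println( truncateStatement( 10, "testTableName" ) );  // null
    System.out.println( truncateStatement( 11, "testTableName" ) );  // TRUNCATE TABLE testTableName
  }
}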
* * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.hive; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import org.mockito.ArgumentCaptor; import org.mockito.Mock; import org.mockito.junit.MockitoJUnitRunner; import org.pentaho.di.core.database.DatabaseMeta; import org.pentaho.di.core.exception.KettleDatabaseException; import org.pentaho.di.core.row.ValueMetaInterface; import org.pentaho.di.core.row.value.*; import org.pentaho.hadoop.shim.api.cluster.NamedClusterService; import org.pentaho.hadoop.shim.api.jdbc.DriverLocator; import org.pentaho.metastore.api.IMetaStore; import org.pentaho.metastore.locator.api.MetastoreLocator; import java.net.MalformedURLException; import java.net.URL; import java.sql.Driver; import java.util.Arrays; import java.util.HashMap; import java.util.List; import java.util.Map; import static org.junit.Assert.*; import static org.mockito.ArgumentMatchers.any; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; /** * Created by bryan on 10/20/15. */ @RunWith( MockitoJUnitRunner.class ) public class ImpalaDatabaseMetaTest { public static final String LOCALHOST = "localhost"; public static final String PORT = "10000"; public static final String DEFAULT = "default"; @Mock DriverLocator driverLocator; @Mock Driver driver; @Mock NamedClusterService namedClusterService; @Mock MetastoreLocator metastoreLocator; @Mock IMetaStore iMetaStore; private ImpalaDatabaseMeta impalaDatabaseMeta; private String impalaDatabaseMetaURL; private List namedClusterList = Arrays.asList( new String[]{ "cluster1", "cluster2" } ); ArgumentCaptor iMetaStoreCaptor = ArgumentCaptor.forClass( IMetaStore.class ); private static String CLUSTER = "cluster1"; private static String PLUGIN_ID = "impala"; @Before public void setup() throws Throwable { impalaDatabaseMeta = new ImpalaDatabaseMeta( driverLocator, namedClusterService, metastoreLocator ); impalaDatabaseMetaURL = impalaDatabaseMeta.getURL( LOCALHOST, PORT, DEFAULT ); when( driverLocator.getDriver( impalaDatabaseMetaURL ) ).thenReturn( driver ); when( metastoreLocator.getMetastore() ).thenReturn( iMetaStore ); when( namedClusterService.listNames( any() ) ).thenReturn( namedClusterList ); } @Test public void testVersionConstructor() throws Throwable { int majorVersion = 22; int minorVersion = 33; when( driver.getMajorVersion() ).thenReturn( majorVersion ); when( driver.getMinorVersion() ).thenReturn( minorVersion ); assertTrue( impalaDatabaseMeta.isDriverVersion( majorVersion, minorVersion ) ); assertFalse( impalaDatabaseMeta.isDriverVersion( majorVersion, minorVersion + 1 ) ); assertFalse( impalaDatabaseMeta.isDriverVersion( majorVersion + 1, minorVersion ) ); } @Test public void testGetAccessTypeList() { assertArrayEquals( new int[] { DatabaseMeta.TYPE_ACCESS_NATIVE }, impalaDatabaseMeta.getAccessTypeList() ); } @Test public void testGetDriverClass() { assertEquals( ImpalaDatabaseMeta.DRIVER_CLASS_NAME, impalaDatabaseMeta.getDriverClass() ); } @Test public void testGetFieldDefinitionBoolean() { assertGetFieldDefinition( new ValueMetaBoolean(), "boolName", "BOOLEAN" ); } @Test public void testGetFieldDefinitionDate() { when( driver.getMajorVersion() ).thenReturn( 0 ); when( driver.getMinorVersion() ).thenReturn( 8 ); assertGetFieldDefinition( new ValueMetaDate(), "dateName", "TIMESTAMP" ); } @Test( expected = IllegalArgumentException.class ) public void 
testGetFieldDefinitionDateUnsupported() { when( driver.getMajorVersion() ).thenReturn( 0 ); when( driver.getMinorVersion() ).thenReturn( 7 ); assertGetFieldDefinition( new ValueMetaDate(), "dateName", "TIMESTAMP" ); } @Test public void testGetFieldDefinitionTimestamp() { when( driver.getMajorVersion() ).thenReturn( 0 ); when( driver.getMinorVersion() ).thenReturn( 8 ); assertGetFieldDefinition( new ValueMetaTimestamp(), "timestampName", "TIMESTAMP" ); } @Test( expected = IllegalArgumentException.class ) public void testGetFieldDefinitionUnsupported() { when( driver.getMajorVersion() ).thenReturn( 0 ); when( driver.getMinorVersion() ).thenReturn( 7 ); assertGetFieldDefinition( new ValueMetaTimestamp(), "timestampName", "TIMESTAMP" ); } @Test public void testGetFieldDefinitionString() { assertGetFieldDefinition( new ValueMetaString(), "stringName", "STRING" ); } @Test public void testGetFieldDefinitionNumber() { String numberName = "numberName"; ValueMetaInterface valueMetaInterface = new ValueMetaNumber(); valueMetaInterface.setName( numberName ); valueMetaInterface.setPrecision( 0 ); valueMetaInterface.setLength( 9 ); assertGetFieldDefinition( valueMetaInterface, "INT" ); valueMetaInterface.setLength( 18 ); assertGetFieldDefinition( valueMetaInterface, "BIGINT" ); valueMetaInterface.setLength( 19 ); assertGetFieldDefinition( valueMetaInterface, "FLOAT" ); valueMetaInterface.setPrecision( 10 ); valueMetaInterface.setLength( 16 ); assertGetFieldDefinition( valueMetaInterface, "FLOAT" ); valueMetaInterface.setLength( 15 ); assertGetFieldDefinition( valueMetaInterface, "DOUBLE" ); } @Test public void testGetFieldDefinitionInteger() { String integerName = "integerName"; ValueMetaInterface valueMetaInterface = new ValueMetaInteger(); valueMetaInterface.setName( integerName ); valueMetaInterface.setPrecision( 0 ); valueMetaInterface.setLength( 9 ); assertGetFieldDefinition( valueMetaInterface, "INT" ); valueMetaInterface.setLength( 18 ); assertGetFieldDefinition( valueMetaInterface, "BIGINT" ); valueMetaInterface.setLength( 19 ); assertGetFieldDefinition( valueMetaInterface, "FLOAT" ); } @Test public void testGetFieldDefinitionBigNumber() { String bigNumberName = "bigNumberName"; ValueMetaInterface valueMetaInterface = new ValueMetaBigNumber(); valueMetaInterface.setName( bigNumberName ); valueMetaInterface.setPrecision( 0 ); valueMetaInterface.setLength( 9 ); assertGetFieldDefinition( valueMetaInterface, "INT" ); valueMetaInterface.setLength( 18 ); assertGetFieldDefinition( valueMetaInterface, "BIGINT" ); valueMetaInterface.setLength( 19 ); assertGetFieldDefinition( valueMetaInterface, "FLOAT" ); valueMetaInterface.setPrecision( 10 ); valueMetaInterface.setLength( 16 ); assertGetFieldDefinition( valueMetaInterface, "FLOAT" ); valueMetaInterface.setLength( 15 ); assertGetFieldDefinition( valueMetaInterface, "DOUBLE" ); } @Test public void testGetFieldDefinition() { assertGetFieldDefinition( new ValueMetaInternetAddress(), "internetAddressName", "" ); } @Test public void testGetURL() throws KettleDatabaseException, MalformedURLException { String testHostname = "testHostname"; int port = 9429; String testDbName = "testDbName"; String urlString = impalaDatabaseMeta.getURL( testHostname, "" + port, testDbName ); assertTrue( urlString.startsWith( ImpalaDatabaseMeta.URL_PREFIX ) ); // Use known prefix urlString = "http://" + urlString.substring( ImpalaDatabaseMeta.URL_PREFIX.length() ); URL url = new URL( urlString ); assertEquals( testHostname, url.getHost() ); assertEquals( port, url.getPort() ); 
assertEquals( "/" + testDbName + ImpalaDatabaseMeta.AUTH_NO_SASL + ";impala_db=true", url.getPath() ); } @Test public void testGetURLPrincipal() throws KettleDatabaseException, MalformedURLException { String testHostname = "testHostname"; int port = 9429; String testDbName = "testDbName"; impalaDatabaseMeta.getAttributes().put( "principal", "testP" ); String urlString = impalaDatabaseMeta.getURL( testHostname, "" + port, testDbName ); assertTrue( urlString.startsWith( ImpalaDatabaseMeta.URL_PREFIX ) ); // Use known prefix urlString = "http://" + urlString.substring( ImpalaDatabaseMeta.URL_PREFIX.length() ); URL url = new URL( urlString ); assertEquals( testHostname, url.getHost() ); assertEquals( port, url.getPort() ); assertEquals( "/" + testDbName + ";impala_db=true", url.getPath() ); impalaDatabaseMeta.getAttributes().remove( "principal" ); impalaDatabaseMeta.getAttributes() .put( ImpalaDatabaseMeta.ATTRIBUTE_PREFIX_EXTRA_OPTION + impalaDatabaseMeta.getPluginId() + ".principal", "testP" ); urlString = impalaDatabaseMeta.getURL( testHostname, "" + port, testDbName ); assertTrue( urlString.startsWith( ImpalaDatabaseMeta.URL_PREFIX ) ); // Use known prefix urlString = "http://" + urlString.substring( ImpalaDatabaseMeta.URL_PREFIX.length() ); url = new URL( urlString ); assertEquals( testHostname, url.getHost() ); assertEquals( port, url.getPort() ); assertEquals( "/" + testDbName + ";impala_db=true", url.getPath() ); } @Test public void testGetURLEmptyPort() throws KettleDatabaseException, MalformedURLException { String testHostname = "testHostname"; String testDbName = "testDbName"; String urlString = impalaDatabaseMeta.getURL( testHostname, "", testDbName ); assertTrue( urlString.startsWith( ImpalaDatabaseMeta.URL_PREFIX ) ); // Use known prefix urlString = "http://" + urlString.substring( ImpalaDatabaseMeta.URL_PREFIX.length() ); URL url = new URL( urlString ); assertEquals( testHostname, url.getHost() ); assertEquals( impalaDatabaseMeta.getDefaultDatabasePort(), url.getPort() ); assertEquals( "/" + testDbName + ImpalaDatabaseMeta.AUTH_NO_SASL + ";impala_db=true", url.getPath() ); } @Test public void testGetUsedLibraries() { assertArrayEquals( new String[] { ImpalaDatabaseMeta.JAR_FILE }, impalaDatabaseMeta.getUsedLibraries() ); } @Test public void testGetDefaultDatabasePort() { assertEquals( ImpalaDatabaseMeta.DEFAULT_PORT, impalaDatabaseMeta.getDefaultDatabasePort() ); } private void assertGetFieldDefinition( ValueMetaInterface valueMetaInterface, String name, String expectedType ) { valueMetaInterface = valueMetaInterface.clone(); valueMetaInterface.setName( name ); assertGetFieldDefinition( valueMetaInterface, expectedType ); } private void assertGetFieldDefinition( ValueMetaInterface valueMetaInterface, String expectedType ) { assertEquals( expectedType, impalaDatabaseMeta.getFieldDefinition( valueMetaInterface, null, null, false, false, false ) ); assertEquals( valueMetaInterface.getName() + " " + expectedType, impalaDatabaseMeta.getFieldDefinition( valueMetaInterface, null, null, false, true, false ) ); } @Test public void testGetNamedClusterList() throws Exception { assertEquals( namedClusterList, impalaDatabaseMeta.getNamedClusterList() ); verify( namedClusterService ).listNames( iMetaStoreCaptor.capture() ); } @Test public void testPutOptionalOptions() { impalaDatabaseMeta.setNamedCluster( CLUSTER ); impalaDatabaseMeta.setPluginId( PLUGIN_ID ); Map extraOptions = new HashMap(); impalaDatabaseMeta.putOptionalOptions( extraOptions ); String value = extraOptions.get( PLUGIN_ID + 
".pentahoNamedCluster" ); assertEquals( CLUSTER, value ); } } ================================================ FILE: kettle-plugins/hive/core/src/test/java/org/pentaho/big/data/kettle/plugins/hive/ImpalaSimbaDatabaseDialectTest.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.hive; import com.google.common.base.Joiner; import java.util.Map; import org.hamcrest.collection.IsMapContaining; import org.hamcrest.collection.IsMapWithSize; import org.junit.Test; import org.pentaho.database.DatabaseDialectException; import org.pentaho.database.model.DatabaseAccessType; import org.pentaho.database.model.DatabaseConnection; import static org.hamcrest.CoreMatchers.is; import static org.junit.Assert.*; public class ImpalaSimbaDatabaseDialectTest { private ImpalaSimbaDatabaseDialect dialect = new ImpalaSimbaDatabaseDialect(); @Test public void testGetUrlNative() throws DatabaseDialectException { DatabaseConnection conn = new DatabaseConnection(); conn.setAccessType( DatabaseAccessType.NATIVE ); conn.setUsername( "jack" ); conn.setHostname( "hostname" ); assertThat( dialect.getURL( conn ), is( "jdbc:impala://hostname:21050/default;AuthMech=2;UID=jack" ) ); } @Test public void testDefaultSocketTimeout() { Map options = dialect.getDatabaseType().getDefaultOptions(); assertThat( options, IsMapWithSize.aMapWithSize( 1 ) ); assertThat( options, IsMapContaining.hasEntry( Joiner.on( "." ).join( ImpalaSimbaDatabaseDialect.DB_TYPE_NAME_SHORT, Hive2SimbaDatabaseDialect.SOCKET_TIMEOUT_OPTION ), Hive2SimbaDatabaseDialect.DEFAULT_SOCKET_TIMEOUT ) ); } } ================================================ FILE: kettle-plugins/hive/core/src/test/java/org/pentaho/big/data/kettle/plugins/hive/ImpalaSimbaDatabaseMetaTest.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.hive; import java.sql.Driver; import java.util.Arrays; import java.util.Map; import org.hamcrest.collection.IsMapContaining; import org.hamcrest.collection.IsMapWithSize; import org.junit.Before; import org.junit.BeforeClass; import org.junit.Test; import org.junit.runner.RunWith; import org.mockito.InjectMocks; import org.mockito.Mock; import org.mockito.junit.MockitoJUnitRunner; import org.pentaho.di.core.logging.KettleLogStore; import org.pentaho.hadoop.shim.api.jdbc.DriverLocator; import org.pentaho.di.core.database.DatabaseMeta; import static org.junit.Assert.assertArrayEquals; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertThat; import static org.junit.Assert.assertTrue; import static org.mockito.Mockito.when; /** * Created by bryan on 10/21/15. 
*/ @RunWith( MockitoJUnitRunner.Silent.class ) public class ImpalaSimbaDatabaseMetaTest { public static final String LOCALHOST = "localhost"; public static final String PORT = "10000"; public static final String DEFAULT = "default"; @Mock DriverLocator driverLocator; @Mock Driver driver; @InjectMocks ImpalaSimbaDatabaseMeta impalaSimbaDatabaseMeta; private String impalaSimbaDatabaseMetaURL; @BeforeClass public static void initLogs() { KettleLogStore.init(); } @Before public void setup() throws Throwable { impalaSimbaDatabaseMetaURL = impalaSimbaDatabaseMeta.getURL( LOCALHOST, PORT, DEFAULT ); when( driverLocator.getDriver( impalaSimbaDatabaseMetaURL ) ).thenReturn( driver ); } @Test public void testGetAccessTypeList() { assertArrayEquals( new int[] { DatabaseMeta.TYPE_ACCESS_NATIVE, DatabaseMeta.TYPE_ACCESS_JNDI }, impalaSimbaDatabaseMeta.getAccessTypeList() ); } @Test public void testGetDriverClassOther() { assertEquals( ImpalaSimbaDatabaseMeta.DRIVER_CLASS_NAME, impalaSimbaDatabaseMeta.getDriverClass() ); } @Test public void testGetJdbcPrefix() { assertEquals( ImpalaSimbaDatabaseMeta.JDBC_URL_PREFIX, impalaSimbaDatabaseMeta.getJdbcPrefix() ); } @Test public void testGetUsedLibraries() { assertTrue( Arrays.equals( impalaSimbaDatabaseMeta.getUsedLibraries(), new String[] { impalaSimbaDatabaseMeta.JAR_FILE } ) ); } @Test public void testGetDefaultDatabasePort() { assertEquals( ImpalaSimbaDatabaseMeta.DEFAULT_PORT, impalaSimbaDatabaseMeta.getDefaultDatabasePort() ); } @Test public void testGetDefaultOptions() { Map options = impalaSimbaDatabaseMeta.getDefaultOptions(); assertThat( options, IsMapWithSize.aMapWithSize( 1 ) ); assertThat( options, IsMapContaining.hasEntry( impalaSimbaDatabaseMeta.getPluginId() + "." + SparkSimbaDatabaseMeta.SOCKET_TIMEOUT_OPTION, "10" ) ); } } ================================================ FILE: kettle-plugins/hive/core/src/test/java/org/pentaho/big/data/kettle/plugins/hive/SimbaUrlTest.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. 
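The ImpalaDatabaseMetaTest URL cases above show the shape of the generated JDBC URL: jdbc:hive2://host:port/db, with the default port substituted when the port string is empty, ;impala_db=true always appended, and a no-SASL auth option (ImpalaDatabaseMeta.AUTH_NO_SASL) added only when no Kerberos principal is configured, either as a plain attribute or as an extra option. The option ordering differs slightly between the dialect and the meta tests, so the hypothetical restatement below follows the meta test and assumes AUTH_NO_SASL expands to ";auth=noSasl":

public class ImpalaUrlSketch {
  // URL assembly consistent with testGetURL / testGetURLPrincipal / testGetURLEmptyPort above.
  static String impalaUrl( String host, String port, String db, boolean hasPrincipal, int defaultPort ) {
    String effectivePort = ( port == null || port.isEmpty() ) ? String.valueOf( defaultPort ) : port;
    StringBuilder url = new StringBuilder( "jdbc:hive2://" )
      .append( host ).append( ':' ).append( effectivePort ).append( '/' ).append( db );
    if ( !hasPrincipal ) {
      url.append( ";auth=noSasl" ); // assumed value of ImpalaDatabaseMeta.AUTH_NO_SASL
    }
    return url.append( ";impala_db=true" ).toString();
  }

  public static void main( String[] args ) {
    // The default port 21050 here is only an illustration; the tests compare against getDefaultDatabasePort().
    System.out.println( impalaUrl( "testHostname", "9429", "testDbName", false, 21050 ) );
    System.out.println( impalaUrl( "testHostname", "", "testDbName", true, 21050 ) );
  }
}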
* * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.hive; import org.junit.Test; import static org.hamcrest.CoreMatchers.containsString; import static org.hamcrest.CoreMatchers.is; import static org.junit.Assert.*; import static org.pentaho.di.core.database.DatabaseMeta.TYPE_ACCESS_JNDI; import static org.pentaho.di.core.database.DatabaseMeta.TYPE_ACCESS_NATIVE; public class SimbaUrlTest { SimbaUrl.Builder builder = SimbaUrl.Builder.create(); @Test public void testWithDefaultPort() { assertThat( builder .withPort( "" ) .withDefaultPort( 101010 ) .withJdbcPrefix( "foo:bar://" ) .withHostname( "localhost" ) .build().getURL(), containsString( "foo:bar://localhost:101010/default" ) ); } @Test public void testWithSetPort() { assertThat( builder .withPort( "" ) .withDefaultPort( 101010 ) .withPort( "202020" ) .withJdbcPrefix( "foo:bar://" ) .withHostname( "localhost" ) .build().getURL(), containsString( "foo:bar://localhost:202020/default" ) ); } @Test public void testWithDatabaseName() { assertThat( builder .withPort( "" ) .withDefaultPort( 101010 ) .withPort( "202020" ) .withDatabaseName( "mydatabase" ) .withJdbcPrefix( "foo:bar://" ) .withHostname( "localhost" ) .build().getURL(), containsString( "foo:bar://localhost:202020/mydatabase" ) ); } @Test public void testJndi() { assertThat( builder .withAccessType( TYPE_ACCESS_JNDI ) .build().getURL(), is( SimbaUrl.URL_IS_CONFIGURED_THROUGH_JNDI ) ); } @Test public void testAuthMech0() { assertThat( builder .withAccessType( TYPE_ACCESS_NATIVE ) .withIsKerberos( false ) .withPort( "202020" ) .withDatabaseName( "mydatabase" ) .withJdbcPrefix( "foo:bar://" ) .withHostname( "localhost" ) .build().getURL(), is( "foo:bar://localhost:202020/mydatabase;AuthMech=0" ) ); } @Test public void testAuthMech1() { assertThat( builder .withAccessType( TYPE_ACCESS_NATIVE ) .withIsKerberos( false ) .withPort( "202020" ) .withIsKerberos( true ) .withDatabaseName( "mydatabase" ) .withJdbcPrefix( "foo:bar://" ) .withHostname( "localhost" ) .build().getURL(), is( "foo:bar://localhost:202020/mydatabase;AuthMech=1" ) ); } @Test public void testAuthMech2() { assertThat( builder .withAccessType( TYPE_ACCESS_NATIVE ) .withIsKerberos( false ) .withPort( "202020" ) .withUsername( "user" ) .withDatabaseName( "mydatabase" ) .withJdbcPrefix( "foo:bar://" ) .withHostname( "localhost" ) .build().getURL(), is( "foo:bar://localhost:202020/mydatabase;AuthMech=2;UID=user" ) ); } @Test public void testAuthMech3() { assertThat( builder .withAccessType( TYPE_ACCESS_NATIVE ) .withIsKerberos( false ) .withPort( "202020" ) .withUsername( "user" ) .withPassword( "password" ) .withDatabaseName( "mydatabase" ) .withJdbcPrefix( "foo:bar://" ) .withHostname( "localhost" ) .build().getURL(), is( "foo:bar://localhost:202020/mydatabase;AuthMech=3;UID=user;PWD=password" ) ); } } ================================================ FILE: kettle-plugins/hive/core/src/test/java/org/pentaho/big/data/kettle/plugins/hive/SparkSimbaDatabaseDialectTest.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. 
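SimbaUrlTest above is effectively the specification of the Simba URL builder: JNDI access returns a fixed placeholder, an empty port falls back to the configured default, an empty database name falls back to "default", and the AuthMech suffix is chosen from the credentials (1 for Kerberos, 3 for username plus password, 2 for username only, 0 otherwise). The following standalone method restates that decision table; it is a sketch, not the builder's real implementation, and the JNDI placeholder constant only stands in for SimbaUrl.URL_IS_CONFIGURED_THROUGH_JNDI.

public class SimbaUrlSketch {
  static final String JNDI_PLACEHOLDER = "<configured through JNDI>"; // stand-in for SimbaUrl.URL_IS_CONFIGURED_THROUGH_JNDI

  static String simbaUrl( boolean jndi, boolean kerberos, String user, String password,
                          String prefix, String host, String port, int defaultPort, String db ) {
    if ( jndi ) {
      return JNDI_PLACEHOLDER;
    }
    String effectivePort = ( port == null || port.isEmpty() ) ? String.valueOf( defaultPort ) : port;
    String effectiveDb = ( db == null || db.isEmpty() ) ? "default" : db;
    String auth;
    if ( kerberos ) {
      auth = ";AuthMech=1";
    } else if ( user != null && password != null ) {
      auth = ";AuthMech=3;UID=" + user + ";PWD=" + password;
    } else if ( user != null ) {
      auth = ";AuthMech=2;UID=" + user;
    } else {
      auth = ";AuthMech=0";
    }
    return prefix + host + ":" + effectivePort + "/" + effectiveDb + auth;
  }

  public static void main( String[] args ) {
    // Mirrors testAuthMech3: user + password selects AuthMech=3 with UID and PWD.
    System.out.println( simbaUrl( false, false, "user", "password",
      "foo:bar://", "localhost", "202020", 101010, "mydatabase" ) );
    // Mirrors testWithDefaultPort: an empty port falls back to the default, an empty db to "default".
    System.out.println( simbaUrl( false, false, null, null,
      "foo:bar://", "localhost", "", 101010, "" ) );
  }
}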
* * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.hive; import com.google.common.base.Joiner; import java.util.Map; import org.hamcrest.collection.IsMapContaining; import org.hamcrest.collection.IsMapWithSize; import org.junit.Test; import static org.hamcrest.CoreMatchers.is; import static org.junit.Assert.assertThat; public class SparkSimbaDatabaseDialectTest { SparkSimbaDatabaseDialect dialect = new SparkSimbaDatabaseDialect(); @Test public void getDatabaseType() throws Exception { assertThat( dialect.getDatabaseType(), is( SparkSimbaDatabaseDialect.DBTYPE ) ); } @Test public void getNativeDriver() throws Exception { assertThat( dialect.getNativeDriver(), is( SparkSimbaDatabaseMeta.DRIVER_CLASS_NAME ) ); } @Test public void getNativeJdbcPre() throws Exception { assertThat( dialect.getNativeJdbcPre(), is( "jdbc:spark://" ) ); } @Test public void getDefaultDatabasePort() throws Exception { assertThat( dialect.getDefaultDatabasePort(), is( 10015 ) ); } @Test public void getUsedLibraries() throws Exception { assertThat( dialect.getUsedLibraries(), is( new String[] { SparkSimbaDatabaseMeta.JAR_FILE } ) ); } @Test public void testDefaultSocketTimeout() { Map options = dialect.getDatabaseType().getDefaultOptions(); assertThat( options, IsMapWithSize.aMapWithSize( 1 ) ); assertThat( options, IsMapContaining.hasEntry( Joiner.on( "." ).join( SparkSimbaDatabaseDialect.DB_TYPE_NAME_SHORT, Hive2SimbaDatabaseDialect.SOCKET_TIMEOUT_OPTION ), Hive2SimbaDatabaseDialect.DEFAULT_SOCKET_TIMEOUT ) ); } } ================================================ FILE: kettle-plugins/hive/core/src/test/java/org/pentaho/big/data/kettle/plugins/hive/SparkSimbaDatabaseMetaTest.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. 
* * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.hive; import java.sql.Driver; import java.util.Arrays; import java.util.Map; import org.hamcrest.collection.IsMapContaining; import org.hamcrest.collection.IsMapWithSize; import org.junit.Before; import org.junit.BeforeClass; import org.junit.Rule; import org.junit.Test; import org.junit.rules.ExpectedException; import org.junit.runner.RunWith; import org.mockito.InjectMocks; import org.mockito.Mock; import org.mockito.junit.MockitoJUnitRunner; import org.pentaho.di.core.logging.KettleLogStore; import org.pentaho.hadoop.shim.api.jdbc.DriverLocator; import org.pentaho.di.core.database.DatabaseMeta; import static org.hamcrest.core.Is.is; import static org.junit.Assert.assertArrayEquals; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertThat; import static org.junit.Assert.assertTrue; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; @RunWith( MockitoJUnitRunner.Silent.class ) public class SparkSimbaDatabaseMetaTest { public static final String LOCALHOST = "localhost"; public static final String PORT = "10000"; public static final String DEFAULT = "default"; @Mock DriverLocator driverLocator; @Mock Driver driver; @InjectMocks private SparkSimbaDatabaseMeta sparkSimbaDatabaseMeta; @Rule public final ExpectedException exception = ExpectedException.none(); private String sparkSimbaDatabaseMetaURL; private static final String DB_NAME = "dbName"; @BeforeClass public static void initLogs() { KettleLogStore.init(); } @Before public void setup() throws Throwable { sparkSimbaDatabaseMetaURL = sparkSimbaDatabaseMeta.getURL( LOCALHOST, PORT, DEFAULT ); when( driverLocator.getDriver( sparkSimbaDatabaseMetaURL ) ).thenReturn( driver ); sparkSimbaDatabaseMeta.setDatabaseName( DB_NAME ); } @Test public void testGetAccessTypeList() { assertArrayEquals( new int[] { DatabaseMeta.TYPE_ACCESS_NATIVE, DatabaseMeta.TYPE_ACCESS_JNDI }, sparkSimbaDatabaseMeta.getAccessTypeList() ); } @Test public void testGetJdbcPrefix() { assertEquals( SparkSimbaDatabaseMeta.JDBC_URL_PREFIX, sparkSimbaDatabaseMeta.getJdbcPrefix() ); } @Test public void testGetUsedLibraries() { assertTrue( Arrays.equals( sparkSimbaDatabaseMeta.getUsedLibraries(), new String[] { sparkSimbaDatabaseMeta.JAR_FILE } ) ); } @Test public void testGetDefaultDatabasePort() { assertEquals( SparkSimbaDatabaseMeta.DEFAULT_PORT, sparkSimbaDatabaseMeta.getDefaultDatabasePort() ); } @Test public void testQuoting() { assertEquals( "`", sparkSimbaDatabaseMeta.getStartQuote() ); assertEquals( "`", sparkSimbaDatabaseMeta.getEndQuote() ); } @Test public void testGeneratedSQLContainsSchemaReferenceWhenTableUnqualified() { verifyExpectedSql( null, "foo" ); } @Test public void testGeneratedSQLContainsSchemaReferenceWhenTableQualified() { verifyExpectedSql( DB_NAME, "foo" ); } private void verifyExpectedSql( String schemaName, String tableName ) { String expectedTableName = schemaName == null ? tableName : schemaName + "." 
+ tableName; assertThat( sparkSimbaDatabaseMeta.getSQLTableExists( expectedTableName ), is( "SELECT 1 FROM " + expectedTableName + " LIMIT 1" ) ); assertThat( sparkSimbaDatabaseMeta.getTruncateTableStatement( expectedTableName ), is( "TRUNCATE TABLE " + expectedTableName ) ); assertThat( sparkSimbaDatabaseMeta.getSQLColumnExists( "column", expectedTableName ), is( "SELECT column FROM " + expectedTableName + " LIMIT 1" ) ); assertThat( sparkSimbaDatabaseMeta.getSQLQueryFields( expectedTableName ), is( "SELECT * FROM " + expectedTableName + " LIMIT 1" ) ); assertThat( sparkSimbaDatabaseMeta.getSelectCountStatement( expectedTableName ), is( SparkSimbaDatabaseMeta.SELECT_COUNT_STATEMENT + " " + expectedTableName ) ); } @Test public void testUnsupportedDrop() { assertThat( sparkSimbaDatabaseMeta.getDropColumnStatement( "tab", null, "tk", false, "pk", false ), is( "" ) ); } @Test public void testUnsupportedAddCol() { assertThat( sparkSimbaDatabaseMeta.getAddColumnStatement( "tab", null, "tk", false, "pk", false ), is( "" ) ); } @Test public void testUnsupportedModCol() { assertThat( sparkSimbaDatabaseMeta.getModifyColumnStatement( "tab", null, "tk", false, "pk", false ), is( "" ) ); } @Test public void testGetDriverClass() { assertThat( sparkSimbaDatabaseMeta.getDriverClass(), is( SparkSimbaDatabaseMeta.DRIVER_CLASS_NAME ) ); } @Test public void testGetDefaultOptions() { SparkSimbaDatabaseMeta meta = mock( SparkSimbaDatabaseMeta.class ); when( meta.getPluginId() ).thenReturn( "SPARKSIMBA" ); when( meta.getDefaultOptions() ).thenCallRealMethod(); Map options = meta.getDefaultOptions(); assertThat( options, IsMapWithSize.aMapWithSize( 1 ) ); assertThat( options, IsMapContaining.hasEntry( meta.getPluginId() + "." + SparkSimbaDatabaseMeta.SOCKET_TIMEOUT_OPTION, "10" ) ); } @Test public void testLimit() { assertThat( sparkSimbaDatabaseMeta.getLimitClause( 100 ), is( " LIMIT 100" ) ); } } ================================================ FILE: kettle-plugins/hive/pom.xml ================================================ 4.0.0 pentaho-big-data-kettle-plugins pentaho 11.1.0.0-SNAPSHOT pentaho-big-data-kettle-plugins-hive 11.1.0.0-SNAPSHOT pom Pentaho Community Edition Project: ${project.artifactId} a Pentaho open source project http://www.pentaho.com Apache License, Version 2.0 https://www.apache.org/licenses/LICENSE-2.0.txt repo A business-friendly OSS license assemblies core ================================================ FILE: kettle-plugins/mapreduce/assemblies/plugin/pom.xml ================================================ 4.0.0 mapreduce-assemblies pentaho 11.1.0.0-SNAPSHOT pdi-mapreduce-plugin pom PDI Mapreduce Plugin Distribution ${project.basedir}/src/main/resources ${project.build.directory}/assembly pentaho pdi-mapreduce-core ${project.version} ================================================ FILE: kettle-plugins/mapreduce/assemblies/plugin/src/assembly/assembly.xml ================================================ zip zip ${resources.directory} . true ${assembly.dir} . . pentaho:pdi-mapreduce-core:jar false runtime . 
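The SparkSimbaDatabaseMetaTest expectations above (before the pom.xml excerpts) pin down the SQL the Spark Simba meta generates: existence checks and field queries are expressed as SELECT ... LIMIT 1, truncation is a plain TRUNCATE TABLE, identifiers are quoted with backticks, the limit clause is " LIMIT n", and ADD/MODIFY/DROP COLUMN statements are unsupported and return an empty string. A compact restatement under illustrative names, not the meta class's API:

public class SparkSimbaSqlSketch {
  // Statement shapes from verifyExpectedSql, testLimit and testQuoting above.
  static final String QUOTE = "`"; // start and end quote are both a backtick

  static String tableExists( String table )              { return "SELECT 1 FROM " + table + " LIMIT 1"; }
  static String columnExists( String col, String table ) { return "SELECT " + col + " FROM " + table + " LIMIT 1"; }
  static String queryFields( String table )              { return "SELECT * FROM " + table + " LIMIT 1"; }
  static String truncate( String table )                 { return "TRUNCATE TABLE " + table; }
  static String limitClause( int rows )                  { return " LIMIT " + rows; }

  public static void main( String[] args ) {
    System.out.println( tableExists( "dbName.foo" ) );                       // SELECT 1 FROM dbName.foo LIMIT 1
    System.out.println( truncate( "dbName.foo" ) );                          // TRUNCATE TABLE dbName.foo
    System.out.println( "SELECT * FROM " + QUOTE + "foo" + QUOTE + limitClause( 100 ) );
  }
}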
false false pentaho:pdi-mapreduce-core:jar runtime false lib pentaho:pdi-mapreduce-core:* ================================================ FILE: kettle-plugins/mapreduce/assemblies/plugin/src/main/resources/version.xml ================================================ ${project.version} ================================================ FILE: kettle-plugins/mapreduce/assemblies/pom.xml ================================================ 4.0.0 pentaho pentaho-big-data-kettle-plugins-mapreduce 11.1.0.0-SNAPSHOT mapreduce-assemblies pom PDI Mapreduce Plugin Assemblies plugin ================================================ FILE: kettle-plugins/mapreduce/core/pom.xml ================================================ 4.0.0 pentaho pentaho-big-data-kettle-plugins-mapreduce 11.1.0.0-SNAPSHOT pdi-mapreduce-core PDI Mapreduce Core site 3.0 src/main/resources false src/main/resources-filtered true org.pentaho shim-api ${pentaho-hadoop-shims.version} provided pentaho pentaho-big-data-kettle-plugins-common-ui ${project.version} provided pentaho-kettle kettle-core ${pdi.version} provided pentaho-kettle kettle-engine ${pdi.version} provided pentaho-kettle kettle-ui-swt ${pdi.version} provided org.dom4j dom4j provided junit junit ${dependency.junit.revision} test org.mockito mockito-core ${mockito.version} test pentaho-kettle kettle-engine ${pdi.version} tests test pentaho pentaho-big-data-legacy-core ${project.version} provided pentaho pentaho-big-data-legacy ${project.version} test org.easymock easymock ${easymock.versin} test pentaho-kettle kettle-core ${pdi.version} tests test org.pentaho pentaho-hadoop-shims-common-services-api ${project.version} provided pentaho pentaho-big-data-impl-cluster ${project.version} provided stax stax 1.2.0 ================================================ FILE: kettle-plugins/mapreduce/core/src/main/java/org/pentaho/big/data/kettle/plugins/mapreduce/DialogClassUtil.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.mapreduce; /** * Created by bryan on 1/12/16. */ public class DialogClassUtil { private static final String PKG_NAME = DialogClassUtil.class.getPackage().getName(); private static final String UI_PKG_NAME = PKG_NAME + ".ui"; public static String getDialogClassName( Class clazz ) { String className = clazz.getCanonicalName().replace( PKG_NAME, UI_PKG_NAME ); if ( className.endsWith( "Meta" ) ) { className = className.substring( 0, className.length() - 4 ); } className = className + "Dialog"; return className; } } ================================================ FILE: kettle-plugins/mapreduce/core/src/main/java/org/pentaho/big/data/kettle/plugins/mapreduce/entry/NamedClusterLoadSaveUtil.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. 
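DialogClassUtil above derives a dialog class name from a step or job entry class by swapping the base package for its .ui subpackage, dropping a trailing "Meta" if present, and appending "Dialog". A quick string-level restatement of that mapping follows; the input class name is hypothetical, chosen only to show the shape of the result.

public class DialogClassNameSketch {
  // Same transformation as DialogClassUtil.getDialogClassName, expressed over plain strings.
  static String dialogClassName( String canonicalName, String pkg ) {
    String name = canonicalName.replace( pkg, pkg + ".ui" );
    if ( name.endsWith( "Meta" ) ) {
      name = name.substring( 0, name.length() - 4 );
    }
    return name + "Dialog";
  }

  public static void main( String[] args ) {
    String pkg = "org.pentaho.big.data.kettle.plugins.mapreduce";
    // Hypothetical meta class, used only to illustrate the mapping.
    System.out.println( dialogClassName( pkg + ".step.ExampleMeta", pkg ) );
    // -> org.pentaho.big.data.kettle.plugins.mapreduce.ui.step.ExampleDialog
  }
}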
* * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.mapreduce.entry; import org.apache.commons.lang.StringUtils; import org.pentaho.hadoop.shim.api.cluster.NamedCluster; import org.pentaho.hadoop.shim.api.cluster.NamedClusterService; import org.pentaho.di.core.Const; import org.pentaho.di.core.exception.KettleException; import org.pentaho.di.core.logging.LogChannelInterface; import org.pentaho.di.core.xml.XMLHandler; import org.pentaho.di.repository.ObjectId; import org.pentaho.di.repository.Repository; import org.pentaho.metastore.api.IMetaStore; import org.pentaho.metastore.api.exceptions.MetaStoreException; import org.w3c.dom.Node; /** * Created by bryan on 1/6/16. */ public class NamedClusterLoadSaveUtil { public static final String CLUSTER_NAME = "cluster_name"; public static final String HDFS_HOSTNAME = "hdfs_hostname"; public static final String HDFS_PORT = "hdfs_port"; public static final String JOB_TRACKER_HOSTNAME = "job_tracker_hostname"; public static final String JOB_TRACKER_PORT = "job_tracker_port"; public void saveNamedClusterRep( NamedCluster namedCluster, NamedClusterService namedClusterService, Repository rep, IMetaStore metaStore, ObjectId id_job, ObjectId objectId, LogChannelInterface logChannelInterface ) throws KettleException { if ( namedCluster != null ) { String namedClusterName = namedCluster.getName(); if ( !Const.isEmpty( namedClusterName ) ) { rep.saveJobEntryAttribute( id_job, objectId, CLUSTER_NAME, namedClusterName ); // $NON-NLS-1$ } try { if ( !StringUtils.isEmpty( namedClusterName ) && namedClusterService.contains( namedClusterName, metaStore ) ) { // pull config from NamedCluster namedCluster = namedClusterService.read( namedClusterName, metaStore ); } } catch ( MetaStoreException e ) { logChannelInterface.logDebug( e.getMessage(), e ); } rep.saveJobEntryAttribute( id_job, objectId, HDFS_HOSTNAME, namedCluster.getHdfsHost() ); // $NON-NLS-1$ rep.saveJobEntryAttribute( id_job, objectId, HDFS_PORT, namedCluster.getHdfsPort() ); // $NON-NLS-1$ rep.saveJobEntryAttribute( id_job, objectId, JOB_TRACKER_HOSTNAME, namedCluster.getJobTrackerHost() ); // $NON-NLS-1$ rep.saveJobEntryAttribute( id_job, objectId, JOB_TRACKER_PORT, namedCluster.getJobTrackerPort() ); // $NON-NLS-1$ } } public void getXmlNamedCluster( NamedCluster namedCluster, NamedClusterService namedClusterService, IMetaStore metaStore, LogChannelInterface logChannelInterface, StringBuilder retval ) { if ( namedCluster != null ) { String namedClusterName = namedCluster.getName(); if ( !Const.isEmpty( namedClusterName ) ) { retval.append( " " ).append( XMLHandler.addTagValue( CLUSTER_NAME, namedClusterName ) ); // $NON-NLS-1$ // //$NON-NLS-2$ } try { if ( metaStore != null && !StringUtils.isEmpty( namedClusterName ) && namedClusterService .contains( namedClusterName, metaStore ) ) { // pull config from NamedCluster namedCluster = namedClusterService.read( namedClusterName, metaStore ); } } catch ( MetaStoreException e ) { logChannelInterface.logDebug( e.getMessage(), e ); } retval.append( " " ).append( XMLHandler.addTagValue( HDFS_HOSTNAME, namedCluster.getHdfsHost() ) ); // $NON-NLS-1$ // //$NON-NLS-2$ retval.append( " " ).append( XMLHandler.addTagValue( HDFS_PORT, namedCluster.getHdfsPort() ) ); // $NON-NLS-1$ // //$NON-NLS-2$ retval.append( " " ).append( XMLHandler.addTagValue( JOB_TRACKER_HOSTNAME, namedCluster .getJobTrackerHost() ) ); // $NON-NLS-1$ //$NON-NLS-2$ retval.append( " " ).append( 
XMLHandler.addTagValue( JOB_TRACKER_PORT, namedCluster.getJobTrackerPort() ) ); // $NON-NLS-1$ // //$NON-NLS-2$ } } public NamedCluster loadClusterConfig( NamedClusterService namedClusterService, ObjectId id_jobentry, Repository rep, IMetaStore metaStore, Node entrynode, LogChannelInterface logChannelInterface ) { boolean configLoaded = false; try { String clusterName = null; // attempt to load from named cluster if ( entrynode != null ) { clusterName = XMLHandler.getTagValue( entrynode, CLUSTER_NAME ); //$NON-NLS-1$ } else if ( rep != null ) { clusterName = rep.getJobEntryAttributeString( id_jobentry, CLUSTER_NAME ); //$NON-NLS-1$ //$NON-NLS-2$ } // load from system first, then fall back to copy stored with job (AbstractMeta) NamedCluster nc = null; if ( !StringUtils.isEmpty( clusterName ) && namedClusterService.contains( clusterName, metaStore ) ) { // pull config from NamedCluster nc = namedClusterService.read( clusterName, metaStore ); } if ( nc != null ) { return nc; } } catch ( Throwable t ) { logChannelInterface.logDebug( t.getMessage(), t ); } NamedCluster namedCluster = namedClusterService.getClusterTemplate(); if ( entrynode != null ) { // load default values for cluster & legacy fallback namedCluster.setHdfsHost( XMLHandler.getTagValue( entrynode, HDFS_HOSTNAME ) ); //$NON-NLS-1$ namedCluster.setHdfsPort( XMLHandler.getTagValue( entrynode, HDFS_PORT ) ); //$NON-NLS-1$ namedCluster.setJobTrackerHost( XMLHandler.getTagValue( entrynode, JOB_TRACKER_HOSTNAME ) ); //$NON-NLS-1$ namedCluster.setJobTrackerPort( XMLHandler.getTagValue( entrynode, JOB_TRACKER_PORT ) ); //$NON-NLS-1$ } else if ( rep != null ) { // load default values for cluster & legacy fallback try { namedCluster.setHdfsHost( rep.getJobEntryAttributeString( id_jobentry, HDFS_HOSTNAME ) ); namedCluster.setHdfsPort( rep.getJobEntryAttributeString( id_jobentry, HDFS_PORT ) ); //$NON-NLS-1$ namedCluster .setJobTrackerHost( rep.getJobEntryAttributeString( id_jobentry, JOB_TRACKER_HOSTNAME ) ); //$NON-NLS-1$ namedCluster .setJobTrackerPort( rep.getJobEntryAttributeString( id_jobentry, JOB_TRACKER_PORT ) ); //$NON-NLS-1$ } catch ( KettleException ke ) { logChannelInterface.logError( ke.getMessage(), ke ); } } return namedCluster; } } ================================================ FILE: kettle-plugins/mapreduce/core/src/main/java/org/pentaho/big/data/kettle/plugins/mapreduce/entry/UserDefinedItem.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. 
* * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.mapreduce.entry; import org.pentaho.ui.xul.XulEventSource; import java.beans.PropertyChangeListener; public class UserDefinedItem implements XulEventSource { private String name; private String value; public UserDefinedItem() { } public String getName() { return name; } public void setName( String name ) { this.name = name; } public String getValue() { return value; } public void setValue( String value ) { this.value = value; } public void addPropertyChangeListener( PropertyChangeListener listener ) { } public void removePropertyChangeListener( PropertyChangeListener listener ) { } } ================================================ FILE: kettle-plugins/mapreduce/core/src/main/java/org/pentaho/big/data/kettle/plugins/mapreduce/entry/hadoop/JobEntryHadoopJobExecutor.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.mapreduce.entry.hadoop; import com.google.common.annotations.VisibleForTesting; import org.pentaho.big.data.api.services.BigDataServicesHelper; import org.pentaho.big.data.impl.cluster.NamedClusterManager; import org.pentaho.big.data.kettle.plugins.mapreduce.DialogClassUtil; import org.pentaho.big.data.kettle.plugins.mapreduce.entry.NamedClusterLoadSaveUtil; import org.pentaho.big.data.kettle.plugins.mapreduce.entry.UserDefinedItem; import org.pentaho.big.data.kettle.plugins.mapreduce.entry.pmr.JobEntryHadoopTransJobExecutor; import org.pentaho.di.cluster.SlaveServer; import org.pentaho.di.core.Const; import org.pentaho.di.core.Result; import org.pentaho.di.core.annotations.JobEntry; import org.pentaho.di.core.database.DatabaseMeta; import org.pentaho.di.core.exception.KettleException; import org.pentaho.di.core.exception.KettleXMLException; import org.pentaho.di.core.variables.VariableSpace; import org.pentaho.di.core.xml.XMLHandler; import org.pentaho.di.i18n.BaseMessages; import org.pentaho.di.job.entry.JobEntryBase; import org.pentaho.di.job.entry.JobEntryInterface; import org.pentaho.di.repository.ObjectId; import org.pentaho.di.repository.Repository; import org.pentaho.hadoop.shim.api.cluster.NamedCluster; import org.pentaho.hadoop.shim.api.cluster.NamedClusterService; import org.pentaho.hadoop.shim.api.cluster.NamedClusterServiceLocator; import org.pentaho.hadoop.shim.api.mapreduce.MapReduceJobAdvanced; import org.pentaho.hadoop.shim.api.mapreduce.MapReduceJobBuilder; import org.pentaho.hadoop.shim.api.mapreduce.MapReduceJobSimple; import org.pentaho.hadoop.shim.api.mapreduce.MapReduceService; import org.pentaho.hadoop.shim.api.mapreduce.TaskCompletionEvent; import org.pentaho.metastore.api.IMetaStore; import org.pentaho.runtime.test.RuntimeTester; import org.pentaho.runtime.test.action.RuntimeTestActionService; import org.pentaho.runtime.test.action.impl.RuntimeTestActionServiceImpl; import org.pentaho.runtime.test.impl.RuntimeTesterImpl; import org.w3c.dom.Node; import java.io.File; import java.io.IOException; import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Method; import 
java.net.MalformedURLException; import java.net.URL; import java.util.ArrayList; import java.util.List; import java.util.concurrent.TimeUnit; @JobEntry( id = "HadoopJobExecutorPlugin", image = "HDE.svg", name = "HadoopJobExecutorPlugin.Name", description = "HadoopJobExecutorPlugin.Description", categoryDescription = "i18n:org.pentaho.di.job:JobCategory.Category.BigData", i18nPackageName = "org.pentaho.big.data.kettle.plugins.mapreduce", documentationUrl = "https://pentaho-community.atlassian.net/wiki/display/EAI/Hadoop+Job+Executor" ) public class JobEntryHadoopJobExecutor extends JobEntryBase implements Cloneable, JobEntryInterface { private static final String DEFAULT_LOGGING_INTERVAL = "60"; public static final String CLUSTER_NAME = "cluster_name"; public static final String HDFS_HOSTNAME = "hdfs_hostname"; public static final String HDFS_PORT = "hdfs_port"; public static final String JOB_TRACKER_HOSTNAME = "job_tracker_hostname"; public static final String JOB_TRACKER_PORT = "job_tracker_port"; private static Class PKG = JobEntryHadoopJobExecutor.class; // for i18n purposes, needed by Translator2!! public static final String DIALOG_NAME = DialogClassUtil.getDialogClassName( PKG ); // $NON-NLS-1$ private final NamedClusterService namedClusterService; private final RuntimeTestActionService runtimeTestActionService; private final RuntimeTester runtimeTester; private final NamedClusterServiceLocator namedClusterServiceLocator; private final NamedClusterLoadSaveUtil namedClusterLoadSaveUtil = new NamedClusterLoadSaveUtil(); private String hadoopJobName; private String jarUrl = ""; private String driverClass = ""; private boolean isSimple = true; private String cmdLineArgs; private String outputKeyClass; private String outputValueClass; private String mapperClass; private String combinerClass; private String reducerClass; private String inputFormatClass; private String outputFormatClass; private NamedCluster namedCluster; private String inputPath; private String outputPath; private boolean blocking; private String loggingInterval = DEFAULT_LOGGING_INTERVAL; // 60 seconds default private boolean simpleBlocking; private String simpleLoggingInterval = loggingInterval; private String numMapTasks = "1"; private String numReduceTasks = "1"; private List userDefined = new ArrayList(); public JobEntryHadoopJobExecutor( NamedClusterService namedClusterService, RuntimeTestActionService runtimeTestActionService, RuntimeTester runtimeTester, NamedClusterServiceLocator namedClusterServiceLocator ) { this.namedClusterService = namedClusterService; this.runtimeTestActionService = runtimeTestActionService; this.runtimeTester = runtimeTester; this.namedClusterServiceLocator = namedClusterServiceLocator; } public JobEntryHadoopJobExecutor() { this.namedClusterService = NamedClusterManager.getInstance(); this.runtimeTester = RuntimeTesterImpl.getInstance(); this.runtimeTestActionService = RuntimeTestActionServiceImpl.getInstance(); this.namedClusterServiceLocator = BigDataServicesHelper.getNamedClusterServiceLocator(); } public NamedClusterService getNamedClusterService() { return namedClusterService; } public RuntimeTestActionService getRuntimeTestActionService() { return runtimeTestActionService; } public RuntimeTester getRuntimeTester() { return runtimeTester; } public NamedClusterServiceLocator getNamedClusterServiceLocator() { return namedClusterServiceLocator; } public String getHadoopJobName() { return hadoopJobName; } public void setHadoopJobName( String hadoopJobName ) { this.hadoopJobName = 
hadoopJobName; } public String getJarUrl() { return jarUrl; } public void setJarUrl( String jarUrl ) { this.jarUrl = jarUrl; } public String getDriverClass() { return driverClass; } public void setDriverClass( String driverClass ) { this.driverClass = driverClass; } public boolean isSimple() { return isSimple; } public void setSimple( boolean isSimple ) { this.isSimple = isSimple; } public String getCmdLineArgs() { return cmdLineArgs; } public void setCmdLineArgs( String cmdLineArgs ) { this.cmdLineArgs = cmdLineArgs; } public String getOutputKeyClass() { return outputKeyClass; } public void setOutputKeyClass( String outputKeyClass ) { this.outputKeyClass = outputKeyClass; } public String getOutputValueClass() { return outputValueClass; } public void setOutputValueClass( String outputValueClass ) { this.outputValueClass = outputValueClass; } public String getMapperClass() { return mapperClass; } public void setMapperClass( String mapperClass ) { this.mapperClass = mapperClass; } public String getCombinerClass() { return combinerClass; } public void setCombinerClass( String combinerClass ) { this.combinerClass = combinerClass; } public String getReducerClass() { return reducerClass; } public void setReducerClass( String reducerClass ) { this.reducerClass = reducerClass; } public String getInputFormatClass() { return inputFormatClass; } public void setInputFormatClass( String inputFormatClass ) { this.inputFormatClass = inputFormatClass; } public String getOutputFormatClass() { return outputFormatClass; } public void setOutputFormatClass( String outputFormatClass ) { this.outputFormatClass = outputFormatClass; } public NamedCluster getNamedCluster() { return namedCluster; } public void setNamedCluster( NamedCluster namedCluster ) { this.namedCluster = namedCluster; } public String getInputPath() { return inputPath; } public void setInputPath( String inputPath ) { this.inputPath = inputPath; } public String getOutputPath() { return outputPath; } public void setOutputPath( String outputPath ) { this.outputPath = outputPath; } public boolean isBlocking() { return blocking; } public void setBlocking( boolean blocking ) { this.blocking = blocking; } public String getLoggingInterval() { return loggingInterval == null ? 
DEFAULT_LOGGING_INTERVAL : loggingInterval; } public void setLoggingInterval( String loggingInterval ) { this.loggingInterval = loggingInterval; } public List getUserDefined() { return userDefined; } public void setUserDefined( List userDefined ) { this.userDefined = userDefined; } public String getNumMapTasks() { return numMapTasks; } public void setNumMapTasks( String numMapTasks ) { this.numMapTasks = numMapTasks; } public String getNumReduceTasks() { return numReduceTasks; } public void setNumReduceTasks( String numReduceTasks ) { this.numReduceTasks = numReduceTasks; } public Result execute( final Result result, int arg1 ) throws KettleException { result.setNrErrors( 0 ); log.setLogLevel( parentJob.getLogLevel() ); try { URL resolvedJarUrl = resolveJarUrl( jarUrl ); if ( log.isDetailed() ) { logDetailed( BaseMessages.getString( PKG, "JobEntryHadoopJobExecutor.ResolvedJar", resolvedJarUrl .toExternalForm() ) ); } MapReduceService mapReduceService = namedClusterServiceLocator.getService( namedCluster, MapReduceService.class ); if ( isSimple ) { String simpleLoggingIntervalS = environmentSubstitute( getSimpleLoggingInterval() ); int simpleLogInt = 60; try { simpleLogInt = Integer.parseInt( simpleLoggingIntervalS, 10 ); } catch ( NumberFormatException e ) { logError( BaseMessages.getString( PKG, "ErrorParsingLogInterval", simpleLoggingIntervalS, simpleLogInt ) ); } MapReduceJobSimple mapReduceJobSimple = mapReduceService.executeSimple( resolvedJarUrl, environmentSubstitute( driverClass ), environmentSubstitute( cmdLineArgs ) ); String mainClass = mapReduceJobSimple.getMainClass(); if ( log.isDetailed() ) { logDetailed( BaseMessages.getString( PKG, "JobEntryHadoopJobExecutor.UsingDriverClass", mainClass == null ? "null" : mainClass ) ); logDetailed( BaseMessages.getString( PKG, "JobEntryHadoopJobExecutor.SimpleMode" ) ); } if ( simpleBlocking ) { boolean done = false; do { done = mapReduceJobSimple.waitOnCompletion( simpleLogInt, TimeUnit.SECONDS, new MapReduceService.Stoppable() { @Override public boolean isStopped() { return parentJob.isStopped(); } } ); logDetailed( BaseMessages .getString( JobEntryHadoopJobExecutor.class, "JobEntryHadoopJobExecutor.Blocking", mainClass ) ); } while ( !parentJob.isStopped() && !done ); if ( !done ) { mapReduceJobSimple.killJob(); } if ( !mapReduceJobSimple.isSuccessful() ) { result.setStopped( true ); result.setNrErrors( 1 ); result.setResult( false ); log.logError( BaseMessages.getString( PKG, "JobEntryHadoopJobExecutor.FailedToExecuteClass", mainClass, mapReduceJobSimple.getStatus() ) ); } } } else { if ( log.isDetailed() ) { logDetailed( BaseMessages.getString( PKG, "JobEntryHadoopJobExecutor.AdvancedMode" ) ); } MapReduceJobBuilder jobBuilder = mapReduceService.createJobBuilder( log, variables ); jobBuilder.setResolvedJarUrl( resolvedJarUrl ); jobBuilder.setJarUrl( environmentSubstitute( jarUrl ) ); jobBuilder.setHadoopJobName( environmentSubstitute( hadoopJobName ) ); jobBuilder.setOutputKeyClass( environmentSubstitute( outputKeyClass ) ); jobBuilder.setOutputValueClass( environmentSubstitute( outputValueClass ) ); if ( mapperClass != null ) { jobBuilder.setMapperClass( environmentSubstitute( mapperClass ) ); } if ( combinerClass != null ) { jobBuilder.setCombinerClass( environmentSubstitute( combinerClass ) ); } if ( reducerClass != null ) { jobBuilder.setReducerClass( environmentSubstitute( reducerClass ) ); } if ( inputFormatClass != null ) { jobBuilder.setInputFormatClass( environmentSubstitute( inputFormatClass ) ); } if ( outputFormatClass != 
null ) { jobBuilder.setOutputFormatClass( environmentSubstitute( outputFormatClass ) ); } jobBuilder.setInputPaths( JobEntryHadoopTransJobExecutor.splitInputPaths( inputPath, variables ) ); jobBuilder.setOutputPath( environmentSubstitute( outputPath ) ); // process user defined values for ( UserDefinedItem item : userDefined ) { if ( item.getName() != null && !"".equals( item.getName() ) && item.getValue() != null && !"".equals( item.getValue() ) ) { String nameS = environmentSubstitute( item.getName() ); String valueS = environmentSubstitute( item.getValue() ); jobBuilder.set( nameS, valueS ); } } String numMapTasksS = environmentSubstitute( numMapTasks ); String numReduceTasksS = environmentSubstitute( numReduceTasks ); int numM = 1; try { numM = Integer.parseInt( numMapTasksS ); } catch ( NumberFormatException e ) { logError( "Can't parse number of map tasks '" + numMapTasksS + "'. Setting num" + "map tasks to 1" ); } int numR = 1; try { numR = Integer.parseInt( numReduceTasksS ); } catch ( NumberFormatException e ) { logError( "Can't parse number of reduce tasks '" + numReduceTasksS + "'. Setting num" + "reduce tasks to 1" ); } jobBuilder.setNumMapTasks( numM ); jobBuilder.setNumReduceTasks( numR ); MapReduceJobAdvanced mapReduceJobAdvanced = jobBuilder.submit(); String loggingIntervalS = environmentSubstitute( getLoggingInterval() ); int logIntv = 60; try { logIntv = Integer.parseInt( loggingIntervalS ); } catch ( NumberFormatException e ) { logError( BaseMessages.getString( PKG, "ErrorParsingLogInterval", loggingIntervalS, logIntv ) ); } if ( blocking ) { try { int taskCompletionEventIndex = 0; while ( !mapReduceJobAdvanced .waitOnCompletion( logIntv >= 1 ? logIntv : 60, TimeUnit.SECONDS, new MapReduceService.Stoppable() { @Override public boolean isStopped() { return parentJob.isStopped(); } } ) ) { if ( logIntv >= 1 ) { printJobStatus( mapReduceJobAdvanced ); taskCompletionEventIndex = logTaskMessages( mapReduceJobAdvanced, taskCompletionEventIndex ); } } if ( parentJob.isStopped() && !mapReduceJobAdvanced.isComplete() ) { // We must stop the job running on Hadoop mapReduceJobAdvanced.killJob(); // Indicate this job entry did not complete result.setResult( false ); } printJobStatus( mapReduceJobAdvanced ); // Log any messages we may have missed while polling logTaskMessages( mapReduceJobAdvanced, taskCompletionEventIndex ); } catch ( InterruptedException ie ) { logError( ie.getMessage(), ie ); } // Entry is successful if the MR job is successful overall result.setResult( mapReduceJobAdvanced.isSuccessful() ); } } } catch ( Throwable t ) { t.printStackTrace(); result.setStopped( true ); result.setNrErrors( 1 ); result.setResult( false ); logError( t.getMessage(), t ); } return result; } @VisibleForTesting URL resolveJarUrl( final String jarUrl ) throws MalformedURLException { return resolveJarUrl( jarUrl, this ); } public static URL resolveJarUrl( final String jarUrl, VariableSpace variableSpace ) throws MalformedURLException { String jarUrlS = variableSpace.environmentSubstitute( jarUrl ); if ( jarUrlS.indexOf( "://" ) == -1 ) { // default to file:// File jarFile = new File( jarUrlS ); return jarFile.toURI().toURL(); } else { return new URL( jarUrlS ); } } /** * Log messages indicating completion (success/failure) of component tasks for the provided running job. 
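* (Added note: the value returned is simply the number of completion events fetched starting at {@code startIndex}, i.e. the length of the array returned by {@code getTaskCompletionEvents}.)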
* * @param runningJob Running job to poll for completion events * @param startIndex Start at this event index to poll from * @return Total events consumed * @throws IOException Error fetching events */ private int logTaskMessages( MapReduceJobAdvanced runningJob, int startIndex ) throws IOException { TaskCompletionEvent[] tcEvents = runningJob.getTaskCompletionEvents( startIndex ); for ( int i = 0; i < tcEvents.length; i++ ) { String[] diags = runningJob.getTaskDiagnostics( tcEvents[ i ].getTaskAttemptId() ); StringBuilder diagsOutput = new StringBuilder(); if ( diags != null && diags.length > 0 ) { diagsOutput.append( Const.CR ); for ( String s : diags ) { diagsOutput.append( s ); diagsOutput.append( Const.CR ); } } switch ( tcEvents[ i ].getTaskStatus() ) { case KILLED: logError( BaseMessages .getString( PKG, "JobEntryHadoopJobExecutor.TaskDetails", TaskCompletionEvent.Status.KILLED, tcEvents[ i ].getTaskAttemptId(), tcEvents[ i ].getTaskAttemptId(), tcEvents[ i ].getEventId(), diagsOutput ) ); //$NON-NLS-1$ break; case FAILED: logError( BaseMessages .getString( PKG, "JobEntryHadoopJobExecutor.TaskDetails", TaskCompletionEvent.Status.FAILED, tcEvents[ i ].getTaskAttemptId(), tcEvents[ i ].getTaskAttemptId(), tcEvents[ i ].getEventId(), diagsOutput ) ); //$NON-NLS-1$ break; case SUCCEEDED: logDetailed( BaseMessages .getString( PKG, "JobEntryHadoopJobExecutor.TaskDetails", TaskCompletionEvent.Status.SUCCEEDED, tcEvents[ i ].getTaskAttemptId(), tcEvents[ i ].getTaskAttemptId(), tcEvents[ i ].getEventId(), diagsOutput ) ); //$NON-NLS-1$ break; } } return tcEvents.length; } /** * Execute the main method of the provided class with the current command line arguments. * * @param clazz Class with main method to execute * @throws NoSuchMethodException * @throws IllegalAccessException * @throws InvocationTargetException */ protected void executeMainMethod( Class clazz ) throws NoSuchMethodException, IllegalAccessException, InvocationTargetException { final ClassLoader cl = Thread.currentThread().getContextClassLoader(); try { Thread.currentThread().setContextClassLoader( clazz.getClassLoader() ); Method mainMethod = clazz.getMethod( "main", new Class[] { String[].class } ); String commandLineArgs = environmentSubstitute( cmdLineArgs ); Object[] args = ( commandLineArgs != null ) ? 
new Object[] { commandLineArgs.split( " " ) } : new Object[ 0 ]; mainMethod.invoke( null, args ); } finally { Thread.currentThread().setContextClassLoader( cl ); } } public void printJobStatus( MapReduceJobAdvanced runningJob ) throws IOException { if ( log.isBasic() ) { double setupPercent = runningJob.getSetupProgress() * 100f; double mapPercent = runningJob.getMapProgress() * 100f; double reducePercent = runningJob.getReduceProgress() * 100f; logBasic( BaseMessages.getString( PKG, "JobEntryHadoopJobExecutor.RunningPercent", setupPercent, mapPercent, reducePercent ) ); } } @Override public void loadXML( Node entrynode, List databases, List slaveServers, Repository rep, IMetaStore metaStore ) throws KettleXMLException { super.loadXML( entrynode, databases, slaveServers ); hadoopJobName = XMLHandler.getTagValue( entrynode, "hadoop_job_name" ); isSimple = "Y".equalsIgnoreCase( XMLHandler.getTagValue( entrynode, "simple" ) ); jarUrl = XMLHandler.getTagValue( entrynode, "jar_url" ); driverClass = XMLHandler.getTagValue( entrynode, "driver_class" ); cmdLineArgs = XMLHandler.getTagValue( entrynode, "command_line_args" ); simpleBlocking = "Y".equalsIgnoreCase( XMLHandler.getTagValue( entrynode, "simple_blocking" ) ); blocking = "Y".equalsIgnoreCase( XMLHandler.getTagValue( entrynode, "blocking" ) ); simpleLoggingInterval = XMLHandler.getTagValue( entrynode, "simple_logging_interval" ); loggingInterval = XMLHandler.getTagValue( entrynode, "logging_interval" ); mapperClass = XMLHandler.getTagValue( entrynode, "mapper_class" ); combinerClass = XMLHandler.getTagValue( entrynode, "combiner_class" ); reducerClass = XMLHandler.getTagValue( entrynode, "reducer_class" ); inputPath = XMLHandler.getTagValue( entrynode, "input_path" ); inputFormatClass = XMLHandler.getTagValue( entrynode, "input_format_class" ); outputPath = XMLHandler.getTagValue( entrynode, "output_path" ); outputKeyClass = XMLHandler.getTagValue( entrynode, "output_key_class" ); outputValueClass = XMLHandler.getTagValue( entrynode, "output_value_class" ); outputFormatClass = XMLHandler.getTagValue( entrynode, "output_format_class" ); namedCluster = namedClusterLoadSaveUtil.loadClusterConfig( namedClusterService, null, rep, metaStore, entrynode, log ); setRepository( rep ); // numMapTasks = Integer.parseInt(XMLHandler.getTagValue(entrynode, "num_map_tasks")); numMapTasks = XMLHandler.getTagValue( entrynode, "num_map_tasks" ); // numReduceTasks = Integer.parseInt(XMLHandler.getTagValue(entrynode, "num_reduce_tasks")); numReduceTasks = XMLHandler.getTagValue( entrynode, "num_reduce_tasks" ); // How many user defined elements? 
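// Illustrative only -- the reads below expect a job-entry XML fragment shaped roughly like the following,
// where the element names come from the tag strings used here and the property name/value are made-up examples:
//   <user_defined_list>
//     <user_defined><name>mapred.map.tasks.speculative.execution</name><value>false</value></user_defined>
//   </user_defined_list>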
userDefined = new ArrayList(); Node userDefinedList = XMLHandler.getSubNode( entrynode, "user_defined_list" ); int nrUserDefined = XMLHandler.countNodes( userDefinedList, "user_defined" ); for ( int i = 0; i < nrUserDefined; i++ ) { Node userDefinedNode = XMLHandler.getSubNodeByNr( userDefinedList, "user_defined", i ); String name = XMLHandler.getTagValue( userDefinedNode, "name" ); String value = XMLHandler.getTagValue( userDefinedNode, "value" ); UserDefinedItem item = new UserDefinedItem(); item.setName( name ); item.setValue( value ); userDefined.add( item ); } } @Override public String getXML() { StringBuilder retval = new StringBuilder( 1024 ); retval.append( super.getXML() ); retval.append( " " ).append( XMLHandler.addTagValue( "hadoop_job_name", hadoopJobName ) ); retval.append( " " ).append( XMLHandler.addTagValue( "simple", isSimple ) ); retval.append( " " ).append( XMLHandler.addTagValue( "jar_url", jarUrl ) ); retval.append( " " ).append( XMLHandler.addTagValue( "driver_class", driverClass ) ); retval.append( " " ).append( XMLHandler.addTagValue( "command_line_args", cmdLineArgs ) ); retval.append( " " ).append( XMLHandler.addTagValue( "simple_blocking", simpleBlocking ) ); retval.append( " " ).append( XMLHandler.addTagValue( "blocking", blocking ) ); retval.append( " " ).append( XMLHandler.addTagValue( "logging_interval", loggingInterval ) ); retval.append( " " ).append( XMLHandler.addTagValue( "simple_logging_interval", simpleLoggingInterval ) ); retval.append( " " ).append( XMLHandler.addTagValue( "hadoop_job_name", hadoopJobName ) ); retval.append( " " ).append( XMLHandler.addTagValue( "mapper_class", mapperClass ) ); retval.append( " " ).append( XMLHandler.addTagValue( "combiner_class", combinerClass ) ); retval.append( " " ).append( XMLHandler.addTagValue( "reducer_class", reducerClass ) ); retval.append( " " ).append( XMLHandler.addTagValue( "input_path", inputPath ) ); retval.append( " " ).append( XMLHandler.addTagValue( "input_format_class", inputFormatClass ) ); retval.append( " " ).append( XMLHandler.addTagValue( "output_path", outputPath ) ); retval.append( " " ).append( XMLHandler.addTagValue( "output_key_class", outputKeyClass ) ); retval.append( " " ).append( XMLHandler.addTagValue( "output_value_class", outputValueClass ) ); retval.append( " " ).append( XMLHandler.addTagValue( "output_format_class", outputFormatClass ) ); namedClusterLoadSaveUtil.getXmlNamedCluster( namedCluster, namedClusterService, metaStore, log, retval ); retval.append( " " ).append( XMLHandler.addTagValue( "num_map_tasks", numMapTasks ) ); retval.append( " " ).append( XMLHandler.addTagValue( "num_reduce_tasks", numReduceTasks ) ); retval.append( " " ).append( Const.CR ); if ( userDefined != null ) { for ( UserDefinedItem item : userDefined ) { if ( item.getName() != null && !"".equals( item.getName() ) && item.getValue() != null && !"".equals( item.getValue() ) ) { retval.append( " " ).append( Const.CR ); retval.append( " " ).append( XMLHandler.addTagValue( "name", item.getName() ) ); retval.append( " " ).append( XMLHandler.addTagValue( "value", item.getValue() ) ); retval.append( " " ).append( Const.CR ); } } } retval.append( " " ).append( Const.CR ); return retval.toString(); } @Override public void loadRep( Repository rep, IMetaStore metaStore, ObjectId id_jobentry, List databases, List slaveServers ) throws KettleException { if ( rep != null ) { super.loadRep( rep, metaStore, id_jobentry, databases, slaveServers ); setHadoopJobName( rep.getJobEntryAttributeString( id_jobentry, 
"hadoop_job_name" ) ); setSimple( rep.getJobEntryAttributeBoolean( id_jobentry, "simple" ) ); setJarUrl( rep.getJobEntryAttributeString( id_jobentry, "jar_url" ) ); setDriverClass( rep.getJobEntryAttributeString( id_jobentry, "driver_class" ) ); setCmdLineArgs( rep.getJobEntryAttributeString( id_jobentry, "command_line_args" ) ); setSimpleBlocking( rep.getJobEntryAttributeBoolean( id_jobentry, "simple_blocking" ) ); setBlocking( rep.getJobEntryAttributeBoolean( id_jobentry, "blocking" ) ); setSimpleLoggingInterval( rep.getJobEntryAttributeString( id_jobentry, "simple_logging_interval" ) ); setLoggingInterval( rep.getJobEntryAttributeString( id_jobentry, "logging_interval" ) ); setMapperClass( rep.getJobEntryAttributeString( id_jobentry, "mapper_class" ) ); setCombinerClass( rep.getJobEntryAttributeString( id_jobentry, "combiner_class" ) ); setReducerClass( rep.getJobEntryAttributeString( id_jobentry, "reducer_class" ) ); setInputPath( rep.getJobEntryAttributeString( id_jobentry, "input_path" ) ); setInputFormatClass( rep.getJobEntryAttributeString( id_jobentry, "input_format_class" ) ); setOutputPath( rep.getJobEntryAttributeString( id_jobentry, "output_path" ) ); setOutputKeyClass( rep.getJobEntryAttributeString( id_jobentry, "output_key_class" ) ); setOutputValueClass( rep.getJobEntryAttributeString( id_jobentry, "output_value_class" ) ); setOutputFormatClass( rep.getJobEntryAttributeString( id_jobentry, "output_format_class" ) ); namedCluster = namedClusterLoadSaveUtil.loadClusterConfig( namedClusterService, id_jobentry, rep, metaStore, null, log ); setRepository( rep ); // setNumMapTasks(new Long(rep.getJobEntryAttributeInteger(id_jobentry, "num_map_tasks")).intValue()); setNumMapTasks( rep.getJobEntryAttributeString( id_jobentry, "num_map_tasks" ) ); // setNumReduceTasks(new Long(rep.getJobEntryAttributeInteger(id_jobentry, "num_reduce_tasks")).intValue()); setNumReduceTasks( rep.getJobEntryAttributeString( id_jobentry, "num_reduce_tasks" ) ); int argnr = rep.countNrJobEntryAttributes( id_jobentry, "user_defined_name" ); //$NON-NLS-1$ if ( argnr > 0 ) { userDefined = new ArrayList(); UserDefinedItem item = null; for ( int i = 0; i < argnr; i++ ) { item = new UserDefinedItem(); item.setName( rep.getJobEntryAttributeString( id_jobentry, i, "user_defined_name" ) ); //$NON-NLS-1$ item.setValue( rep.getJobEntryAttributeString( id_jobentry, i, "user_defined_value" ) ); //$NON-NLS-1$ userDefined.add( item ); } } } else { throw new KettleException( "Unable to save to a repository. The repository is null." 
); //$NON-NLS-1$ } } @Override public void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_job ) throws KettleException { if ( rep != null ) { super.saveRep( rep, id_job ); rep.saveJobEntryAttribute( id_job, getObjectId(), "hadoop_job_name", hadoopJobName ); //$NON-NLS-1$ rep.saveJobEntryAttribute( id_job, getObjectId(), "simple", isSimple ); //$NON-NLS-1$ rep.saveJobEntryAttribute( id_job, getObjectId(), "jar_url", jarUrl ); //$NON-NLS-1$ rep.saveJobEntryAttribute( id_job, getObjectId(), "driver_class", driverClass ); //$NON-NLS-1$ rep.saveJobEntryAttribute( id_job, getObjectId(), "command_line_args", cmdLineArgs ); //$NON-NLS-1$ rep.saveJobEntryAttribute( id_job, getObjectId(), "simple_blocking", simpleBlocking ); //$NON-NLS-1$ rep.saveJobEntryAttribute( id_job, getObjectId(), "blocking", blocking ); //$NON-NLS-1$ rep .saveJobEntryAttribute( id_job, getObjectId(), "simple_logging_interval", simpleLoggingInterval ); //$NON-NLS-1$ rep.saveJobEntryAttribute( id_job, getObjectId(), "logging_interval", loggingInterval ); //$NON-NLS-1$ rep.saveJobEntryAttribute( id_job, getObjectId(), "hadoop_job_name", hadoopJobName ); //$NON-NLS-1$ rep.saveJobEntryAttribute( id_job, getObjectId(), "mapper_class", mapperClass ); //$NON-NLS-1$ rep.saveJobEntryAttribute( id_job, getObjectId(), "combiner_class", combinerClass ); //$NON-NLS-1$ rep.saveJobEntryAttribute( id_job, getObjectId(), "reducer_class", reducerClass ); //$NON-NLS-1$ rep.saveJobEntryAttribute( id_job, getObjectId(), "input_path", inputPath ); //$NON-NLS-1$ rep.saveJobEntryAttribute( id_job, getObjectId(), "input_format_class", inputFormatClass ); //$NON-NLS-1$ rep.saveJobEntryAttribute( id_job, getObjectId(), "output_path", outputPath ); //$NON-NLS-1$ rep.saveJobEntryAttribute( id_job, getObjectId(), "output_key_class", outputKeyClass ); //$NON-NLS-1$ rep.saveJobEntryAttribute( id_job, getObjectId(), "output_value_class", outputValueClass ); //$NON-NLS-1$ rep.saveJobEntryAttribute( id_job, getObjectId(), "output_format_class", outputFormatClass ); //$NON-NLS-1$ namedClusterLoadSaveUtil .saveNamedClusterRep( namedCluster, namedClusterService, rep, metaStore, id_job, getObjectId(), log ); rep.saveJobEntryAttribute( id_job, getObjectId(), "num_map_tasks", numMapTasks ); //$NON-NLS-1$ rep.saveJobEntryAttribute( id_job, getObjectId(), "num_reduce_tasks", numReduceTasks ); //$NON-NLS-1$ if ( userDefined != null ) { for ( int i = 0; i < userDefined.size(); i++ ) { UserDefinedItem item = userDefined.get( i ); if ( item.getName() != null && !"".equals( item.getName() ) && item.getValue() != null && !"" .equals( item.getValue() ) ) { //$NON-NLS-1$ //$NON-NLS-2$ rep.saveJobEntryAttribute( id_job, getObjectId(), i, "user_defined_name", item.getName() ); //$NON-NLS-1$ rep.saveJobEntryAttribute( id_job, getObjectId(), i, "user_defined_value", item.getValue() ); //$NON-NLS-1$ } } } } else { throw new KettleException( "Unable to save to a repository. The repository is null." ); //$NON-NLS-1$ } } @Override public boolean evaluates() { return true; } @Override public boolean isUnconditional() { return true; } public String getSimpleLoggingInterval() { return simpleLoggingInterval == null ? 
DEFAULT_LOGGING_INTERVAL : simpleLoggingInterval; } public void setSimpleLoggingInterval( String simpleLoggingInterval ) { this.simpleLoggingInterval = simpleLoggingInterval; } public boolean isSimpleBlocking() { return simpleBlocking; } public void setSimpleBlocking( boolean simpleBlocking ) { this.simpleBlocking = simpleBlocking; } @Override public String getDialogClassName() { return DIALOG_NAME; } } ================================================ FILE: kettle-plugins/mapreduce/core/src/main/java/org/pentaho/big/data/kettle/plugins/mapreduce/entry/pmr/JobEntryHadoopTransJobExecutor.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.mapreduce.entry.pmr; import org.pentaho.big.data.api.services.BigDataServicesHelper; import org.pentaho.big.data.impl.cluster.NamedClusterManager; import org.pentaho.big.data.kettle.plugins.mapreduce.DialogClassUtil; import org.pentaho.big.data.kettle.plugins.mapreduce.entry.NamedClusterLoadSaveUtil; import org.pentaho.big.data.kettle.plugins.mapreduce.entry.UserDefinedItem; import org.pentaho.big.data.kettle.plugins.mapreduce.step.exit.HadoopExitMeta; import org.pentaho.di.cluster.SlaveServer; import org.pentaho.di.core.Const; import org.pentaho.di.core.ObjectLocationSpecificationMethod; import org.pentaho.di.core.Result; import org.pentaho.di.core.annotations.JobEntry; import org.pentaho.di.core.bowl.Bowl; import org.pentaho.di.core.database.DatabaseMeta; import org.pentaho.di.core.exception.KettleException; import org.pentaho.di.core.exception.KettleXMLException; import org.pentaho.di.core.plugins.JobEntryPluginType; import org.pentaho.di.core.plugins.PluginInterface; import org.pentaho.di.core.plugins.PluginRegistry; import org.pentaho.di.core.row.RowMetaInterface; import org.pentaho.di.core.row.ValueMetaInterface; import org.pentaho.di.core.util.CurrentDirectoryResolver; import org.pentaho.di.core.variables.VariableSpace; import org.pentaho.di.core.xml.XMLHandler; import org.pentaho.di.i18n.BaseMessages; import org.pentaho.di.job.entry.JobEntryBase; import org.pentaho.di.job.entry.JobEntryInterface; import org.pentaho.di.repository.HasRepositoryDirectories; import org.pentaho.di.repository.ObjectId; import org.pentaho.di.repository.Repository; import org.pentaho.di.repository.RepositoryDirectory; import org.pentaho.di.repository.RepositoryDirectoryInterface; import org.pentaho.di.repository.StringObjectId; import org.pentaho.di.resource.ResourceDefinition; import org.pentaho.di.resource.ResourceNamingInterface; import org.pentaho.di.trans.TransConfiguration; import org.pentaho.di.trans.TransExecutionConfiguration; import org.pentaho.di.trans.TransMeta; import org.pentaho.di.trans.TransMeta.TransformationType; import org.pentaho.di.trans.step.StepMeta; import org.pentaho.hadoop.shim.api.cluster.NamedCluster; import org.pentaho.hadoop.shim.api.cluster.NamedClusterService; import org.pentaho.hadoop.shim.api.cluster.NamedClusterServiceLocator; import org.pentaho.hadoop.shim.api.mapreduce.MapReduceJobAdvanced; import org.pentaho.hadoop.shim.api.mapreduce.MapReduceJobBuilder; import org.pentaho.hadoop.shim.api.mapreduce.MapReduceService; 
import org.pentaho.hadoop.shim.api.mapreduce.PentahoMapReduceJobBuilder; import org.pentaho.hadoop.shim.api.mapreduce.TaskCompletionEvent; import org.pentaho.metastore.api.IMetaStore; import org.pentaho.runtime.test.RuntimeTester; import org.pentaho.runtime.test.action.RuntimeTestActionService; import org.pentaho.runtime.test.action.impl.RuntimeTestActionServiceImpl; import org.pentaho.runtime.test.impl.RuntimeTesterImpl; import org.w3c.dom.Node; import java.io.IOException; import java.util.ArrayList; import java.util.List; import java.util.Map; import java.util.regex.Matcher; import java.util.regex.Pattern; @SuppressWarnings( "deprecation" ) @JobEntry( id = "HadoopTransJobExecutorPlugin", image = "HDT.svg", name = "HadoopTransJobExecutorPlugin.Name", description = "HadoopTransJobExecutorPlugin.Description", categoryDescription = "i18n:org.pentaho.di.job:JobCategory.Category.BigData", i18nPackageName = "org.pentaho.di.job.entries.hadooptransjobexecutor" ) public class JobEntryHadoopTransJobExecutor extends JobEntryBase implements Cloneable, JobEntryInterface, HasRepositoryDirectories { public static final String MAPREDUCE_APPLICATION_CLASSPATH = "mapreduce.application.classpath"; public static final String DEFAULT_MAPREDUCE_APPLICATION_CLASSPATH = "$HADOOP_MAPRED_HOME/share/hadoop/mapreduce/*,$HADOOP_MAPRED_HOME/share/hadoop/mapreduce/lib/*"; public static final String PENTAHO_MAPREDUCE_PROPERTY_USE_DISTRIBUTED_CACHE = "pmr.use.distributed.cache"; // $NON-NLS-1$ public static final String PENTAHO_MAPREDUCE_PROPERTY_PMR_LIBRARIES_ARCHIVE_FILE = "pmr.libraries.archive.file"; public static final String PENTAHO_MAPREDUCE_PROPERTY_KETTLE_HDFS_INSTALL_DIR = "pmr.kettle.dfs.install.dir"; public static final String PENTAHO_MAPREDUCE_PROPERTY_KETTLE_INSTALLATION_ID = "pmr.kettle.installation.id"; public static final String PENTAHO_MAPREDUCE_PROPERTY_ADDITIONAL_PLUGINS = "pmr.kettle.additional.plugins"; private static Class PKG = JobEntryHadoopTransJobExecutor.class; // for i18n purposes, needed by Translator2!! 
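// Note: DialogClassUtil.getDialogClassName swaps the base plugin package for its ".ui" sibling and appends "Dialog",
// so DIALOG_NAME below resolves to org.pentaho.big.data.kettle.plugins.mapreduce.ui.entry.pmr.JobEntryHadoopTransJobExecutorDialog.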
public static final String DIALOG_NAME = DialogClassUtil.getDialogClassName( PKG ); private final NamedClusterService namedClusterService; private final NamedClusterServiceLocator namedClusterServiceLocator; private final NamedClusterLoadSaveUtil namedClusterLoadSaveUtil = new NamedClusterLoadSaveUtil(); private String hadoopJobName; private String mapRepositoryDir; private String mapRepositoryFile; private ObjectId mapRepositoryReference; private String mapTrans; private String combinerRepositoryDir; private String combinerRepositoryFile; private ObjectId combinerRepositoryReference; private String combinerTrans; private boolean combiningSingleThreaded; private String reduceRepositoryDir; private String reduceRepositoryFile; private ObjectId reduceRepositoryReference; private String reduceTrans; private boolean reducingSingleThreaded; private String mapInputStepName; private String mapOutputStepName; private String combinerInputStepName; private String combinerOutputStepName; private String reduceInputStepName; private String reduceOutputStepName; private boolean suppressOutputMapKey; private boolean suppressOutputMapValue; private boolean suppressOutputKey; private boolean suppressOutputValue; private String inputFormatClass; private String outputFormatClass; private NamedCluster namedCluster; private String inputPath; private String outputPath; private boolean cleanOutputPath; private boolean blocking = true; private String loggingInterval = "60"; private String numMapTasks = "1"; private String numReduceTasks = "1"; private static final String KTR_EXT = ".ktr"; private List userDefined = new ArrayList(); private final RuntimeTester runtimeTester; private final RuntimeTestActionService runtimeTestActionService; public JobEntryHadoopTransJobExecutor( NamedClusterService namedClusterService, RuntimeTestActionService runtimeTestActionService, RuntimeTester runtimeTester, NamedClusterServiceLocator namedClusterServiceLocator ) throws Throwable { this.namedClusterService = namedClusterService; this.runtimeTestActionService = runtimeTestActionService; this.namedClusterServiceLocator = namedClusterServiceLocator; this.runtimeTester = runtimeTester; reducingSingleThreaded = false; combiningSingleThreaded = false; } public JobEntryHadoopTransJobExecutor() { this.namedClusterService = NamedClusterManager.getInstance(); this.runtimeTester = RuntimeTesterImpl.getInstance(); this.runtimeTestActionService = RuntimeTestActionServiceImpl.getInstance(); this.namedClusterServiceLocator = BigDataServicesHelper.getNamedClusterServiceLocator(); } protected static final TransMeta loadTransMeta( Bowl bowl, VariableSpace space, Repository rep, String filename, ObjectId transformationId, String repositoryDir, String repositoryFile ) throws KettleException { TransMeta transMeta = null; if ( rep == null ) { if ( !Const.isEmpty( filename ) ) { String realFilename = space.environmentSubstitute( filename ); transMeta = new TransMeta( bowl, realFilename ); } } else { if ( !Const.isEmpty( filename ) ) { transMeta = getTransMetaFromRepo( filename, rep, space ); } else if ( transformationId != null ) { transMeta = rep.loadTransformation( transformationId, null ); } else if ( !Const.isEmpty( repositoryDir ) && !Const.isEmpty( repositoryFile ) ) { transMeta = getTransMetaFromRepo( repositoryDir, repositoryFile, rep, space ); } } return transMeta; } public static TransMeta getTransMetaFromRepo( String fullPath, Repository rep, VariableSpace space ) throws KettleException { if ( fullPath == null ) { return null; } String 
trimPath = fullPath.trim(); if ( trimPath.isEmpty() || trimPath.endsWith( "/" ) ) { return null; } int index = trimPath.lastIndexOf( '/' ); if ( index == -1 ) { return null; } String filename = trimPath.substring( index + 1 ); String repDir = trimPath.substring( 0, index ); return getTransMetaFromRepo( repDir, filename, rep, space ); } public static TransMeta getTransMetaFromRepo( String repositoryDir, String repositoryFile, Repository rep, VariableSpace space ) throws KettleException { if ( space instanceof JobEntryHadoopTransJobExecutor ) { CurrentDirectoryResolver r = new CurrentDirectoryResolver(); JobEntryHadoopTransJobExecutor jobEntry = (JobEntryHadoopTransJobExecutor) space; space = r.resolveCurrentDirectory( jobEntry.getParentJobMeta().getBowl(), jobEntry, jobEntry.getParentJob().getRepositoryDirectory(), null ); } String repositoryDirS = space.environmentSubstitute( repositoryDir ); if ( repositoryDirS.isEmpty() ) { repositoryDirS = "/"; } String repositoryFileS = space.environmentSubstitute( repositoryFile ); RepositoryDirectoryInterface repositoryDirectory = rep.loadRepositoryDirectoryTree().findDirectory( repositoryDirS ); return rep.loadTransformation( repositoryFileS, repositoryDirectory, null, true, null ); } public static String[] splitInputPaths( String inputPath, VariableSpace variableSpace ) { String inputPathS = variableSpace.environmentSubstitute( inputPath ); // This is a non-elegant way to split the path on commas unless inside curly braces. There should be // a method in Const and/or a fancy regex for this kind of thing. Instead, find the curly-brace groups // and temporarily replace the commas with a non-sensical string. Then restore the commas after // splitting the input paths. Matcher m = Pattern.compile( "[{][^{]*[}]" ).matcher( inputPathS ); StringBuffer sb = new StringBuffer(); while ( m.find() ) { m.appendReplacement( sb, m.group().replace( ",", "@!@" ) ); } m.appendTail( sb ); return sb.toString().split( "," ); } public String getHadoopJobName() { return hadoopJobName; } public void setHadoopJobName( String hadoopJobName ) { this.hadoopJobName = hadoopJobName; } /** * @return An array of 3 elements : 0 - specification method for mapper, * 1 - specification method for combiner, * 2 - specification method for reducer. */ @Override public ObjectLocationSpecificationMethod[] getSpecificationMethods() { return new ObjectLocationSpecificationMethod[] { defineSpecificationMethod( mapRepositoryDir, mapRepositoryFile, mapRepositoryReference ), defineSpecificationMethod( combinerRepositoryDir, combinerRepositoryFile, combinerRepositoryReference ), defineSpecificationMethod( reduceRepositoryDir, reduceRepositoryFile, reduceRepositoryReference ) }; } /** * Returns an array of 3 elements : 0 - mapper, 1 - combiner, 2 - reducer directories * @return String array[2] of mapper, combiner, reducer repository directories */ @Override public String[] getDirectories() { return new String[]{ mapRepositoryDir != null ? mapRepositoryDir : mapTrans, combinerRepositoryDir != null ? combinerRepositoryDir : combinerTrans, reduceRepositoryDir != null ? 
reduceRepositoryDir : reduceTrans }; } /** * Updates repository directories with values from an array of 3 elements : * 0 - mapper, 1 - combiner, 2 - reducer directories * @param directory Array[2] of updated mapper, combiner, reducer directories to set */ @Override public void setDirectories( String[] directory ) { if ( mapRepositoryDir != null ) { mapRepositoryDir = directory[0]; } else { mapTrans = directory[0]; } if ( combinerRepositoryDir != null ) { combinerRepositoryDir = directory[1]; } else { combinerTrans = directory[1]; } if ( reduceRepositoryDir != null ) { reduceRepositoryDir = directory[2]; } else { reduceTrans = directory[2]; } } public String getMapTrans() { return mapTrans; } public void setMapTrans( String mapTrans ) { this.mapTrans = mapTrans; } public String getCombinerTrans() { return combinerTrans; } public void setCombinerTrans( String combinerTrans ) { this.combinerTrans = combinerTrans; } public String getReduceTrans() { return reduceTrans; } public void setReduceTrans( String reduceTrans ) { this.reduceTrans = reduceTrans; } public String getMapRepositoryDir() { return mapRepositoryDir; } public void setMapRepositoryDir( String mapRepositoryDir ) { this.mapRepositoryDir = mapRepositoryDir; } public String getMapRepositoryFile() { return mapRepositoryFile; } public void setMapRepositoryFile( String mapRepositoryFile ) { this.mapRepositoryFile = mapRepositoryFile; } public ObjectId getMapRepositoryReference() { return mapRepositoryReference; } public void setMapRepositoryReference( ObjectId mapRepositoryReference ) { this.mapRepositoryReference = mapRepositoryReference; } public String getCombinerRepositoryDir() { return combinerRepositoryDir; } public void setCombinerRepositoryDir( String combinerRepositoryDir ) { this.combinerRepositoryDir = combinerRepositoryDir; } public String getCombinerRepositoryFile() { return combinerRepositoryFile; } public void setCombinerRepositoryFile( String combinerRepositoryFile ) { this.combinerRepositoryFile = combinerRepositoryFile; } public ObjectId getCombinerRepositoryReference() { return combinerRepositoryReference; } public void setCombinerRepositoryReference( ObjectId combinerRepositoryReference ) { this.combinerRepositoryReference = combinerRepositoryReference; } public String getReduceRepositoryDir() { return reduceRepositoryDir; } public void setReduceRepositoryDir( String reduceRepositoryDir ) { this.reduceRepositoryDir = reduceRepositoryDir; } public String getReduceRepositoryFile() { return reduceRepositoryFile; } public void setReduceRepositoryFile( String reduceRepositoryFile ) { this.reduceRepositoryFile = reduceRepositoryFile; } public ObjectId getReduceRepositoryReference() { return reduceRepositoryReference; } public void setReduceRepositoryReference( ObjectId reduceRepositoryReference ) { this.reduceRepositoryReference = reduceRepositoryReference; } public String getMapInputStepName() { return mapInputStepName; } public void setMapInputStepName( String mapInputStepName ) { this.mapInputStepName = mapInputStepName; } public String getMapOutputStepName() { return mapOutputStepName; } public void setMapOutputStepName( String mapOutputStepName ) { this.mapOutputStepName = mapOutputStepName; } public String getCombinerInputStepName() { return combinerInputStepName; } public void setCombinerInputStepName( String combinerInputStepName ) { this.combinerInputStepName = combinerInputStepName; } public String getCombinerOutputStepName() { return combinerOutputStepName; } public void setCombinerOutputStepName( String 
combinerOutputStepName ) { this.combinerOutputStepName = combinerOutputStepName; } public String getReduceInputStepName() { return reduceInputStepName; } public void setReduceInputStepName( String reduceInputStepName ) { this.reduceInputStepName = reduceInputStepName; } public String getReduceOutputStepName() { return reduceOutputStepName; } public void setReduceOutputStepName( String reduceOutputStepName ) { this.reduceOutputStepName = reduceOutputStepName; } public boolean getSuppressOutputOfMapKey() { return suppressOutputMapKey; } public void setSuppressOutputOfMapKey( boolean suppress ) { suppressOutputMapKey = suppress; } public boolean getSuppressOutputOfMapValue() { return suppressOutputMapValue; } public void setSuppressOutputOfMapValue( boolean suppress ) { suppressOutputMapValue = suppress; } public boolean getSuppressOutputOfKey() { return suppressOutputKey; } public void setSuppressOutputOfKey( boolean suppress ) { suppressOutputKey = suppress; } public boolean getSuppressOutputOfValue() { return suppressOutputValue; } public void setSuppressOutputOfValue( boolean suppress ) { suppressOutputValue = suppress; } public String getInputFormatClass() { return inputFormatClass; } public void setInputFormatClass( String inputFormatClass ) { this.inputFormatClass = inputFormatClass; } public String getOutputFormatClass() { return outputFormatClass; } public void setOutputFormatClass( String outputFormatClass ) { this.outputFormatClass = outputFormatClass; } public String getInputPath() { return inputPath; } public void setInputPath( String inputPath ) { this.inputPath = inputPath; } public String getOutputPath() { return outputPath; } public void setOutputPath( String outputPath ) { this.outputPath = outputPath; } public boolean isCleanOutputPath() { return cleanOutputPath; } public void setCleanOutputPath( boolean cleanOutputPath ) { this.cleanOutputPath = cleanOutputPath; } public boolean isBlocking() { return blocking; } public void setBlocking( boolean blocking ) { this.blocking = blocking; } public String getLoggingInterval() { return loggingInterval; } public void setLoggingInterval( String loggingInterval ) { this.loggingInterval = loggingInterval; } public List getUserDefined() { return userDefined; } public void setUserDefined( List userDefined ) { this.userDefined = userDefined; } public String getNumMapTasks() { return numMapTasks; } public void setNumMapTasks( String numMapTasks ) { this.numMapTasks = numMapTasks; } public String getNumReduceTasks() { return numReduceTasks; } public void setNumReduceTasks( String numReduceTasks ) { this.numReduceTasks = numReduceTasks; } public Result execute( Result result, int arg1 ) throws KettleException { result.setNrErrors( 0 ); try { MapReduceService mapReduceService = namedClusterServiceLocator.getService( namedCluster, MapReduceService.class ); PentahoMapReduceJobBuilder jobBuilder = mapReduceService.createPentahoMapReduceJobBuilder( log, variables ); String hadoopJobNameS = environmentSubstitute( hadoopJobName ); jobBuilder.setHadoopJobName( hadoopJobNameS ); // mapper TransExecutionConfiguration transExecConfig = new TransExecutionConfiguration(); TransMeta transMeta = loadTransMeta( parentJobMeta.getBowl(), this, rep, mapTrans, mapRepositoryReference, mapRepositoryDir, mapRepositoryFile ); TransConfiguration transConfig = new TransConfiguration( transMeta, transExecConfig ); String mapInputStepNameS = environmentSubstitute( mapInputStepName ); String mapOutputStepNameS = environmentSubstitute( mapOutputStepName ); try { 
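// Fail fast: confirm the mapper transformation really contains the configured input/output steps; if
// verifyTransMeta throws, the catch below rewraps the failure with the MapConfiguration.Error message
// before anything is submitted to the cluster.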
jobBuilder.verifyTransMeta( transMeta, mapInputStepNameS, mapOutputStepNameS ); } catch ( Exception ex ) { throw new KettleException( BaseMessages .getString( PKG, "JobEntryHadoopTransJobExecutor.MapConfiguration.Error" ), ex ); } finally { if ( transMeta != null ) { transMeta.disposeEmbeddedMetastoreProvider(); } } jobBuilder.setMapperInfo( transConfig.getXML(), mapInputStepNameS, mapOutputStepNameS ); jobBuilder.set( MapReduceJobBuilder.STRING_COMBINE_SINGLE_THREADED, combiningSingleThreaded ? "true" : "false" ); // Pass the single threaded reduction to the configuration... // jobBuilder.set( MapReduceJobBuilder.STRING_REDUCE_SINGLE_THREADED, reducingSingleThreaded ? "true" : "false" ); if ( getSuppressOutputOfMapKey() ) { jobBuilder.setMapOutputKeyClass( jobBuilder.getHadoopWritableCompatibleClassName( null ) ); } if ( getSuppressOutputOfMapValue() ) { jobBuilder.setMapOutputValueClass( jobBuilder.getHadoopWritableCompatibleClassName( null ) ); } // auto configure the output mapper key and value classes if ( !getSuppressOutputOfMapKey() || !getSuppressOutputOfMapValue() && transMeta != null ) { StepMeta mapOut = transMeta.findStep( mapOutputStepNameS ); if ( mapOut.getStepMetaInterface() instanceof HadoopExitMeta ) { RowMetaInterface prevStepFields = transMeta.getPrevStepFields( mapOut ); if ( !getSuppressOutputOfMapKey() ) { String keyName = ( (HadoopExitMeta) mapOut.getStepMetaInterface() ).getOutKeyFieldname(); int keyI = prevStepFields.indexOfValue( keyName ); ValueMetaInterface keyVM = ( keyI >= 0 ) ? prevStepFields.getValueMeta( keyI ) : null; if ( keyVM == null ) { throw new KettleException( BaseMessages.getString( PKG, "JobEntryHadoopTransJobExecutor.NoMapOutputKeyDefined.Error" ) ); } String hadoopWritableKey = jobBuilder.getHadoopWritableCompatibleClassName( keyVM ); jobBuilder.setMapOutputKeyClass( hadoopWritableKey ); logDebug( BaseMessages.getString( PKG, "JobEntryHadoopTransJobExecutor.Message.MapOutputKeyMessage", hadoopWritableKey ) ); } if ( !getSuppressOutputOfMapValue() ) { String valName = ( (HadoopExitMeta) mapOut.getStepMetaInterface() ).getOutValueFieldname(); int valI = prevStepFields.indexOfValue( valName ); ValueMetaInterface valueVM = ( valI >= 0 ) ? 
prevStepFields.getValueMeta( valI ) : null; if ( valueVM == null ) { throw new KettleException( BaseMessages.getString( PKG, "JobEntryHadoopTransJobExecutor.NoMapOutputValueDefined.Error" ) ); } String hadoopWritableValue = jobBuilder.getHadoopWritableCompatibleClassName( valueVM ); jobBuilder.setMapOutputValueClass( hadoopWritableValue ); logDebug( BaseMessages.getString( PKG, "JobEntryHadoopTransJobExecutor.Message.MapOutputValueMessage", hadoopWritableValue ) ); } } } // combiner transMeta = loadTransMeta( parentJobMeta.getBowl(), this, rep, combinerTrans, combinerRepositoryReference, combinerRepositoryDir, combinerRepositoryFile ); if ( transMeta != null ) { if ( combiningSingleThreaded ) { verifySingleThreadingValidity( transMeta ); } String combinerInputStepNameS = environmentSubstitute( combinerInputStepName ); String combinerOutputStepNameS = environmentSubstitute( combinerOutputStepName ); transConfig = new TransConfiguration( transMeta, transExecConfig ); jobBuilder.setCombinerInfo( transConfig.getXML(), combinerInputStepNameS, combinerOutputStepNameS ); try { jobBuilder.verifyTransMeta( transMeta, combinerInputStepNameS, combinerOutputStepNameS ); } catch ( Exception ex ) { throw new KettleException( BaseMessages.getString( PKG, "JobEntryHadoopTransJobExecutor.CombinerConfiguration.Error" ), ex ); } finally { if ( transMeta != null ) { transMeta.disposeEmbeddedMetastoreProvider(); } } } // reducer transMeta = loadTransMeta( parentJobMeta.getBowl(), this, rep, reduceTrans, reduceRepositoryReference, reduceRepositoryDir, reduceRepositoryFile ); if ( transMeta != null ) { // See if this is a valid single threading reducer // if ( reducingSingleThreaded ) { verifySingleThreadingValidity( transMeta ); } String reduceInputStepNameS = environmentSubstitute( reduceInputStepName ); String reduceOutputStepNameS = environmentSubstitute( reduceOutputStepName ); transConfig = new TransConfiguration( transMeta, transExecConfig ); jobBuilder.setReducerInfo( transConfig.getXML(), reduceInputStepNameS, reduceOutputStepNameS ); try { jobBuilder.verifyTransMeta( transMeta, reduceInputStepNameS, reduceOutputStepNameS ); } catch ( Exception ex ) { throw new KettleException( BaseMessages.getString( PKG, "JobEntryHadoopTransJobExecutor.ReducerConfiguration.Error" ), ex ); } finally { if ( transMeta != null ) { transMeta.disposeEmbeddedMetastoreProvider(); } } if ( getSuppressOutputOfKey() ) { jobBuilder.setOutputKeyClass( jobBuilder.getHadoopWritableCompatibleClassName( null ) ); } if ( getSuppressOutputOfValue() ) { jobBuilder.setOutputValueClass( jobBuilder.getHadoopWritableCompatibleClassName( null ) ); } // auto configure the output reduce key and value classes if ( !getSuppressOutputOfKey() || !getSuppressOutputOfValue() ) { StepMeta reduceOut = transMeta.findStep( reduceOutputStepNameS ); RowMetaInterface prevStepFields = transMeta.getPrevStepFields( reduceOut ); if ( reduceOut.getStepMetaInterface() instanceof HadoopExitMeta ) { String keyName = ( (HadoopExitMeta) reduceOut.getStepMetaInterface() ).getOutKeyFieldname(); String valName = ( (HadoopExitMeta) reduceOut.getStepMetaInterface() ).getOutValueFieldname(); int keyI = prevStepFields.indexOfValue( keyName ); ValueMetaInterface keyVM = ( keyI >= 0 ) ? prevStepFields.getValueMeta( keyI ) : null; int valI = prevStepFields.indexOfValue( valName ); ValueMetaInterface valueVM = ( valI >= 0 ) ? 
prevStepFields.getValueMeta( valI ) : null; if ( !getSuppressOutputOfKey() ) { if ( keyVM == null ) { throw new KettleException( BaseMessages.getString( PKG, "JobEntryHadoopTransJobExecutor.NoOutputKeyDefined.Error" ) ); } String hadoopWritableKey = jobBuilder.getHadoopWritableCompatibleClassName( keyVM ); jobBuilder.setOutputKeyClass( hadoopWritableKey ); logDebug( BaseMessages.getString( PKG, "JobEntryHadoopTransJobExecutor.Message.OutputKeyMessage", hadoopWritableKey ) ); } if ( !getSuppressOutputOfValue() ) { if ( valueVM == null ) { throw new KettleException( BaseMessages.getString( PKG, "JobEntryHadoopTransJobExecutor.NoOutputValueDefined.Error" ) ); } String hadoopWritableValue = jobBuilder.getHadoopWritableCompatibleClassName( valueVM ); jobBuilder.setOutputValueClass( hadoopWritableValue ); logDebug( BaseMessages.getString( PKG, "JobEntryHadoopTransJobExecutor.Message.OutputValueMessage", hadoopWritableValue ) ); } } } } jobBuilder.setInputFormatClass( inputFormatClass ); jobBuilder.setOutputFormatClass( outputFormatClass ); jobBuilder.setInputPaths( splitInputPaths( inputPath, variables ) ); jobBuilder.setOutputPath( environmentSubstitute( outputPath ) ); // process user defined values for ( UserDefinedItem item : userDefined ) { if ( item.getName() != null && !"".equals( item.getName() ) && item.getValue() != null && !"" .equals( item.getValue() ) ) { //$NON-NLS-1$ //$NON-NLS-2$ String nameS = environmentSubstitute( item.getName() ); String valueS = environmentSubstitute( item.getValue() ); jobBuilder.set( nameS, valueS ); } } String numMapTasksS = environmentSubstitute( numMapTasks ); try { if ( Integer.parseInt( numMapTasksS ) < 0 ) { throw new KettleException( BaseMessages.getString( PKG, "JobEntryHadoopTransJobExecutor.NumMapTasks.Error" ) ); } } catch ( NumberFormatException e ) { if ( log.isDebug() ) { logError( Const.getStackTracker( e ) ); } } String numReduceTasksS = environmentSubstitute( numReduceTasks ); try { if ( Integer.parseInt( numReduceTasksS ) < 0 ) { throw new KettleException( BaseMessages .getString( PKG, "JobEntryHadoopTransJobExecutor.NumReduceTasks.Error" ) ); } } catch ( NumberFormatException e ) { if ( log.isDebug() ) { logError( Const.getStackTracker( e ) ); } } jobBuilder.setNumMapTasks( Const.toInt( numMapTasksS, 1 ) ); jobBuilder.setNumReduceTasks( Const.toInt( numReduceTasksS, 1 ) ); jobBuilder.setLogLevel( getLogLevel() ); jobBuilder.setCleanOutputPath( isCleanOutputPath() ); MapReduceJobAdvanced runningJob = jobBuilder.submit(); String loggingIntervalS = environmentSubstitute( loggingInterval ); int logIntv = 60; try { logIntv = Integer.parseInt( loggingIntervalS ); } catch ( NumberFormatException e ) { logError( "Can't parse logging interval '" + loggingIntervalS + "'. 
Setting " + "logging interval to 60" ); } if ( blocking ) { try { int taskCompletionEventIndex = 0; while ( !parentJob.isStopped() && !runningJob.isComplete() ) { if ( logIntv >= 1 ) { printJobStatus( runningJob ); taskCompletionEventIndex += logTaskMessages( runningJob, taskCompletionEventIndex ); Thread.sleep( logIntv * 1000 ); } else { Thread.sleep( 60000 ); } } if ( parentJob.isStopped() && !runningJob.isComplete() ) { // We must stop the job running on Hadoop runningJob.killJob(); // Indicate this job entry did not complete result.setResult( false ); } printJobStatus( runningJob ); // Log any messages we may have missed while polling logTaskMessages( runningJob, taskCompletionEventIndex ); } catch ( InterruptedException ie ) { logError( ie.getMessage(), ie ); } // Entry is successful if the MR job is successful overall result.setResult( runningJob.isSuccessful() ); } } catch ( Throwable t ) { t.printStackTrace(); result.setStopped( true ); result.setNrErrors( 1 ); result.setResult( false ); logError( Const.NVL( t.getMessage(), "" ), t ); } return result; } /** * Log messages indicating completion (success/failure) of component tasks for the provided running job. * * @param runningJob * Running job to poll for completion events * @param startIndex * Start at this event index to poll from * @return Total events consumed * @throws IOException * Error fetching events */ private int logTaskMessages( MapReduceJobAdvanced runningJob, int startIndex ) throws IOException { TaskCompletionEvent[] tcEvents = runningJob.getTaskCompletionEvents( startIndex ); for ( int i = 0; i < tcEvents.length; i++ ) { String[] diags = runningJob.getTaskDiagnostics( tcEvents[ i ].getTaskAttemptId() ); StringBuilder diagsOutput = new StringBuilder(); if ( diags != null && diags.length > 0 ) { diagsOutput.append( Const.CR ); for ( String s : diags ) { diagsOutput.append( s ); diagsOutput.append( Const.CR ); } } TaskCompletionEvent.Status status = tcEvents[ i ].getTaskStatus(); switch ( tcEvents[ i ].getTaskStatus() ) { case KILLED: case FAILED: case TIPFAILED: logError( BaseMessages .getString( PKG, "JobEntryHadoopTransJobExecutor.TaskDetails", status, tcEvents[ i ].getTaskAttemptId(), tcEvents[ i ].getTaskAttemptId(), tcEvents[ i ].getEventId(), diagsOutput ) ); //$NON-NLS-1$ break; case SUCCEEDED: case OBSOLETE: logDetailed( BaseMessages .getString( PKG, "JobEntryHadoopTransJobExecutor.TaskDetails", TaskCompletionEvent.Status.SUCCEEDED, tcEvents[ i ].getTaskAttemptId(), tcEvents[ i ].getTaskAttemptId(), tcEvents[ i ].getEventId(), diagsOutput ) ); //$NON-NLS-1$ break; default: logError( BaseMessages .getString( PKG, "JobEntryHadoopTransJobExecutor.TaskDetails", "UNKNOWN", tcEvents[ i ].getTaskAttemptId(), tcEvents[ i ].getTaskAttemptId(), tcEvents[ i ].getEventId(), diagsOutput ) ); //$NON-NLS-1$ } } return tcEvents.length; } /** * @return the plugin interface for this job entry. 
*/ public PluginInterface getPluginInterface() { String pluginId = PluginRegistry.getInstance().getPluginId( this ); return PluginRegistry.getInstance().findPluginWithId( JobEntryPluginType.class, pluginId ); } private void verifySingleThreadingValidity( TransMeta transMeta ) throws KettleException { for ( StepMeta stepMeta : transMeta.getSteps() ) { TransformationType[] types = stepMeta.getStepMetaInterface().getSupportedTransformationTypes(); boolean ok = false; for ( TransformationType type : types ) { if ( type == TransformationType.SingleThreaded ) { ok = true; } } if ( !ok ) { throw new KettleException( "Step '" + stepMeta.getName() + "' of type '" + stepMeta.getStepID() + "' is not supported in a Single Threaded transformation engine." ); } } } public void printJobStatus( MapReduceJobAdvanced runningJob ) throws IOException { if ( log.isBasic() ) { double setupPercent = runningJob.getSetupProgress() * 100f; double mapPercent = runningJob.getMapProgress() * 100f; double reducePercent = runningJob.getReduceProgress() * 100f; logBasic( BaseMessages.getString( PKG, "JobEntryHadoopTransJobExecutor.RunningPercent", setupPercent, mapPercent, reducePercent ) ); //$NON-NLS-1$ } } @Override public void loadXML( Node entrynode, List databases, List slaveServers, Repository rep, IMetaStore metaStore ) throws KettleXMLException { super.loadXML( entrynode, databases, slaveServers ); hadoopJobName = XMLHandler.getTagValue( entrynode, "hadoop_job_name" ); //$NON-NLS-1$ mapRepositoryDir = XMLHandler.getTagValue( entrynode, "map_trans_repo_dir" ); //$NON-NLS-1$ mapRepositoryFile = XMLHandler.getTagValue( entrynode, "map_trans_repo_file" ); //$NON-NLS-1$ String mapTransId = XMLHandler.getTagValue( entrynode, "map_trans_repo_reference" ); //$NON-NLS-1$ mapRepositoryReference = Const.isEmpty( mapTransId ) ? null : new StringObjectId( mapTransId ); mapTrans = XMLHandler.getTagValue( entrynode, "map_trans" ); //$NON-NLS-1$ combinerRepositoryDir = XMLHandler.getTagValue( entrynode, "combiner_trans_repo_dir" ); //$NON-NLS-1$ combinerRepositoryFile = XMLHandler.getTagValue( entrynode, "combiner_trans_repo_file" ); //$NON-NLS-1$ String combinerTransId = XMLHandler.getTagValue( entrynode, "combiner_trans_repo_reference" ); //$NON-NLS-1$ combinerRepositoryReference = Const.isEmpty( combinerTransId ) ? null : new StringObjectId( combinerTransId ); combinerTrans = XMLHandler.getTagValue( entrynode, "combiner_trans" ); //$NON-NLS-1$ final String combinerSingleThreaded = XMLHandler.getTagValue( entrynode, "combiner_single_threaded" ); //$NON-NLS-1$ if ( !Const.isEmpty( combinerSingleThreaded ) ) { setCombiningSingleThreaded( "Y".equalsIgnoreCase( combinerSingleThreaded ) ); //$NON-NLS-1$ } else { setCombiningSingleThreaded( false ); } reduceRepositoryDir = XMLHandler.getTagValue( entrynode, "reduce_trans_repo_dir" ); //$NON-NLS-1$ reduceRepositoryFile = XMLHandler.getTagValue( entrynode, "reduce_trans_repo_file" ); //$NON-NLS-1$ String reduceTransId = XMLHandler.getTagValue( entrynode, "reduce_trans_repo_reference" ); //$NON-NLS-1$ reduceRepositoryReference = Const.isEmpty( reduceTransId ) ? 
null : new StringObjectId( reduceTransId ); reduceTrans = XMLHandler.getTagValue( entrynode, "reduce_trans" ); //$NON-NLS-1$ String single = XMLHandler.getTagValue( entrynode, "reduce_single_threaded" ); //$NON-NLS-1$ if ( Const.isEmpty( single ) ) { setReducingSingleThreaded( false ); } else { setReducingSingleThreaded( "Y".equalsIgnoreCase( single ) ); //$NON-NLS-1$ } mapInputStepName = XMLHandler.getTagValue( entrynode, "map_input_step_name" ); //$NON-NLS-1$ mapOutputStepName = XMLHandler.getTagValue( entrynode, "map_output_step_name" ); //$NON-NLS-1$ combinerInputStepName = XMLHandler.getTagValue( entrynode, "combiner_input_step_name" ); //$NON-NLS-1$ combinerOutputStepName = XMLHandler.getTagValue( entrynode, "combiner_output_step_name" ); //$NON-NLS-1$ reduceInputStepName = XMLHandler.getTagValue( entrynode, "reduce_input_step_name" ); //$NON-NLS-1$ reduceOutputStepName = XMLHandler.getTagValue( entrynode, "reduce_output_step_name" ); //$NON-NLS-1$ blocking = "Y".equalsIgnoreCase( XMLHandler.getTagValue( entrynode, "blocking" ) ); //$NON-NLS-1$ //$NON-NLS-2$ loggingInterval = XMLHandler.getTagValue( entrynode, "logging_interval" ); //$NON-NLS-1$ inputPath = XMLHandler.getTagValue( entrynode, "input_path" ); //$NON-NLS-1$ inputFormatClass = XMLHandler.getTagValue( entrynode, "input_format_class" ); //$NON-NLS-1$ outputPath = XMLHandler.getTagValue( entrynode, "output_path" ); //$NON-NLS-1$ final String cleanOutputPath = XMLHandler.getTagValue( entrynode, "clean_output_path" ); if ( !Const.isEmpty( cleanOutputPath ) ) { //$NON-NLS-1$ setCleanOutputPath( cleanOutputPath.equalsIgnoreCase( "Y" ) ); //$NON-NLS-1$ //$NON-NLS-2$ } if ( !Const.isEmpty( XMLHandler.getTagValue( entrynode, "suppress_output_map_key" ) ) ) { suppressOutputMapKey = XMLHandler.getTagValue( entrynode, "suppress_output_map_key" ).equalsIgnoreCase( "Y" ); } if ( !Const.isEmpty( XMLHandler.getTagValue( entrynode, "suppress_output_map_value" ) ) ) { suppressOutputMapValue = XMLHandler.getTagValue( entrynode, "suppress_output_map_value" ).equalsIgnoreCase( "Y" ); } if ( !Const.isEmpty( XMLHandler.getTagValue( entrynode, "suppress_output_key" ) ) ) { suppressOutputKey = XMLHandler.getTagValue( entrynode, "suppress_output_key" ).equalsIgnoreCase( "Y" ); } if ( !Const.isEmpty( XMLHandler.getTagValue( entrynode, "suppress_output_value" ) ) ) { suppressOutputValue = XMLHandler.getTagValue( entrynode, "suppress_output_value" ).equalsIgnoreCase( "Y" ); } outputFormatClass = XMLHandler.getTagValue( entrynode, "output_format_class" ); //$NON-NLS-1$ namedCluster = namedClusterLoadSaveUtil.loadClusterConfig( namedClusterService, null, rep, metaStore, entrynode, log ); setRepository( rep ); numMapTasks = XMLHandler.getTagValue( entrynode, "num_map_tasks" ); //$NON-NLS-1$ numReduceTasks = XMLHandler.getTagValue( entrynode, "num_reduce_tasks" ); //$NON-NLS-1$ // How many user defined elements? 
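// For reference, the user-defined entries below are read from a fragment shaped roughly like this
// (tag names come from the getSubNode/countNodes/getTagValue calls that follow; the property shown
// is only an example):
//   <user_defined_list>
//     <user_defined>
//       <name>mapreduce.job.queuename</name>
//       <value>etl</value>
//     </user_defined>
//   </user_defined_list>
// Each name/value pair is later passed, after variable substitution, to jobBuilder.set( name, value )
// when the entry executes.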
userDefined = new ArrayList(); Node userDefinedList = XMLHandler.getSubNode( entrynode, "user_defined_list" ); //$NON-NLS-1$ int nrUserDefined = XMLHandler.countNodes( userDefinedList, "user_defined" ); //$NON-NLS-1$ for ( int i = 0; i < nrUserDefined; i++ ) { Node userDefinedNode = XMLHandler.getSubNodeByNr( userDefinedList, "user_defined", i ); //$NON-NLS-1$ String name = XMLHandler.getTagValue( userDefinedNode, "name" ); //$NON-NLS-1$ String value = XMLHandler.getTagValue( userDefinedNode, "value" ); //$NON-NLS-1$ UserDefinedItem item = new UserDefinedItem(); item.setName( name ); item.setValue( value ); userDefined.add( item ); } } @Override public String getXML() { StringBuilder retval = new StringBuilder( 1024 ); retval.append( super.getXML() ); retval.append( " " ) .append( XMLHandler.addTagValue( "hadoop_job_name", hadoopJobName ) ); //$NON-NLS-1$ //$NON-NLS-2$ retval.append( " " ) .append( XMLHandler.addTagValue( "map_trans_repo_dir", mapRepositoryDir ) ); //$NON-NLS-1$ //$NON-NLS-2$ retval.append( " " ) .append( XMLHandler.addTagValue( "map_trans_repo_file", mapRepositoryFile ) ); //$NON-NLS-1$ //$NON-NLS-2$ retval .append( " " ).append( XMLHandler.addTagValue( "map_trans_repo_reference", mapRepositoryReference == null ? null : mapRepositoryReference.toString() ) ); //$NON-NLS-1$ //$NON-NLS-2$ retval.append( " " ).append( XMLHandler.addTagValue( "map_trans", mapTrans ) ); //$NON-NLS-1$ //$NON-NLS-2$ retval.append( " " ) .append( XMLHandler.addTagValue( "combiner_trans_repo_dir", combinerRepositoryDir ) ); //$NON-NLS-1$ //$NON-NLS-2$ retval.append( " " ).append( XMLHandler.addTagValue( "combiner_trans_repo_file", combinerRepositoryFile ) ); //$NON-NLS-1$ //$NON-NLS-2$ retval .append( " " ).append( XMLHandler.addTagValue( "combiner_trans_repo_reference", combinerRepositoryReference == null ? null : combinerRepositoryReference.toString() ) ); //$NON-NLS-1$ //$NON-NLS-2$ retval.append( " " ); retval.append( " " ) .append( XMLHandler.addTagValue( "combiner_trans", combinerTrans ) ); //$NON-NLS-1$ //$NON-NLS-2$ retval.append( " " ).append( XMLHandler.addTagValue( "combiner_single_threaded", combiningSingleThreaded ) ); //$NON-NLS-1$ //$NON-NLS-2$ retval.append( " " ) .append( XMLHandler.addTagValue( "reduce_trans_repo_dir", reduceRepositoryDir ) ); //$NON-NLS-1$ //$NON-NLS-2$ retval.append( " " ) .append( XMLHandler.addTagValue( "reduce_trans_repo_file", reduceRepositoryFile ) ); //$NON-NLS-1$ //$NON-NLS-2$ retval .append( " " ).append( XMLHandler.addTagValue( "reduce_trans_repo_reference", reduceRepositoryReference == null ? 
null : reduceRepositoryReference.toString() ) ); //$NON-NLS-1$ //$NON-NLS-2$ retval.append( " " ) .append( XMLHandler.addTagValue( "reduce_trans", reduceTrans ) ); //$NON-NLS-1$ //$NON-NLS-2$ retval.append( " " ) .append( XMLHandler.addTagValue( "reduce_single_threaded", reducingSingleThreaded ) ); //$NON-NLS-1$ //$NON-NLS-2$ retval.append( " " ) .append( XMLHandler.addTagValue( "map_input_step_name", mapInputStepName ) ); //$NON-NLS-1$ //$NON-NLS-2$ retval.append( " " ) .append( XMLHandler.addTagValue( "map_output_step_name", mapOutputStepName ) ); //$NON-NLS-1$ //$NON-NLS-2$ retval.append( " " ).append( XMLHandler.addTagValue( "combiner_input_step_name", combinerInputStepName ) ); //$NON-NLS-1$ //$NON-NLS-2$ retval.append( " " ).append( XMLHandler.addTagValue( "combiner_output_step_name", combinerOutputStepName ) ); //$NON-NLS-1$ //$NON-NLS-2$ retval.append( " " ) .append( XMLHandler.addTagValue( "reduce_input_step_name", reduceInputStepName ) ); //$NON-NLS-1$ //$NON-NLS-2$ retval.append( " " ) .append( XMLHandler.addTagValue( "reduce_output_step_name", reduceOutputStepName ) ); //$NON-NLS-1$ //$NON-NLS-2$ retval.append( " " ).append( XMLHandler.addTagValue( "blocking", blocking ) ); //$NON-NLS-1$ //$NON-NLS-2$ retval.append( " " ) .append( XMLHandler.addTagValue( "logging_interval", loggingInterval ) ); //$NON-NLS-1$ //$NON-NLS-2$ retval.append( " " ).append( XMLHandler.addTagValue( "input_path", inputPath ) ); //$NON-NLS-1$ //$NON-NLS-2$ retval.append( " " ) .append( XMLHandler.addTagValue( "input_format_class", inputFormatClass ) ); //$NON-NLS-1$ //$NON-NLS-2$ retval.append( " " ).append( XMLHandler.addTagValue( "output_path", outputPath ) ); //$NON-NLS-1$ //$NON-NLS-2$ retval.append( " " ) .append( XMLHandler.addTagValue( "clean_output_path", cleanOutputPath ) ); //$NON-NLS-1$ //$NON-NLS-2$ retval.append( " " ) .append( XMLHandler.addTagValue( "suppress_output_map_key", suppressOutputMapKey ) ); //$NON-NLS-1$ //$NON-NLS-2$ retval.append( " " ).append( XMLHandler.addTagValue( "suppress_output_map_value", suppressOutputMapValue ) ); //$NON-NLS-1$ //$NON-NLS-2$ retval.append( " " ) .append( XMLHandler.addTagValue( "suppress_output_key", suppressOutputKey ) ); //$NON-NLS-1$ //$NON-NLS-2$ retval.append( " " ) .append( XMLHandler.addTagValue( "suppress_output_value", suppressOutputValue ) ); //$NON-NLS-1$ //$NON-NLS-2$ retval.append( " " ) .append( XMLHandler.addTagValue( "output_format_class", outputFormatClass ) ); //$NON-NLS-1$ //$NON-NLS-2$ namedClusterLoadSaveUtil.getXmlNamedCluster( namedCluster, namedClusterService, metaStore, log, retval ); retval.append( " " ) .append( XMLHandler.addTagValue( "num_map_tasks", numMapTasks ) ); //$NON-NLS-1$ //$NON-NLS-2$ retval.append( " " ) .append( XMLHandler.addTagValue( "num_reduce_tasks", numReduceTasks ) ); //$NON-NLS-1$ //$NON-NLS-2$ retval.append( " " ).append( Const.CR ); //$NON-NLS-1$ if ( userDefined != null ) { for ( UserDefinedItem item : userDefined ) { if ( item.getName() != null && !"".equals( item.getName() ) && item.getValue() != null && !"" .equals( item.getValue() ) ) { //$NON-NLS-1$ //$NON-NLS-2$ retval.append( " " ).append( Const.CR ); //$NON-NLS-1$ retval.append( " " ) .append( XMLHandler.addTagValue( "name", item.getName() ) ); //$NON-NLS-1$ //$NON-NLS-2$ retval.append( " " ) .append( XMLHandler.addTagValue( "value", item.getValue() ) ); //$NON-NLS-1$ //$NON-NLS-2$ retval.append( " " ).append( Const.CR ); //$NON-NLS-1$ } } } retval.append( " " ).append( Const.CR ); //$NON-NLS-1$ return retval.toString(); } @Override public 
void loadRep( Repository rep, IMetaStore metaStore, ObjectId id_jobentry, List databases, List slaveServers ) throws KettleException { if ( rep != null ) { setHadoopJobName( rep.getJobEntryAttributeString( id_jobentry, "hadoop_job_name" ) ); //$NON-NLS-1$ setMapRepositoryDir( rep.getJobEntryAttributeString( id_jobentry, "map_trans_repo_dir" ) ); //$NON-NLS-1$ setMapRepositoryFile( rep.getJobEntryAttributeString( id_jobentry, "map_trans_repo_file" ) ); //$NON-NLS-1$ String mapTransId = rep.getJobEntryAttributeString( id_jobentry, "map_trans_repo_reference" ); //$NON-NLS-1$ setMapRepositoryReference( Const.isEmpty( mapTransId ) ? null : new StringObjectId( mapTransId ) ); setMapTrans( rep.getJobEntryAttributeString( id_jobentry, "map_trans" ) ); //$NON-NLS-1$ setReduceRepositoryDir( rep.getJobEntryAttributeString( id_jobentry, "reduce_trans_repo_dir" ) ); //$NON-NLS-1$ setReduceRepositoryFile( rep.getJobEntryAttributeString( id_jobentry, "reduce_trans_repo_file" ) ); //$NON-NLS-1$ String reduceTransId = rep.getJobEntryAttributeString( id_jobentry, "reduce_trans_repo_reference" ); //$NON-NLS-1$ setReduceRepositoryReference( Const.isEmpty( reduceTransId ) ? null : new StringObjectId( reduceTransId ) ); setReduceTrans( rep.getJobEntryAttributeString( id_jobentry, "reduce_trans" ) ); //$NON-NLS-1$ setReducingSingleThreaded( rep.getJobEntryAttributeBoolean( id_jobentry, "reduce_single_threaded", false ) ); //$NON-NLS-1$ setCombinerRepositoryDir( rep.getJobEntryAttributeString( id_jobentry, "combiner_trans_repo_dir" ) ); //$NON-NLS-1$ setCombinerRepositoryFile( rep.getJobEntryAttributeString( id_jobentry, "combiner_trans_repo_file" ) ); //$NON-NLS-1$ String combinerTransId = rep.getJobEntryAttributeString( id_jobentry, "combiner_trans_repo_reference" ); //$NON-NLS-1$ setCombinerRepositoryReference( Const.isEmpty( combinerTransId ) ? 
null : new StringObjectId( combinerTransId ) ); setCombinerTrans( rep.getJobEntryAttributeString( id_jobentry, "combiner_trans" ) ); //$NON-NLS-1$ setCombiningSingleThreaded( rep.getJobEntryAttributeBoolean( id_jobentry, "combiner_single_threaded", false ) ); //$NON-NLS-1$ setMapInputStepName( rep.getJobEntryAttributeString( id_jobentry, "map_input_step_name" ) ); //$NON-NLS-1$ setMapOutputStepName( rep.getJobEntryAttributeString( id_jobentry, "map_output_step_name" ) ); //$NON-NLS-1$ setCombinerInputStepName( rep.getJobEntryAttributeString( id_jobentry, "combiner_input_step_name" ) ); //$NON-NLS-1$ setCombinerOutputStepName( rep.getJobEntryAttributeString( id_jobentry, "combiner_output_step_name" ) ); //$NON-NLS-1$ setReduceInputStepName( rep.getJobEntryAttributeString( id_jobentry, "reduce_input_step_name" ) ); //$NON-NLS-1$ setReduceOutputStepName( rep.getJobEntryAttributeString( id_jobentry, "reduce_output_step_name" ) ); //$NON-NLS-1$ setBlocking( rep.getJobEntryAttributeBoolean( id_jobentry, "blocking" ) ); //$NON-NLS-1$ setLoggingInterval( rep.getJobEntryAttributeString( id_jobentry, "logging_interval" ) ); //$NON-NLS-1$ setInputPath( rep.getJobEntryAttributeString( id_jobentry, "input_path" ) ); //$NON-NLS-1$ setInputFormatClass( rep.getJobEntryAttributeString( id_jobentry, "input_format_class" ) ); //$NON-NLS-1$ setOutputPath( rep.getJobEntryAttributeString( id_jobentry, "output_path" ) ); //$NON-NLS-1$ setCleanOutputPath( rep.getJobEntryAttributeBoolean( id_jobentry, "clean_output_path" ) ); //$NON-NLS-1$ setSuppressOutputOfMapKey( rep.getJobEntryAttributeBoolean( id_jobentry, "suppress_output_map_key" ) ); //$NON-NLS-1$ setSuppressOutputOfMapValue( rep.getJobEntryAttributeBoolean( id_jobentry, "suppress_output_map_value" ) ); //$NON-NLS-1$ setSuppressOutputOfKey( rep.getJobEntryAttributeBoolean( id_jobentry, "suppress_output_key" ) ); //$NON-NLS-1$ setSuppressOutputOfValue( rep.getJobEntryAttributeBoolean( id_jobentry, "suppress_output_value" ) ); //$NON-NLS-1$ setOutputFormatClass( rep.getJobEntryAttributeString( id_jobentry, "output_format_class" ) ); //$NON-NLS-1$ namedCluster = namedClusterLoadSaveUtil.loadClusterConfig( namedClusterService, id_jobentry, rep, metaStore, null, log ); setRepository( rep ); setNumMapTasks( rep.getJobEntryAttributeString( id_jobentry, "num_map_tasks" ) ); setNumReduceTasks( rep.getJobEntryAttributeString( id_jobentry, "num_reduce_tasks" ) ); int argnr = rep.countNrJobEntryAttributes( id_jobentry, "user_defined_name" ); //$NON-NLS-1$ if ( argnr > 0 ) { userDefined = new ArrayList(); UserDefinedItem item = null; for ( int i = 0; i < argnr; i++ ) { item = new UserDefinedItem(); item.setName( rep.getJobEntryAttributeString( id_jobentry, i, "user_defined_name" ) ); //$NON-NLS-1$ item.setValue( rep.getJobEntryAttributeString( id_jobentry, i, "user_defined_value" ) ); //$NON-NLS-1$ userDefined.add( item ); } } } else { throw new KettleException( "Unable to save to a repository. The repository is null." ); //$NON-NLS-1$ } } public void saveRep( Repository rep, ObjectId id_job ) throws KettleException { if ( rep != null ) { rep.saveJobEntryAttribute( id_job, getObjectId(), "hadoop_job_name", hadoopJobName ); //$NON-NLS-1$ rep.saveJobEntryAttribute( id_job, getObjectId(), "map_trans_repo_dir", mapRepositoryDir ); //$NON-NLS-1$ rep.saveJobEntryAttribute( id_job, getObjectId(), "map_trans_repo_file", mapRepositoryFile ); //$NON-NLS-1$ rep.saveJobEntryAttribute( id_job, getObjectId(), "map_trans_repo_reference", mapRepositoryReference == null ? 
null : mapRepositoryReference.toString() ); //$NON-NLS-1$ mapTrans = mapTrans != null && mapTrans.endsWith( KTR_EXT ) ? mapTrans.replace( KTR_EXT, "" ) : mapTrans; rep.saveJobEntryAttribute( id_job, getObjectId(), "map_trans", mapTrans ); //$NON-NLS-1$ rep.saveJobEntryAttribute( id_job, getObjectId(), "reduce_trans_repo_dir", reduceRepositoryDir ); //$NON-NLS-1$ rep.saveJobEntryAttribute( id_job, getObjectId(), "reduce_trans_repo_file", reduceRepositoryFile ); //$NON-NLS-1$ rep.saveJobEntryAttribute( id_job, getObjectId(), "reduce_trans_repo_reference", reduceRepositoryReference == null ? null : reduceRepositoryReference.toString() ); //$NON-NLS-1$ reduceTrans = reduceTrans != null && reduceTrans.endsWith( KTR_EXT ) ? reduceTrans.replace( KTR_EXT, "" ) : reduceTrans; rep.saveJobEntryAttribute( id_job, getObjectId(), "reduce_trans", reduceTrans ); //$NON-NLS-1$ rep.saveJobEntryAttribute( id_job, getObjectId(), "reduce_single_threaded", reducingSingleThreaded ); //$NON-NLS-1$ rep.saveJobEntryAttribute( id_job, getObjectId(), "combiner_trans_repo_dir", combinerRepositoryDir ); //$NON-NLS-1$ rep.saveJobEntryAttribute( id_job, getObjectId(), "combiner_trans_repo_file", combinerRepositoryFile ); //$NON-NLS-1$ rep.saveJobEntryAttribute( id_job, getObjectId(), "combiner_trans_repo_reference", combinerRepositoryReference == null ? null : combinerRepositoryReference.toString() ); //$NON-NLS-1$ combinerTrans = combinerTrans != null && combinerTrans.endsWith( KTR_EXT ) ? combinerTrans.replace( KTR_EXT, "" ) : combinerTrans; rep.saveJobEntryAttribute( id_job, getObjectId(), "combiner_trans", combinerTrans ); //$NON-NLS-1$ rep.saveJobEntryAttribute( id_job, getObjectId(), "combiner_single_threaded", combiningSingleThreaded ); //$NON-NLS-1$ rep.saveJobEntryAttribute( id_job, getObjectId(), "map_input_step_name", mapInputStepName ); //$NON-NLS-1$ rep.saveJobEntryAttribute( id_job, getObjectId(), "map_output_step_name", mapOutputStepName ); //$NON-NLS-1$ rep.saveJobEntryAttribute( id_job, getObjectId(), "combiner_input_step_name", combinerInputStepName ); //$NON-NLS-1$ rep.saveJobEntryAttribute( id_job, getObjectId(), "combiner_output_step_name", combinerOutputStepName ); //$NON-NLS-1$ rep.saveJobEntryAttribute( id_job, getObjectId(), "reduce_input_step_name", reduceInputStepName ); //$NON-NLS-1$ rep.saveJobEntryAttribute( id_job, getObjectId(), "reduce_output_step_name", reduceOutputStepName ); //$NON-NLS-1$ rep.saveJobEntryAttribute( id_job, getObjectId(), "blocking", blocking ); //$NON-NLS-1$ rep.saveJobEntryAttribute( id_job, getObjectId(), "logging_interval", loggingInterval ); //$NON-NLS-1$ rep.saveJobEntryAttribute( id_job, getObjectId(), "input_path", inputPath ); //$NON-NLS-1$ rep.saveJobEntryAttribute( id_job, getObjectId(), "input_format_class", inputFormatClass ); //$NON-NLS-1$ rep.saveJobEntryAttribute( id_job, getObjectId(), "output_path", outputPath ); //$NON-NLS-1$ rep.saveJobEntryAttribute( id_job, getObjectId(), "clean_output_path", cleanOutputPath ); //$NON-NLS-1$ rep.saveJobEntryAttribute( id_job, getObjectId(), "suppress_output_map_key", suppressOutputMapKey ); //$NON-NLS-1$ rep.saveJobEntryAttribute( id_job, getObjectId(), "suppress_output_map_value", suppressOutputMapValue ); //$NON-NLS-1$ rep.saveJobEntryAttribute( id_job, getObjectId(), "suppress_output_key", suppressOutputKey ); //$NON-NLS-1$ rep.saveJobEntryAttribute( id_job, getObjectId(), "suppress_output_value", suppressOutputValue ); //$NON-NLS-1$ rep.saveJobEntryAttribute( id_job, getObjectId(), "output_format_class", 
outputFormatClass ); //$NON-NLS-1$ namedClusterLoadSaveUtil .saveNamedClusterRep( namedCluster, namedClusterService, rep, metaStore, id_job, getObjectId(), log ); rep.saveJobEntryAttribute( id_job, getObjectId(), "num_map_tasks", numMapTasks ); //$NON-NLS-1$ rep.saveJobEntryAttribute( id_job, getObjectId(), "num_reduce_tasks", numReduceTasks ); //$NON-NLS-1$ if ( userDefined != null ) { for ( int i = 0; i < userDefined.size(); i++ ) { UserDefinedItem item = userDefined.get( i ); if ( item.getName() != null && !"".equals( item.getName() ) && item.getValue() != null && !"" .equals( item.getValue() ) ) { //$NON-NLS-1$ //$NON-NLS-2$ rep.saveJobEntryAttribute( id_job, getObjectId(), i, "user_defined_name", item.getName() ); //$NON-NLS-1$ rep.saveJobEntryAttribute( id_job, getObjectId(), i, "user_defined_value", item.getValue() ); //$NON-NLS-1$ } } } } else { throw new KettleException( "Unable to save to a repository. The repository is null." ); //$NON-NLS-1$ } } public boolean evaluates() { return true; } public boolean isUnconditional() { return true; } /** * @return the reduceSingleThreaded */ public boolean isReducingSingleThreaded() { return reducingSingleThreaded; } /** * @param reducingSingleThreaded * the reducing single threaded to set */ public void setReducingSingleThreaded( boolean reducingSingleThreaded ) { this.reducingSingleThreaded = reducingSingleThreaded; } public boolean isCombiningSingleThreaded() { return combiningSingleThreaded; } public void setCombiningSingleThreaded( boolean combiningSingleThreaded ) { this.combiningSingleThreaded = combiningSingleThreaded; } private boolean hasMapperDefinition() { return !Const.isEmpty( mapTrans ) || mapRepositoryReference != null || ( !Const.isEmpty( mapRepositoryDir ) && !Const.isEmpty( mapRepositoryFile ) ); } private boolean hasReducerDefinition() { return !Const.isEmpty( reduceTrans ) || reduceRepositoryReference != null || ( !Const.isEmpty( reduceRepositoryDir ) && !Const.isEmpty( reduceRepositoryFile ) ); } private boolean hasCombinerDefinition() { return !Const.isEmpty( combinerTrans ) || combinerRepositoryReference != null || ( !Const.isEmpty( combinerRepositoryDir ) && !Const.isEmpty( combinerRepositoryFile ) ); } private ObjectLocationSpecificationMethod defineSpecificationMethod( String repDir, String repFileName, ObjectId reference ) { if ( reference != null ) { return ObjectLocationSpecificationMethod.REPOSITORY_BY_REFERENCE; } return ObjectLocationSpecificationMethod.REPOSITORY_BY_NAME; } /** * @return The objects referenced in the step, like a a transformation, a job, a mapper, a reducer, a combiner, ... */ public String[] getReferencedObjectDescriptions() { return new String[] { BaseMessages.getString( PKG, "JobEntryHadoopTransJobExecutor.ReferencedObject.Mapper" ), BaseMessages.getString( PKG, "JobEntryHadoopTransJobExecutor.ReferencedObject.Combiner" ), BaseMessages.getString( PKG, "JobEntryHadoopTransJobExecutor.ReferencedObject.Reducer" ), }; } /** * @return true for each referenced object that is enabled or has a valid reference definition. 
*/ public boolean[] isReferencedObjectEnabled() { return new boolean[] { hasMapperDefinition(), hasCombinerDefinition(), hasReducerDefinition(), }; } /** * Load the referenced object * * @param index * the referenced object index to load (in case there are multiple references) * @param rep * the repository * @param space * the variable space to use * @return the referenced object once loaded * @throws KettleException */ public Object loadReferencedObject( Bowl bowl, int index, Repository rep, IMetaStore metaStore, VariableSpace space ) throws KettleException { switch ( index ) { case 0: return loadTransMeta( bowl, space, rep, mapTrans, mapRepositoryReference, mapRepositoryDir, mapRepositoryFile ); case 1: return loadTransMeta( bowl, space, rep, combinerTrans, combinerRepositoryReference, combinerRepositoryDir, combinerRepositoryFile ); case 2: return loadTransMeta( bowl, space, rep, reduceTrans, reduceRepositoryReference, reduceRepositoryDir, reduceRepositoryFile ); } return null; } /** * Exports the object to a flat-file system, adding content with filename keys to a set of definitions. The supplied * resource naming interface allows the object to name appropriately without worrying about those parts of the * implementation specific details. * * @param executionBowl * For file access * @param globalManagementBowl * if needed for access to the current "global" (System or Repository) level config for export. If null, no * global config will be exported. * @param space * The variable space to resolve (environment) variables with. * @param definitions * The map containing the filenames and content * @param namingInterface * The resource naming interface allows the object to be named appropriately * @param repository * The repository to load resources from * @param metaStore * the metaStore to load external metadata from * @return The filename for this object. (also contained in the definitions map) * @throws KettleException * in case something goes wrong during the export */ @Override public String exportResources( Bowl executionBowl, Bowl globalManagementBowl, VariableSpace space, Map definitions, ResourceNamingInterface namingInterface, Repository repository, IMetaStore metaStore ) throws KettleException { // Try to load the transformation from repository or file. // Modify this recursively too... // // AGAIN: there is no need to clone this job entry because the caller is responsible for this. copyVariablesFrom( space ); boolean[] enabled = isReferencedObjectEnabled(); TransMeta transMeta; for ( int i = 0; i < enabled.length; i++ ) { if ( enabled[ i ] ) { // // First load the transformation metadata... // transMeta = (TransMeta) loadReferencedObject( executionBowl, i, repository, metaStore, space ); // Also go down into the transformation and export the files there. (mapping recursively down) // String proposedNewFilename = transMeta.exportResources( executionBowl, globalManagementBowl, transMeta, definitions, namingInterface, repository, metaStore ); // To get a relative path to it, we inject ${Internal.Job.Filename.Directory} // String newFilename = "${" + Const.INTERNAL_VARIABLE_JOB_FILENAME_DIRECTORY + "}/" + proposedNewFilename; // Set the correct filename inside the XML. // transMeta.setFilename( newFilename ); // exports always reside in the root directory, in case we want to turn this into a file repository... 
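// For example, a mapper transformation exported under the (hypothetical) name "mapper.ktr" ends up
// being referenced as ${Internal.Job.Filename.Directory}/mapper.ktr via the newFilename assignment
// above; the actual file name is produced by the resource naming interface.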
// transMeta.setRepositoryDirectory( new RepositoryDirectory() ); // export to filename ALWAYS (this allows the exported XML to be executed remotely) // change it in the job entry setSpecificationMethodAndValue( i, ObjectLocationSpecificationMethod.FILENAME, newFilename, null, null ); } } return getHadoopJobName(); } private void setSpecificationMethodAndValue( int i, ObjectLocationSpecificationMethod specification, String filename, String repositoryDir, ObjectId referrence ) { switch ( specification ) { case FILENAME: { switch ( i ) { case 0: { setMapTrans( filename ); break; } case 1: { setCombinerTrans( filename ); break; } case 2: { setReduceTrans( filename ); break; } } break; } case REPOSITORY_BY_NAME: { switch ( i ) { case 0: { setMapRepositoryDir( repositoryDir ); setMapRepositoryFile( filename ); break; } case 1: { setCombinerRepositoryDir( repositoryDir ); setCombinerRepositoryFile( filename ); break; } case 2: { setReduceRepositoryDir( repositoryDir ); setReduceRepositoryFile( filename ); break; } } break; } case REPOSITORY_BY_REFERENCE: { switch ( i ) { case 0: { setMapRepositoryReference( referrence ); break; } case 1: { setCombinerRepositoryReference( referrence ); break; } case 2: { setReduceRepositoryReference( referrence ); break; } } break; } } } public NamedClusterService getNamedClusterService() { return namedClusterService; } public NamedCluster getNamedCluster() { return namedCluster; } public void setNamedCluster( NamedCluster namedCluster ) { this.namedCluster = namedCluster; } public RuntimeTester getRuntimeTester() { return runtimeTester; } public RuntimeTestActionService getRuntimeTestActionService() { return runtimeTestActionService; } @Override public String getDialogClassName() { return DIALOG_NAME; } } ================================================ FILE: kettle-plugins/mapreduce/core/src/main/java/org/pentaho/big/data/kettle/plugins/mapreduce/step/enter/HadoopEnterMeta.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.mapreduce.step.enter; import org.pentaho.big.data.kettle.plugins.mapreduce.DialogClassUtil; import org.pentaho.di.core.annotations.Step; import org.pentaho.di.core.injection.Injection; import org.pentaho.di.core.injection.InjectionSupported; import org.pentaho.di.trans.steps.injector.InjectorMeta; @Step( id = "HadoopEnterPlugin", image = "MRI.svg", name = "HadoopEnterPlugin.Name", description = "HadoopEnterPlugin.Description", documentationUrl = "pdi-transformation-steps-reference-overview/mapreduce-input", categoryDescription = "i18n:org.pentaho.di.trans.step:BaseStep.Category.BigData", i18nPackageName = "org.pentaho.di.trans.steps.hadoopenter" ) @InjectionSupported( localizationPrefix = "HadoopEnterPlugin.Injection." ) public class HadoopEnterMeta extends InjectorMeta { @SuppressWarnings( "unused" ) private static Class PKG = HadoopEnterMeta.class; // for i18n purposes, needed by Translator2!! 
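// This step (MapReduce Input) extends InjectorMeta and always exposes exactly two fields, "key" and
// "value" (see setDefault() below). A minimal configuration sketch, given an existing HadoopEnterMeta
// instance named meta and the standard ValueMetaInterface type constants (illustrative only):
//   meta.setKeyType( ValueMetaInterface.TYPE_STRING );    // type of the incoming key field
//   meta.setValueType( ValueMetaInterface.TYPE_INTEGER ); // type of the incoming value field
// The same setters are reachable through metadata injection via the @Injection names declared below.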
$NON-NLS-1$ public static final String DIALOG_NAME = DialogClassUtil.getDialogClassName( PKG ); public static final String KEY_FIELDNAME = "key"; public static final String VALUE_FIELDNAME = "value"; public HadoopEnterMeta() throws Throwable { setDefault(); } @Override public void setDefault() { allocate( 2 ); getFieldname()[ 0 ] = HadoopEnterMeta.KEY_FIELDNAME; getFieldname()[ 1 ] = HadoopEnterMeta.VALUE_FIELDNAME; } @Override public String getDialogClassName() { return DIALOG_NAME; } @Injection( name = "KEY_TYPE" ) public void setKeyType( int type ) { getType()[0] = type; } @Injection( name = "KEY_LENGTH" ) public void setKeyLength( int length ) { getLength()[0] = length; } @Injection( name = "KEY_PRECISION" ) public void setKeyPrecision( int precision ) { getPrecision()[0] = precision; } @Injection( name = "VALUE_TYPE" ) public void setValueType( int type ) { getType()[1] = type; } @Injection( name = "VALUE_LENGTH" ) public void setValueLength( int length ) { getLength()[1] = length; } @Injection( name = "VALUE_PRECISION" ) public void setValuePrecision( int precision ) { getPrecision()[1] = precision; } } ================================================ FILE: kettle-plugins/mapreduce/core/src/main/java/org/pentaho/big/data/kettle/plugins/mapreduce/step/exit/HadoopExit.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.mapreduce.step.exit; import org.pentaho.di.core.exception.KettleException; import org.pentaho.di.i18n.BaseMessages; import org.pentaho.di.trans.Trans; import org.pentaho.di.trans.TransMeta; import org.pentaho.di.trans.step.BaseStep; import org.pentaho.di.trans.step.StepDataInterface; import org.pentaho.di.trans.step.StepInterface; import org.pentaho.di.trans.step.StepMeta; import org.pentaho.di.trans.step.StepMetaInterface; public class HadoopExit extends BaseStep implements StepInterface { private static final Class PKG = HadoopExit.class; private HadoopExitMeta meta; private HadoopExitData data; public HadoopExit( StepMeta stepMeta, StepDataInterface stepDataInterface, int copyNr, TransMeta transMeta, Trans trans ) { super( stepMeta, stepDataInterface, copyNr, transMeta, trans ); } public void runtimeInit() throws KettleException { data.init( getTransMeta().getBowl(), getInputRowMeta(), meta, this ); } public boolean processRow( StepMetaInterface smi, StepDataInterface sdi ) throws KettleException { meta = (HadoopExitMeta) smi; data = (HadoopExitData) sdi; Object[] r = getRow(); if ( r == null ) { // no more input to be expected... 
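// (For rows that do arrive, the logic below copies the configured key and value fields into a fixed
// two-slot output row, outKey at index 0 and outValue at index 1, matching the two-field layout
// declared by HadoopExitMeta.getFields(); see HadoopExitData for the ordinals.)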
setOutputDone(); return false; } if ( first ) { runtimeInit(); first = false; } Object[] outputRow = new Object[2]; outputRow[HadoopExitData.getOutKeyOrdinal()] = r[data.getInKeyOrdinal()]; outputRow[HadoopExitData.getOutValueOrdinal()] = r[data.getInValueOrdinal()]; putRow( data.getOutputRowMeta(), outputRow ); if ( checkFeedback( getLinesRead() ) ) { logBasic( BaseMessages.getString( PKG, "HadoopExit.Linenr", getLinesRead() ) ); } return true; } } ================================================ FILE: kettle-plugins/mapreduce/core/src/main/java/org/pentaho/big/data/kettle/plugins/mapreduce/step/exit/HadoopExitData.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.mapreduce.step.exit; import org.pentaho.di.core.bowl.Bowl; import org.pentaho.di.core.exception.KettleException; import org.pentaho.di.core.row.RowMetaInterface; import org.pentaho.di.core.variables.VariableSpace; import org.pentaho.di.trans.step.BaseStepData; import org.pentaho.di.trans.step.StepDataInterface; public class HadoopExitData extends BaseStepData implements StepDataInterface { private RowMetaInterface outputRowMeta = null; private int inKeyOrdinal = -1; private int inValueOrdinal = -1; public static final int outKeyOrdinal = 0; public static final int outValueOrdinal = 1; public HadoopExitData() { super(); } public void init( Bowl bowl, RowMetaInterface rowMeta, HadoopExitMeta stepMeta, VariableSpace space ) throws KettleException { if ( rowMeta != null ) { outputRowMeta = rowMeta.clone(); stepMeta.getFields( bowl, outputRowMeta, stepMeta.getName(), null, null, space ); setInKeyOrdinal( rowMeta.indexOfValue( stepMeta.getOutKeyFieldname() ) ); setInValueOrdinal( rowMeta.indexOfValue( stepMeta.getOutValueFieldname() ) ); } } public RowMetaInterface getOutputRowMeta() { return outputRowMeta; } public void setInKeyOrdinal( int inKeyOrdinal ) { this.inKeyOrdinal = inKeyOrdinal; } public int getInKeyOrdinal() { return inKeyOrdinal; } public void setInValueOrdinal( int inValueOrdinal ) { this.inValueOrdinal = inValueOrdinal; } public int getInValueOrdinal() { return inValueOrdinal; } public static int getOutKeyOrdinal() { return outKeyOrdinal; } public static int getOutValueOrdinal() { return outValueOrdinal; } } ================================================ FILE: kettle-plugins/mapreduce/core/src/main/java/org/pentaho/big/data/kettle/plugins/mapreduce/step/exit/HadoopExitMeta.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. 
* * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.mapreduce.step.exit; import org.pentaho.big.data.kettle.plugins.mapreduce.DialogClassUtil; import org.pentaho.di.core.CheckResult; import org.pentaho.di.core.CheckResultInterface; import org.pentaho.di.core.annotations.Step; import org.pentaho.di.core.bowl.Bowl; import org.pentaho.di.core.database.DatabaseMeta; import org.pentaho.di.core.exception.KettleException; import org.pentaho.di.core.exception.KettleStepException; import org.pentaho.di.core.exception.KettleXMLException; import org.pentaho.di.core.injection.Injection; import org.pentaho.di.core.injection.InjectionSupported; import org.pentaho.di.core.row.RowMetaInterface; import org.pentaho.di.core.row.ValueMetaInterface; import org.pentaho.di.core.variables.VariableSpace; import org.pentaho.di.core.xml.XMLHandler; import org.pentaho.di.i18n.BaseMessages; import org.pentaho.di.repository.ObjectId; import org.pentaho.di.repository.Repository; import org.pentaho.di.trans.Trans; import org.pentaho.di.trans.TransMeta; import org.pentaho.di.trans.step.BaseStepMeta; import org.pentaho.di.trans.step.StepDataInterface; import org.pentaho.di.trans.step.StepInterface; import org.pentaho.di.trans.step.StepMeta; import org.pentaho.di.trans.step.StepMetaInterface; import org.pentaho.metastore.api.IMetaStore; import org.w3c.dom.Node; import java.util.Arrays; import java.util.List; @Step( id = "HadoopExitPlugin", image = "MRO.svg", name = "HadoopExitPlugin.Name", description = "HadoopExitPlugin.Description", documentationUrl = "pdi-transformation-steps-reference-overview/mapreduce-output", categoryDescription = "i18n:org.pentaho.di.trans.step:BaseStep.Category.BigData", i18nPackageName = "org.pentaho.di.trans.steps.hadoopexit" ) @InjectionSupported( localizationPrefix = "HadoopExitPlugin.Injection." ) public class HadoopExitMeta extends BaseStepMeta implements StepMetaInterface { public static final String ERROR_INVALID_KEY_FIELD = "Error.InvalidKeyField"; public static final String ERROR_INVALID_VALUE_FIELD = "Error.InvalidValueField"; public static final String OUT_KEY = "outKey"; public static final String OUT_VALUE = "outValue"; public static final String HADOOP_EXIT_META_CHECK_RESULT_NO_DATA_STREAM = "HadoopExitMeta.CheckResult.NoDataStream"; public static final String HADOOP_EXIT_META_CHECK_RESULT_NO_SPECIFIED_FIELDS = "HadoopExitMeta.CheckResult.NoSpecifiedFields"; public static final String HADOOP_EXIT_META_CHECK_RESULT_STEP_RECEVING_DATA = "HadoopExitMeta.CheckResult.StepRecevingData"; public static final String HADOOP_EXIT_META_CHECK_RESULT_NOT_RECEVING_SPECIFIED_FIELDS = "HadoopExitMeta.CheckResult.NotRecevingSpecifiedFields"; public static Class PKG = HadoopExit.class; // for i18n purposes, needed by Translator2!! 
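// In getFields() further down, the two selected input fields are cloned and renamed to the fixed
// names OUT_KEY ("outKey") and OUT_VALUE ("outValue"); everything else in the row layout is dropped.
// A small sketch of the effect, using hypothetical input fields "word" (String) and "count" (Integer):
//   incoming row meta : word (String), count (Integer), ... any other fields
//   outKeyFieldname = "word", outValueFieldname = "count"
//   resulting row meta: outKey (String), outValue (Integer)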
$NON-NLS-1$ public static final String DIALOG_NAME = DialogClassUtil.getDialogClassName( PKG ); public static String OUT_KEY_FIELDNAME = "outkeyfieldname"; public static String OUT_VALUE_FIELDNAME = "outvaluefieldname"; @Injection( name = "KEY_FIELD" ) private String outKeyFieldname; @Injection( name = "VALUE_FIELD" ) private String outValueFieldname; public HadoopExitMeta() throws Throwable { super(); } @Override public void loadXML( Node stepnode, List databases, IMetaStore metaStore ) throws KettleXMLException { setOutKeyFieldname( XMLHandler.getTagValue( stepnode, HadoopExitMeta.OUT_KEY_FIELDNAME ) ); //$NON-NLS-1$ setOutValueFieldname( XMLHandler.getTagValue( stepnode, HadoopExitMeta.OUT_VALUE_FIELDNAME ) ); //$NON-NLS-1$ } @Override public String getXML() { StringBuilder retval = new StringBuilder(); retval.append( " " ).append( XMLHandler.addTagValue( HadoopExitMeta.OUT_KEY_FIELDNAME, getOutKeyFieldname() ) ); retval.append( " " ) .append( XMLHandler.addTagValue( HadoopExitMeta.OUT_VALUE_FIELDNAME, getOutValueFieldname() ) ); return retval.toString(); } public Object clone() { return super.clone(); } @Override public void setDefault() { setOutKeyFieldname( null ); setOutValueFieldname( null ); } @Override public void readRep( Repository rep, IMetaStore metaStore, ObjectId id_step, List databases ) throws KettleException { setOutKeyFieldname( rep.getStepAttributeString( id_step, HadoopExitMeta.OUT_KEY_FIELDNAME ) ); //$NON-NLS-1$ setOutValueFieldname( rep.getStepAttributeString( id_step, HadoopExitMeta.OUT_VALUE_FIELDNAME ) ); //$NON-NLS-1$ } @Override public void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_transformation, ObjectId id_step ) throws KettleException { rep.saveStepAttribute( id_transformation, id_step, HadoopExitMeta.OUT_KEY_FIELDNAME, getOutKeyFieldname() ); //$NON-NLS-1$ rep.saveStepAttribute( id_transformation, id_step, HadoopExitMeta.OUT_VALUE_FIELDNAME, getOutValueFieldname() ); //$NON-NLS-1$ } @Override public void getFields( Bowl bowl, RowMetaInterface rowMeta, String origin, RowMetaInterface[] info, StepMeta nextStep, VariableSpace space ) throws KettleStepException { ValueMetaInterface key = rowMeta.searchValueMeta( getOutKeyFieldname() ); ValueMetaInterface value = rowMeta.searchValueMeta( getOutValueFieldname() ); if ( key == null ) { throw new KettleStepException( BaseMessages.getString( PKG, ERROR_INVALID_KEY_FIELD, getOutKeyFieldname() ) ); } if ( value == null ) { throw new KettleStepException( BaseMessages.getString( PKG, ERROR_INVALID_VALUE_FIELD, getOutValueFieldname() ) ); } // The output consists of 2 fields: outKey and outValue // The data types rely on the input data type so we look those up // ValueMetaInterface keyMeta = key.clone(); ValueMetaInterface valueMeta = value.clone(); keyMeta.setName( OUT_KEY ); valueMeta.setName( OUT_VALUE ); rowMeta.clear(); rowMeta.addValueMeta( keyMeta ); rowMeta.addValueMeta( valueMeta ); } @Override public void check( List remarks, TransMeta transMeta, StepMeta stepinfo, RowMetaInterface prev, String[] input, String[] output, RowMetaInterface info ) { CheckResult cr; // Make sure we have an input stream that contains the desired field names if ( prev == null || prev.size() == 0 ) { cr = new CheckResult( CheckResultInterface.TYPE_RESULT_ERROR, BaseMessages.getString( PKG, HADOOP_EXIT_META_CHECK_RESULT_NO_DATA_STREAM ), stepinfo ); //$NON-NLS-1$ remarks.add( cr ); } else { List fieldnames = Arrays.asList( prev.getFieldNames() ); HadoopExitMeta stepMeta = (HadoopExitMeta) 
stepinfo.getStepMetaInterface(); if ( ( stepMeta.getOutKeyFieldname() == null ) || stepMeta.getOutValueFieldname() == null ) { cr = new CheckResult( CheckResultInterface.TYPE_RESULT_ERROR, BaseMessages.getString( PKG, HADOOP_EXIT_META_CHECK_RESULT_NO_SPECIFIED_FIELDS, prev.size() + "" ), stepinfo ); //$NON-NLS-1$ //$NON-NLS-2$ remarks.add( cr ); } else { if ( fieldnames.contains( stepMeta.getOutKeyFieldname() ) && fieldnames.contains( stepMeta.getOutValueFieldname() ) ) { cr = new CheckResult( CheckResultInterface.TYPE_RESULT_OK, BaseMessages.getString( PKG, HADOOP_EXIT_META_CHECK_RESULT_STEP_RECEVING_DATA, prev.size() + "" ), stepinfo ); //$NON-NLS-1$ //$NON-NLS-2$ remarks.add( cr ); } else { cr = new CheckResult( CheckResultInterface.TYPE_RESULT_ERROR, BaseMessages.getString( PKG, HADOOP_EXIT_META_CHECK_RESULT_NOT_RECEVING_SPECIFIED_FIELDS, prev.size() + "" ), stepinfo ); //$NON-NLS-1$ //$NON-NLS-2$ remarks.add( cr ); } } } } public StepInterface getStep( StepMeta stepMeta, StepDataInterface stepDataInterface, int cnr, TransMeta tr, Trans trans ) { return new HadoopExit( stepMeta, stepDataInterface, cnr, tr, trans ); } public StepDataInterface getStepData() { return new HadoopExitData(); } public String getOutKeyFieldname() { return outKeyFieldname; } public void setOutKeyFieldname( String arg ) { outKeyFieldname = arg; } public String getOutValueFieldname() { return outValueFieldname; } public void setOutValueFieldname( String arg ) { outValueFieldname = arg; } @Override public String getDialogClassName() { return DIALOG_NAME; } } ================================================ FILE: kettle-plugins/mapreduce/core/src/main/java/org/pentaho/big/data/kettle/plugins/mapreduce/ui/entry/hadoop/JobEntryHadoopJobExecutorController.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. 
* * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.mapreduce.ui.entry.hadoop; import org.eclipse.swt.SWT; import org.eclipse.swt.widgets.Composite; import org.eclipse.swt.widgets.FileDialog; import org.eclipse.swt.widgets.Shell; import org.eclipse.swt.widgets.Text; import org.pentaho.hadoop.shim.api.cluster.ClusterInitializationException; import org.pentaho.hadoop.shim.api.cluster.NamedCluster; import org.pentaho.hadoop.shim.api.cluster.NamedClusterService; import org.pentaho.hadoop.shim.api.cluster.NamedClusterServiceLocator; import org.pentaho.big.data.kettle.plugins.mapreduce.entry.hadoop.JobEntryHadoopJobExecutor; import org.pentaho.big.data.kettle.plugins.mapreduce.entry.UserDefinedItem; import org.pentaho.big.data.plugins.common.ui.HadoopClusterDelegateImpl; import org.pentaho.hadoop.shim.api.mapreduce.MapReduceJarInfo; import org.pentaho.hadoop.shim.api.mapreduce.MapReduceService; import org.pentaho.di.core.Const; import org.pentaho.di.core.plugins.JobEntryPluginType; import org.pentaho.di.core.plugins.PluginInterface; import org.pentaho.di.core.plugins.PluginRegistry; import org.pentaho.di.core.util.StringUtil; import org.pentaho.di.core.variables.VariableSpace; import org.pentaho.di.core.variables.Variables; import org.pentaho.di.core.vfs.KettleVFS; import org.pentaho.di.i18n.BaseMessages; import org.pentaho.di.job.JobMeta; import org.pentaho.di.ui.core.PropsUI; import org.pentaho.di.ui.core.database.dialog.tags.ExtTextbox; import org.pentaho.di.ui.core.gui.WindowProperty; import org.pentaho.di.ui.spoon.Spoon; import org.pentaho.di.ui.util.HelpUtils; import org.pentaho.metastore.api.exceptions.MetaStoreException; import org.pentaho.ui.xul.XulDomException; import org.pentaho.ui.xul.XulEventSourceAdapter; import org.pentaho.ui.xul.components.XulMenuList; import org.pentaho.ui.xul.components.XulTextbox; import org.pentaho.ui.xul.containers.XulDialog; import org.pentaho.ui.xul.containers.XulVbox; import org.pentaho.ui.xul.impl.AbstractXulEventHandler; import org.pentaho.ui.xul.jface.tags.JfaceCMenuList; import org.pentaho.ui.xul.jface.tags.JfaceMenuList; import org.pentaho.ui.xul.util.AbstractModelList; import java.io.File; import java.util.ArrayList; import java.util.Collections; import java.util.List; public class JobEntryHadoopJobExecutorController extends AbstractXulEventHandler { public static final String JOB_ENTRY_NAME = "jobEntryName"; //$NON-NLS-1$ public static final String HADOOP_JOB_NAME = "hadoopJobName"; //$NON-NLS-1$ public static final String JAR_URL = "jarUrl"; //$NON-NLS-1$ public static final String DRIVER_CLASS = "driverClass"; //$NON-NLS-1$ public static final String DRIVER_CLASSES = "driverClasses"; //$NON-NLS-1$ public static final String IS_SIMPLE = "isSimple"; //$NON-NLS-1$ public static final String USER_DEFINED = "userDefined"; //$NON-NLS-1$ private static final Class PKG = JobEntryHadoopJobExecutor.class; private String jobEntryName; private String hadoopJobName; private String jarUrl = ""; private String driverClass = ""; private List driverClasses = new ArrayList<>(); private boolean isSimple = true; private SimpleConfiguration sConf = new SimpleConfiguration(); private AdvancedConfiguration aConf = new AdvancedConfiguration(); private JobEntryHadoopJobExecutor jobEntry; private JobMeta jobMeta; private AbstractModelList userDefined = new AbstractModelList(); private final NamedClusterService namedClusterService; private final HadoopClusterDelegateImpl 
ncDelegate; private final NamedClusterServiceLocator namedClusterServiceLocator; public JobEntryHadoopJobExecutorController( HadoopClusterDelegateImpl hadoopClusterDelegate, NamedClusterService namedClusterService, NamedClusterServiceLocator namedClusterServiceLocator ) { this.ncDelegate = hadoopClusterDelegate; this.namedClusterService = namedClusterService; this.namedClusterServiceLocator = namedClusterServiceLocator; } protected VariableSpace getVariableSpace() { if ( Spoon.getInstance().getActiveTransformation() != null ) { return Spoon.getInstance().getActiveTransformation(); } else if ( Spoon.getInstance().getActiveJob() != null ) { return Spoon.getInstance().getActiveJob(); } else { return new Variables(); } } public void accept() { ExtTextbox tempBox = (ExtTextbox) getXulDomContainer().getDocumentRoot().getElementById( "jobentry-hadoopjob-name" ); this.hadoopJobName = ( (Text) tempBox.getTextControl() ).getText(); tempBox = (ExtTextbox) getXulDomContainer().getDocumentRoot().getElementById( "jar-url" ); this.jarUrl = ( (Text) tempBox.getTextControl() ).getText(); JfaceCMenuList tempList = (JfaceCMenuList) getXulDomContainer().getDocumentRoot().getElementById( "driver-class" ); this.driverClass = tempList.getValue(); tempBox = (ExtTextbox) getXulDomContainer().getDocumentRoot().getElementById( "command-line-arguments" ); sConf.cmdLineArgs = ( (Text) tempBox.getTextControl() ).getText(); tempBox = (ExtTextbox) getXulDomContainer().getDocumentRoot().getElementById( "classes-output-key-class" ); aConf.outputKeyClass = ( (Text) tempBox.getTextControl() ).getText(); tempBox = (ExtTextbox) getXulDomContainer().getDocumentRoot().getElementById( "classes-output-value-class" ); aConf.outputValueClass = ( (Text) tempBox.getTextControl() ).getText(); tempBox = (ExtTextbox) getXulDomContainer().getDocumentRoot().getElementById( "classes-mapper-class" ); aConf.mapperClass = ( (Text) tempBox.getTextControl() ).getText(); tempBox = (ExtTextbox) getXulDomContainer().getDocumentRoot().getElementById( "classes-reducer-class" ); aConf.reducerClass = ( (Text) tempBox.getTextControl() ).getText(); tempBox = (ExtTextbox) getXulDomContainer().getDocumentRoot().getElementById( "input-path" ); aConf.inputPath = ( (Text) tempBox.getTextControl() ).getText(); tempBox = (ExtTextbox) getXulDomContainer().getDocumentRoot().getElementById( "output-path" ); aConf.outputPath = ( (Text) tempBox.getTextControl() ).getText(); tempBox = (ExtTextbox) getXulDomContainer().getDocumentRoot().getElementById( "classes-input-format" ); aConf.inputFormatClass = ( (Text) tempBox.getTextControl() ).getText(); tempBox = (ExtTextbox) getXulDomContainer().getDocumentRoot().getElementById( "classes-output-format" ); aConf.outputFormatClass = ( (Text) tempBox.getTextControl() ).getText(); tempBox = (ExtTextbox) getXulDomContainer().getDocumentRoot().getElementById( "num-map-tasks" ); aConf.numMapTasks = ( (Text) tempBox.getTextControl() ).getText(); tempBox = (ExtTextbox) getXulDomContainer().getDocumentRoot().getElementById( "num-reduce-tasks" ); aConf.numReduceTasks = ( (Text) tempBox.getTextControl() ).getText(); tempBox = (ExtTextbox) getXulDomContainer().getDocumentRoot().getElementById( "logging-interval" ); aConf.loggingInterval = ( (Text) tempBox.getTextControl() ).getText(); JfaceMenuList ncBox = (JfaceMenuList) getXulDomContainer().getDocumentRoot().getElementById( "named-clusters" ); if ( !isSimple() && aConf.selectedNamedCluster != null ) { NamedCluster reload = namedClusterService.getNamedClusterByName( 
aConf.selectedNamedCluster.getName(), jobMeta.getMetaStore() ); if ( reload != null ) { aConf.selectedNamedCluster = reload; } } String validationErrors = ""; if ( StringUtil.isEmpty( jobEntryName ) ) { validationErrors += BaseMessages.getString( PKG, "JobEntryHadoopJobExecutor.JobEntryName.Error" ) + "\n"; } if ( StringUtil.isEmpty( hadoopJobName ) ) { validationErrors += BaseMessages.getString( PKG, "JobEntryHadoopJobExecutor.HadoopJobName.Error" ) + "\n"; } if ( !StringUtil.isEmpty( validationErrors ) ) { openErrorDialog( BaseMessages.getString( PKG, "Dialog.Error" ), validationErrors ); // show validation errors dialog return; } // common/simple jobEntry.setName( jobEntryName ); jobEntry.setHadoopJobName( hadoopJobName ); jobEntry.setSimple( isSimple ); jobEntry.setJarUrl( jarUrl ); jobEntry.setDriverClass( driverClass ); jobEntry.setCmdLineArgs( sConf.getCommandLineArgs() ); jobEntry.setSimpleBlocking( sConf.isSimpleBlocking() ); jobEntry.setSimpleLoggingInterval( sConf.getSimpleLoggingInterval() ); // advanced config jobEntry.setBlocking( aConf.isBlocking() ); jobEntry.setLoggingInterval( aConf.getLoggingInterval() ); jobEntry.setMapperClass( aConf.getMapperClass() ); jobEntry.setCombinerClass( aConf.getCombinerClass() ); jobEntry.setReducerClass( aConf.getReducerClass() ); jobEntry.setInputPath( aConf.getInputPath() ); jobEntry.setInputFormatClass( aConf.getInputFormatClass() ); jobEntry.setOutputPath( aConf.getOutputPath() ); jobEntry.setOutputKeyClass( aConf.getOutputKeyClass() ); jobEntry.setOutputValueClass( aConf.getOutputValueClass() ); jobEntry.setOutputFormatClass( aConf.getOutputFormatClass() ); jobEntry.setNamedCluster( aConf.selectedNamedCluster ); jobEntry.setNumMapTasks( aConf.getNumMapTasks() ); jobEntry.setNumReduceTasks( aConf.getNumReduceTasks() ); jobEntry.setUserDefined( userDefined ); jobEntry.setChanged(); cancel(); } public void init() throws XulDomException { if ( jobEntry != null ) { // common/simple setName( jobEntry.getName() ); setJobEntryName( jobEntry.getName() ); setHadoopJobName( jobEntry.getHadoopJobName() ); setSimple( jobEntry.isSimple() ); setJarUrl( jobEntry.getJarUrl() ); aConf.setSelectedNamedCluster( jobEntry.getNamedCluster() ); populateDriverMenuList(); setDriverClass( jobEntry.getDriverClass() ); sConf.setCommandLineArgs( jobEntry.getCmdLineArgs() ); sConf.setSimpleBlocking( jobEntry.isSimpleBlocking() ); sConf.setSimpleLoggingInterval( jobEntry.getSimpleLoggingInterval() ); // advanced config userDefined.clear(); if ( jobEntry.getUserDefined() != null ) { userDefined.addAll( jobEntry.getUserDefined() ); } VariableSpace varSpace = getVariableSpace(); ExtTextbox tempBox; tempBox = (ExtTextbox) getXulDomContainer().getDocumentRoot().getElementById( "jobentry-hadoopjob-name" ); tempBox.setVariableSpace( varSpace ); tempBox = (ExtTextbox) getXulDomContainer().getDocumentRoot().getElementById( "jar-url" ); tempBox.setVariableSpace( varSpace ); tempBox.setVariableSpace( varSpace ); tempBox = (ExtTextbox) getXulDomContainer().getDocumentRoot().getElementById( "command-line-arguments" ); tempBox.setVariableSpace( varSpace ); tempBox = (ExtTextbox) getXulDomContainer().getDocumentRoot().getElementById( "classes-output-key-class" ); tempBox.setVariableSpace( varSpace ); tempBox = (ExtTextbox) getXulDomContainer().getDocumentRoot().getElementById( "classes-output-value-class" ); tempBox.setVariableSpace( varSpace ); tempBox = (ExtTextbox) getXulDomContainer().getDocumentRoot().getElementById( "classes-mapper-class" ); tempBox.setVariableSpace( varSpace 
); tempBox = (ExtTextbox) getXulDomContainer().getDocumentRoot().getElementById( "classes-combiner-class" ); tempBox.setVariableSpace( varSpace ); tempBox = (ExtTextbox) getXulDomContainer().getDocumentRoot().getElementById( "classes-reducer-class" ); tempBox.setVariableSpace( varSpace ); tempBox = (ExtTextbox) getXulDomContainer().getDocumentRoot().getElementById( "input-path" ); tempBox.setVariableSpace( varSpace ); tempBox = (ExtTextbox) getXulDomContainer().getDocumentRoot().getElementById( "output-path" ); tempBox.setVariableSpace( varSpace ); tempBox = (ExtTextbox) getXulDomContainer().getDocumentRoot().getElementById( "classes-input-format" ); tempBox.setVariableSpace( varSpace ); tempBox = (ExtTextbox) getXulDomContainer().getDocumentRoot().getElementById( "classes-output-format" ); tempBox.setVariableSpace( varSpace ); tempBox = (ExtTextbox) getXulDomContainer().getDocumentRoot().getElementById( "num-map-tasks" ); tempBox.setVariableSpace( varSpace ); tempBox = (ExtTextbox) getXulDomContainer().getDocumentRoot().getElementById( "num-reduce-tasks" ); tempBox.setVariableSpace( varSpace ); tempBox = (ExtTextbox) getXulDomContainer().getDocumentRoot().getElementById( "logging-interval" ); tempBox.setVariableSpace( varSpace ); tempBox = (ExtTextbox) getXulDomContainer().getDocumentRoot().getElementById( "simple-logging-interval" ); tempBox.setVariableSpace( varSpace ); aConf.setBlocking( jobEntry.isBlocking() ); aConf.setLoggingInterval( jobEntry.getLoggingInterval() ); aConf.setMapperClass( jobEntry.getMapperClass() ); aConf.setCombinerClass( jobEntry.getCombinerClass() ); aConf.setReducerClass( jobEntry.getReducerClass() ); aConf.setInputPath( jobEntry.getInputPath() ); aConf.setInputFormatClass( jobEntry.getInputFormatClass() ); aConf.setOutputPath( jobEntry.getOutputPath() ); aConf.setOutputKeyClass( jobEntry.getOutputKeyClass() ); aConf.setOutputValueClass( jobEntry.getOutputValueClass() ); aConf.setOutputFormatClass( jobEntry.getOutputFormatClass() ); aConf.setNumMapTasks( jobEntry.getNumMapTasks() ); aConf.setNumReduceTasks( jobEntry.getNumReduceTasks() ); } } public void setJobMeta( JobMeta jobMeta ) { this.jobMeta = jobMeta; } public void cancel() { XulDialog xulDialog = (XulDialog) getXulDomContainer().getDocumentRoot().getElementById( "job-entry-dialog" ); Shell shell = (Shell) xulDialog.getRootObject(); if ( !shell.isDisposed() ) { WindowProperty winprop = new WindowProperty( shell ); PropsUI.getInstance().setScreen( winprop ); ( (Composite) xulDialog.getManagedObject() ).dispose(); shell.dispose(); } } public void browseJar() { XulDialog xulDialog = (XulDialog) getXulDomContainer().getDocumentRoot().getElementById( "job-entry-dialog" ); Shell shell = (Shell) xulDialog.getRootObject(); FileDialog dialog = new FileDialog( shell, SWT.OPEN ); dialog.setFilterExtensions( new String[] { "*.jar;*.zip" } ); dialog.setFilterNames( new String[] { "Java Archives (jar)" } ); String prevName = jobEntry.environmentSubstitute( jarUrl ); String parentFolder = null; Spoon spoon = Spoon.getInstance(); try { parentFolder = KettleVFS.getFilename( KettleVFS.getInstance( spoon.getExecutionBowl() ) .getFileObject( jobEntry.environmentSubstitute( jobEntry.getFilename() ) ) .getParent() ); } catch ( Exception e ) { // not that important } if ( !Const.isEmpty( prevName ) ) { try { if ( KettleVFS.getInstance( spoon.getExecutionBowl() ).fileExists( prevName ) ) { dialog.setFilterPath( KettleVFS.getFilename( KettleVFS.getInstance( spoon.getExecutionBowl() ) .getFileObject( prevName ).getParent() ) ); 
} else { if ( !prevName.endsWith( ".jar" ) && !prevName.endsWith( ".zip" ) ) { prevName = "${" + Const.INTERNAL_VARIABLE_JOB_FILENAME_DIRECTORY + "}/" + Const.trim( jarUrl ) + ".jar"; } if ( KettleVFS.getInstance( spoon.getExecutionBowl() ).fileExists( prevName ) ) { setJarUrl( prevName ); return; } } } catch ( Exception e ) { dialog.setFilterPath( parentFolder ); } } else if ( !Const.isEmpty( parentFolder ) ) { dialog.setFilterPath( parentFolder ); } String fname = dialog.open(); if ( fname != null ) { File file = new File( fname ); String name = file.getName(); String parentFolderSelection = file.getParentFile().toString(); if ( !Const.isEmpty( parentFolder ) && parentFolder.equals( parentFolderSelection ) ) { setJarUrl( "${" + Const.INTERNAL_VARIABLE_JOB_FILENAME_DIRECTORY + "}/" + name ); } else { setJarUrl( fname ); } populateDriverMenuList(); } } public void newUserDefinedItem() { userDefined.add( new UserDefinedItem() ); } public SimpleConfiguration getSimpleConfiguration() { return sConf; } public AdvancedConfiguration getAdvancedConfiguration() { return aConf; } public AbstractModelList getUserDefined() { return userDefined; } @Override public String getName() { return "jobEntryController"; //$NON-NLS-1$ } public String getJobEntryName() { return jobEntryName; } public void setJobEntryName( String jobEntryName ) { String previousVal = this.jobEntryName; String newVal = jobEntryName; this.jobEntryName = jobEntryName; firePropertyChange( JobEntryHadoopJobExecutorController.JOB_ENTRY_NAME, previousVal, newVal ); } public String getHadoopJobName() { return hadoopJobName; } public void setHadoopJobName( String hadoopJobName ) { String previousVal = this.hadoopJobName; String newVal = hadoopJobName; this.hadoopJobName = hadoopJobName; firePropertyChange( JobEntryHadoopJobExecutorController.HADOOP_JOB_NAME, previousVal, newVal ); } public String getJarUrl() { return jarUrl; } public void setJarUrl( String jarUrl ) { String previousVal = this.jarUrl; String newVal = jarUrl; this.jarUrl = jarUrl; firePropertyChange( JobEntryHadoopJobExecutorController.JAR_URL, previousVal, newVal ); } public String getDriverClass() { return driverClass; } public void setDriverClass( String driverClass ) { String previousVal = this.driverClass; String newVal = driverClass; this.driverClass = driverClass; firePropertyChange( JobEntryHadoopJobExecutorController.DRIVER_CLASS, previousVal, newVal ); } public List getDriverClasses() { return driverClasses; } public void setDriverClasses( List driverClasses ) { List previousVal = this.driverClasses; List newVal = driverClasses; this.driverClasses = driverClasses; firePropertyChange( JobEntryHadoopJobExecutorController.DRIVER_CLASSES, previousVal, newVal ); } public boolean isSimple() { return isSimple; } public void setSimple( boolean isSimple ) { ( (XulVbox) getXulDomContainer().getDocumentRoot().getElementById( "advanced-configuration" ) ) .setVisible( !isSimple ); //$NON-NLS-1$ ( (XulVbox) getXulDomContainer().getDocumentRoot().getElementById( "simple-configuration" ) ) .setVisible( isSimple ); //$NON-NLS-1$ boolean previousVal = this.isSimple; boolean newVal = isSimple; this.isSimple = isSimple; firePropertyChange( JobEntryHadoopJobExecutorController.IS_SIMPLE, previousVal, newVal ); } public void invertSimpleBlocking() { sConf.setSimpleBlocking( !sConf.isSimpleBlocking() ); } public void invertBlocking() { aConf.setBlocking( !aConf.isBlocking() ); } public JobEntryHadoopJobExecutor getJobEntry() { return jobEntry; } public void setJobEntry( 
JobEntryHadoopJobExecutor jobEntry ) { this.jobEntry = jobEntry; } public List getNamedClusters() throws MetaStoreException { return namedClusterService.list( jobMeta.getMetaStore() ); } public void openErrorDialog( String title, String message ) { XulDialog errorDialog = (XulDialog) getXulDomContainer().getDocumentRoot().getElementById( "hadoop-error-dialog" ); errorDialog.setTitle( title ); XulTextbox errorMessage = (XulTextbox) getXulDomContainer().getDocumentRoot().getElementById( "hadoop-error-message" ); errorMessage.setValue( message ); errorDialog.show(); } public void closeErrorDialog() { XulDialog errorDialog = (XulDialog) getXulDomContainer().getDocumentRoot().getElementById( "hadoop-error-dialog" ); errorDialog.hide(); } public void editNamedCluster() { try { XulDialog xulDialog = (XulDialog) getXulDomContainer().getDocumentRoot().getElementById( "job-entry-dialog" ); Shell shell = (Shell) xulDialog.getRootObject(); NamedCluster namedCluster; if ( aConf.isSelectedNamedCluster() ) { namedCluster = aConf.selectedNamedCluster; } else { namedCluster = namedClusterService.getClusterTemplate(); } String clusterName = ncDelegate.editNamedCluster( null, namedCluster, shell ); if ( clusterName != null ) { //cancel button on editing pressed, clusters not changed firePropertyChange( "namedClusters", namedCluster, getNamedClusters() ); selectNamedCluster( clusterName ); } } catch ( Throwable t ) { t.printStackTrace(); } } public void newNamedCluster() { try { XulDialog xulDialog = (XulDialog) getXulDomContainer().getDocumentRoot().getElementById( "job-entry-dialog" ); Shell shell = (Shell) xulDialog.getRootObject(); String newClusterName = ncDelegate.newNamedCluster( jobMeta, null, shell ); if ( newClusterName != null ) { //cancel button on editing pressed, clusters not changed firePropertyChange( "namedClusters", null, getNamedClusters() ); selectNamedCluster( newClusterName ); } } catch ( Throwable t ) { t.printStackTrace(); } } private void selectNamedCluster( String clusterName ) throws MetaStoreException { @SuppressWarnings( "unchecked" ) XulMenuList namedClusterMenu = (XulMenuList) getXulDomContainer().getDocumentRoot() .getElementById( "named-clusters" ); for ( NamedCluster nc : getNamedClusters() ) { if ( clusterName != null && clusterName.equals( nc.getName() ) ) { namedClusterMenu.setSelectedItem( nc ); aConf.setSelectedNamedCluster( nc ); } } } public void help() { XulDialog xulDialog = (XulDialog) getXulDomContainer().getDocumentRoot().getRootElement().getFirstChild(); Shell shell = (Shell) xulDialog.getRootObject(); PluginInterface plugin = PluginRegistry.getInstance().findPluginWithId( JobEntryPluginType.class, jobEntry.getPluginId() ); HelpUtils.openHelpDialog( shell, plugin ); } private void populateDriverMenuList() { if ( Const.isEmpty( jarUrl ) ) { return; } MapReduceService mapReduceService = null; try { mapReduceService = namedClusterServiceLocator.getService( aConf.selectedNamedCluster, MapReduceService.class ); } catch ( ClusterInitializationException e ) { jobEntry.logError( "Unable to locate map reduce service for cluster." ); } MapReduceJarInfo mapReduceJarInfo = null; try { mapReduceJarInfo = mapReduceService != null ? mapReduceService.getJarInfo( JobEntryHadoopJobExecutor.resolveJarUrl( jarUrl, getVariableSpace() ) ) : null; } catch ( Exception e ) { jobEntry.logError( "Unable to locate map reduce jar." ); } List driverClassesInJar = ( mapReduceJarInfo != null ? 
new ArrayList<>( mapReduceJarInfo.getClassesWithMain() ) : Collections.emptyList() ); if ( Const.isEmpty( driverClass ) ) { setDriverClasses( driverClassesInJar ); String mainClass = mapReduceJarInfo != null ? mapReduceJarInfo.getMainClass() : null; if ( mainClass != null ) { setDriverClass( mainClass ); } else if ( !driverClassesInJar.isEmpty( ) ) { setDriverClass( driverClassesInJar.get( 0 ) ); } else { setDriverClass( "" ); } } else { String saveDriverClass = driverClass; setDriverClasses( driverClassesInJar ); setDriverClass( saveDriverClass ); } } public class SimpleConfiguration extends XulEventSourceAdapter { public static final String CMD_LINE_ARGS = "commandLineArgs"; //$NON-NLS-1$ public static final String BLOCKING = "simpleBlocking"; //$NON-NLS-1$ public static final String LOGGING_INTERVAL = "simpleLoggingInterval"; //$NON-NLS-1$ private String cmdLineArgs; private boolean simpleBlocking; private String simpleLoggingInterval = "60"; public String getCommandLineArgs() { return cmdLineArgs; } public void setCommandLineArgs( String cmdLineArgs ) { String previousVal = this.cmdLineArgs; String newVal = cmdLineArgs; this.cmdLineArgs = cmdLineArgs; firePropertyChange( SimpleConfiguration.CMD_LINE_ARGS, previousVal, newVal ); } public boolean isSimpleBlocking() { return simpleBlocking; } public void setSimpleBlocking( boolean simpleBlocking ) { boolean old = this.simpleBlocking; this.simpleBlocking = simpleBlocking; firePropertyChange( SimpleConfiguration.BLOCKING, old, this.simpleBlocking ); } public String getSimpleLoggingInterval() { return simpleLoggingInterval; } public void setSimpleLoggingInterval( String simpleLoggingInterval ) { String old = this.simpleLoggingInterval; this.simpleLoggingInterval = simpleLoggingInterval; firePropertyChange( SimpleConfiguration.LOGGING_INTERVAL, old, this.simpleLoggingInterval ); } } public class AdvancedConfiguration extends XulEventSourceAdapter { public static final String OUTPUT_KEY_CLASS = "outputKeyClass"; //$NON-NLS-1$ public static final String OUTPUT_VALUE_CLASS = "outputValueClass"; //$NON-NLS-1$ public static final String MAPPER_CLASS = "mapperClass"; //$NON-NLS-1$ public static final String COMBINER_CLASS = "combinerClass"; //$NON-NLS-1$ public static final String REDUCER_CLASS = "reducerClass"; //$NON-NLS-1$ public static final String INPUT_FORMAT_CLASS = "inputFormatClass"; //$NON-NLS-1$ public static final String OUTPUT_FORMAT_CLASS = "outputFormatClass"; //$NON-NLS-1$ public static final String INPUT_PATH = "inputPath"; //$NON-NLS-1$ public static final String OUTPUT_PATH = "outputPath"; //$NON-NLS-1$ public static final String BLOCKING = "blocking"; //$NON-NLS-1$ public static final String LOGGING_INTERVAL = "loggingInterval"; //$NON-NLS-1$ public static final String HDFS_HOSTNAME = "hdfsHostname"; //$NON-NLS-1$ public static final String HDFS_PORT = "hdfsPort"; //$NON-NLS-1$ public static final String JOB_TRACKER_HOSTNAME = "jobTrackerHostname"; //$NON-NLS-1$ public static final String JOB_TRACKER_PORT = "jobTrackerPort"; //$NON-NLS-1$ public static final String NUM_MAP_TASKS = "numMapTasks"; //$NON-NLS-1$ public static final String NUM_REDUCE_TASKS = "numReduceTasks"; //$NON-NLS-1$ private String outputKeyClass; private String outputValueClass; private String mapperClass; private String combinerClass; private String reducerClass; private String inputFormatClass; private String outputFormatClass; private NamedCluster selectedNamedCluster; private String inputPath; private String outputPath; private String numMapTasks = "1"; 
private String numReduceTasks = "1"; private boolean blocking; private String loggingInterval = "60"; // 60 seconds public String getOutputKeyClass() { return outputKeyClass; } public void setOutputKeyClass( String outputKeyClass ) { String previousVal = this.outputKeyClass; String newVal = outputKeyClass; this.outputKeyClass = outputKeyClass; firePropertyChange( AdvancedConfiguration.OUTPUT_KEY_CLASS, previousVal, newVal ); } public String getOutputValueClass() { return outputValueClass; } public void setOutputValueClass( String outputValueClass ) { String previousVal = this.outputValueClass; String newVal = outputValueClass; this.outputValueClass = outputValueClass; firePropertyChange( AdvancedConfiguration.OUTPUT_VALUE_CLASS, previousVal, newVal ); } public String getMapperClass() { return mapperClass; } public void setMapperClass( String mapperClass ) { String previousVal = this.mapperClass; String newVal = mapperClass; this.mapperClass = mapperClass; firePropertyChange( AdvancedConfiguration.MAPPER_CLASS, previousVal, newVal ); } public String getCombinerClass() { return combinerClass; } public void setCombinerClass( String combinerClass ) { String previousVal = this.combinerClass; String newVal = combinerClass; this.combinerClass = combinerClass; firePropertyChange( AdvancedConfiguration.COMBINER_CLASS, previousVal, newVal ); } public String getReducerClass() { return reducerClass; } public void setReducerClass( String reducerClass ) { String previousVal = this.reducerClass; String newVal = reducerClass; this.reducerClass = reducerClass; firePropertyChange( AdvancedConfiguration.REDUCER_CLASS, previousVal, newVal ); } public String getInputFormatClass() { return inputFormatClass; } public void setInputFormatClass( String inputFormatClass ) { String previousVal = this.inputFormatClass; String newVal = inputFormatClass; this.inputFormatClass = inputFormatClass; firePropertyChange( AdvancedConfiguration.INPUT_FORMAT_CLASS, previousVal, newVal ); } public String getOutputFormatClass() { return outputFormatClass; } public void setOutputFormatClass( String outputFormatClass ) { String previousVal = this.outputFormatClass; String newVal = outputFormatClass; this.outputFormatClass = outputFormatClass; firePropertyChange( AdvancedConfiguration.OUTPUT_FORMAT_CLASS, previousVal, newVal ); } public String getInputPath() { return inputPath; } public void setInputPath( String inputPath ) { String previousVal = this.inputPath; String newVal = inputPath; this.inputPath = inputPath; firePropertyChange( AdvancedConfiguration.INPUT_PATH, previousVal, newVal ); } public String getOutputPath() { return outputPath; } public void setOutputPath( String outputPath ) { String previousVal = this.outputPath; String newVal = outputPath; this.outputPath = outputPath; firePropertyChange( AdvancedConfiguration.OUTPUT_PATH, previousVal, newVal ); } public boolean isBlocking() { return blocking; } public void setBlocking( boolean blocking ) { boolean previousVal = this.blocking; boolean newVal = blocking; this.blocking = blocking; firePropertyChange( AdvancedConfiguration.BLOCKING, previousVal, newVal ); } public String getLoggingInterval() { return loggingInterval; } public void setLoggingInterval( String loggingInterval ) { String previousVal = this.loggingInterval; String newVal = loggingInterval; this.loggingInterval = loggingInterval; firePropertyChange( AdvancedConfiguration.LOGGING_INTERVAL, previousVal, newVal ); } public String getNumMapTasks() { return numMapTasks; } public void setNumMapTasks( String 
numMapTasks ) { String previousVal = this.numMapTasks; String newVal = numMapTasks; this.numMapTasks = numMapTasks; firePropertyChange( AdvancedConfiguration.NUM_MAP_TASKS, previousVal, newVal ); } public String getNumReduceTasks() { return numReduceTasks; } public void setNumReduceTasks( String numReduceTasks ) { String previousVal = this.numReduceTasks; String newVal = numReduceTasks; this.numReduceTasks = numReduceTasks; firePropertyChange( AdvancedConfiguration.NUM_REDUCE_TASKS, previousVal, newVal ); } public boolean isSelectedNamedCluster() { return this.selectedNamedCluster != null; } public void setSelectedNamedCluster( NamedCluster namedCluster ) { this.selectedNamedCluster = namedCluster; } } } ================================================ FILE: kettle-plugins/mapreduce/core/src/main/java/org/pentaho/big/data/kettle/plugins/mapreduce/ui/entry/hadoop/JobEntryHadoopJobExecutorDialog.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.mapreduce.ui.entry.hadoop; import org.dom4j.DocumentException; import org.eclipse.swt.widgets.Shell; import org.pentaho.di.core.annotations.PluginDialog; import org.pentaho.hadoop.shim.api.cluster.NamedCluster; import org.pentaho.hadoop.shim.api.cluster.NamedClusterService; import org.pentaho.big.data.kettle.plugins.mapreduce.entry.hadoop.JobEntryHadoopJobExecutor; import org.pentaho.big.data.plugins.common.ui.HadoopClusterDelegateImpl; import org.pentaho.di.i18n.BaseMessages; import org.pentaho.di.job.JobMeta; import org.pentaho.di.job.entry.JobEntryDialogInterface; import org.pentaho.di.job.entry.JobEntryInterface; import org.pentaho.di.repository.Repository; import org.pentaho.di.ui.job.entry.JobEntryDialog; import org.pentaho.di.ui.spoon.Spoon; import org.pentaho.metastore.api.exceptions.MetaStoreException; import org.pentaho.ui.xul.XulDomContainer; import org.pentaho.ui.xul.XulException; import org.pentaho.ui.xul.XulRunner; import org.pentaho.ui.xul.binding.Binding.Type; import org.pentaho.ui.xul.binding.BindingConvertor; import org.pentaho.ui.xul.binding.BindingFactory; import org.pentaho.ui.xul.binding.DefaultBindingFactory; import org.pentaho.ui.xul.components.XulMenuList; import org.pentaho.ui.xul.components.XulRadio; import org.pentaho.ui.xul.components.XulTextbox; import org.pentaho.ui.xul.containers.XulDialog; import org.pentaho.ui.xul.containers.XulTree; import org.pentaho.ui.xul.containers.XulVbox; import org.pentaho.ui.xul.swt.SwtXulLoader; import org.pentaho.ui.xul.swt.SwtXulRunner; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import java.util.ArrayList; import java.util.Enumeration; import java.util.List; import java.util.ResourceBundle; @PluginDialog( id = "HadoopJobExecutorPlugin", image = "HDE.svg", pluginType = PluginDialog.PluginType.JOBENTRY, documentationUrl = "https://pentaho-community.atlassian.net/wiki/display/EAI/Hadoop+Job+Executor" ) public class JobEntryHadoopJobExecutorDialog extends JobEntryDialog implements JobEntryDialogInterface { private static final Class CLZ = JobEntryHadoopJobExecutor.class; private static final Logger logger = LogManager.getLogger( 
JobEntryHadoopJobExecutorDialog.class ); private final NamedClusterService namedClusterService; private final JobEntryHadoopJobExecutorController controller; private JobEntryHadoopJobExecutor jobEntry; private XulDomContainer container; private BindingFactory bf; private ResourceBundle bundle = new ResourceBundle() { @Override public Enumeration getKeys() { return null; } @Override protected Object handleGetObject( String key ) { return BaseMessages.getString( CLZ, key ); } }; public JobEntryHadoopJobExecutorDialog( Shell parent, JobEntryInterface jobEntry, Repository rep, JobMeta jobMeta ) throws XulException, DocumentException, Throwable { super( parent, jobEntry, rep, jobMeta ); this.jobEntry = (JobEntryHadoopJobExecutor) jobEntry; this.namedClusterService = this.jobEntry.getNamedClusterService(); controller = new JobEntryHadoopJobExecutorController( new HadoopClusterDelegateImpl( Spoon.getInstance(), namedClusterService, this.jobEntry.getRuntimeTestActionService(), this.jobEntry.getRuntimeTester() ), namedClusterService, this.jobEntry.getNamedClusterServiceLocator() ); SwtXulLoader swtXulLoader = new SwtXulLoader(); swtXulLoader.registerClassLoader( getClass().getClassLoader() ); swtXulLoader.register( "VARIABLETEXTBOX", "org.pentaho.di.ui.core.database.dialog.tags.ExtTextbox" ); swtXulLoader.register( "VARIABLEMENULIST", "org.pentaho.di.ui.core.database.dialog.tags.ExtMenuList" ); swtXulLoader.setOuterContext( shell ); container = swtXulLoader .loadXul( "org/pentaho/big/data/kettle/plugins/mapreduce/ui/entry/JobEntryHadoopJobExecutorDialog.xul", bundle ); //$NON-NLS-1$ final XulRunner runner = new SwtXulRunner(); runner.addContainer( container ); container.addEventHandler( controller ); bf = new DefaultBindingFactory(); bf.setDocument( container.getDocumentRoot() ); bf.setBindingType( Type.BI_DIRECTIONAL ); bf.createBinding( "jobentry-name", "value", controller, JobEntryHadoopJobExecutorController.JOB_ENTRY_NAME ); //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$ bf.createBinding( "jobentry-hadoopjob-name", "value", controller, JobEntryHadoopJobExecutorController.HADOOP_JOB_NAME ); //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$ bf.createBinding( "jar-url", "value", controller, JobEntryHadoopJobExecutorController.JAR_URL ); //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$ bf.createBinding( "driver-class", "value", controller, JobEntryHadoopJobExecutorController.DRIVER_CLASS ); //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$ bf.createBinding( "driver-class", "selectedItem", controller, JobEntryHadoopJobExecutorController.DRIVER_CLASS ); //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$ bf.createBinding( "driver-class", "elements", controller, JobEntryHadoopJobExecutorController.DRIVER_CLASSES ); //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$ bf.createBinding( "command-line-arguments", "value", controller.getSimpleConfiguration(), JobEntryHadoopJobExecutorController.SimpleConfiguration.CMD_LINE_ARGS ); //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$ bf.createBinding( "classes-output-key-class", "value", controller.getAdvancedConfiguration(), JobEntryHadoopJobExecutorController.AdvancedConfiguration.OUTPUT_KEY_CLASS ); //$NON-NLS-1$ //$NON-NLS-2$ bf.createBinding( "classes-output-value-class", "value", controller.getAdvancedConfiguration(), JobEntryHadoopJobExecutorController.AdvancedConfiguration.OUTPUT_VALUE_CLASS ); //$NON-NLS-1$ //$NON-NLS-2$ bf.createBinding( "classes-mapper-class", "value", controller.getAdvancedConfiguration(), JobEntryHadoopJobExecutorController.AdvancedConfiguration.MAPPER_CLASS ); //$NON-NLS-1$ 
//$NON-NLS-2$ bf.createBinding( "classes-combiner-class", "value", controller.getAdvancedConfiguration(), JobEntryHadoopJobExecutorController.AdvancedConfiguration.COMBINER_CLASS ); //$NON-NLS-1$ //$NON-NLS-2$ bf.createBinding( "classes-reducer-class", "value", controller.getAdvancedConfiguration(), JobEntryHadoopJobExecutorController.AdvancedConfiguration.REDUCER_CLASS ); //$NON-NLS-1$ //$NON-NLS-2$ bf.createBinding( "classes-input-format", "value", controller.getAdvancedConfiguration(), JobEntryHadoopJobExecutorController.AdvancedConfiguration.INPUT_FORMAT_CLASS ); //$NON-NLS-1$ //$NON-NLS-2$ bf.createBinding( "classes-output-format", "value", controller.getAdvancedConfiguration(), JobEntryHadoopJobExecutorController.AdvancedConfiguration.OUTPUT_FORMAT_CLASS ); //$NON-NLS-1$ //$NON-NLS-2$ // bf.createBinding("num-map-tasks", "value", controller.getAdvancedConfiguration(), // AdvancedConfiguration.NUM_MAP_TASKS, bindingConverter); //$NON-NLS-1$ //$NON-NLS-2$ bf.createBinding( "num-map-tasks", "value", controller.getAdvancedConfiguration(), JobEntryHadoopJobExecutorController.AdvancedConfiguration.NUM_MAP_TASKS ); //$NON-NLS-1$ //$NON-NLS-2$ // bf.createBinding("num-reduce-tasks", "value", controller.getAdvancedConfiguration(), // AdvancedConfiguration.NUM_REDUCE_TASKS, bindingConverter); //$NON-NLS-1$ //$NON-NLS-2$ bf.createBinding( "num-reduce-tasks", "value", controller.getAdvancedConfiguration(), JobEntryHadoopJobExecutorController.AdvancedConfiguration.NUM_REDUCE_TASKS ); //$NON-NLS-1$ //$NON-NLS-2$ bf.createBinding( "simple-blocking", "selected", controller.getSimpleConfiguration(), JobEntryHadoopJobExecutorController.SimpleConfiguration.BLOCKING ); //$NON-NLS-1$ //$NON-NLS-2$ bf.createBinding( "blocking", "selected", controller.getAdvancedConfiguration(), JobEntryHadoopJobExecutorController.AdvancedConfiguration.BLOCKING ); //$NON-NLS-1$ //$NON-NLS-2$ bf.createBinding( "simple-logging-interval", "value", controller.getSimpleConfiguration(), JobEntryHadoopJobExecutorController.SimpleConfiguration.LOGGING_INTERVAL ); //$NON-NLS-1$ //$NON-NLS-2$ bf.createBinding( "logging-interval", "value", controller.getAdvancedConfiguration(), JobEntryHadoopJobExecutorController.AdvancedConfiguration.LOGGING_INTERVAL ); //$NON-NLS-1$ //$NON-NLS-2$ bf.createBinding( "input-path", "value", controller.getAdvancedConfiguration(), JobEntryHadoopJobExecutorController.AdvancedConfiguration.INPUT_PATH ); //$NON-NLS-1$ //$NON-NLS-2$ bf.createBinding( "output-path", "value", controller.getAdvancedConfiguration(), JobEntryHadoopJobExecutorController.AdvancedConfiguration.OUTPUT_PATH ); //$NON-NLS-1$ //$NON-NLS-2$ ( (XulRadio) container.getDocumentRoot().getElementById( "simpleRadioButton" ) ).setSelected( this.jobEntry .isSimple() ); //$NON-NLS-1$ ( (XulRadio) container.getDocumentRoot().getElementById( "advancedRadioButton" ) ).setSelected( !this.jobEntry .isSimple() ); //$NON-NLS-1$ ( (XulVbox) container.getDocumentRoot().getElementById( "advanced-configuration" ) ).setVisible( !this.jobEntry .isSimple() ); //$NON-NLS-1$ XulTextbox simpleLoggingInterval = (XulTextbox) container.getDocumentRoot().getElementById( "simple-logging-interval" ); simpleLoggingInterval.setValue( "" + controller.getSimpleConfiguration().getSimpleLoggingInterval() ); XulTextbox loggingInterval = (XulTextbox) container.getDocumentRoot().getElementById( "logging-interval" ); loggingInterval.setValue( controller.getAdvancedConfiguration().getLoggingInterval() ); XulTextbox mapTasks = (XulTextbox) 
container.getDocumentRoot().getElementById( "num-map-tasks" ); mapTasks.setValue( controller.getAdvancedConfiguration().getNumMapTasks() ); XulTextbox reduceTasks = (XulTextbox) container.getDocumentRoot().getElementById( "num-reduce-tasks" ); reduceTasks.setValue( controller.getAdvancedConfiguration().getNumReduceTasks() ); XulTree variablesTree = (XulTree) container.getDocumentRoot().getElementById( "fields-table" ); //$NON-NLS-1$ bf.setBindingType( Type.ONE_WAY ); bf.createBinding( controller.getUserDefined(), "children", variablesTree, "elements" ); //$NON-NLS-1$//$NON-NLS-2$ bf.setBindingType( Type.BI_DIRECTIONAL ); controller.setJobMeta( jobMeta ); controller.setJobEntry( (JobEntryHadoopJobExecutor) jobEntry ); controller.init(); bf.setBindingType( Type.ONE_WAY ); bf.createBinding( controller, "namedClusters", "named-clusters", "elements" ).fireSourceChanged(); bf.setBindingType( Type.BI_DIRECTIONAL ); bf.createBinding( "named-clusters", "selectedIndex", controller.getAdvancedConfiguration(), "selectedNamedCluster", new BindingConvertor() { public NamedCluster sourceToTarget( final Integer index ) { List clusters = new ArrayList<>(); try { clusters = controller.getNamedClusters(); } catch ( MetaStoreException e ) { logger.error( e.getMessage(), e ); } if ( index == -1 || clusters.isEmpty() ) { return null; } return clusters.get( index ); } public Integer targetToSource( final NamedCluster value ) { return null; } } ).fireSourceChanged(); selectNamedCluster(); } private void selectNamedCluster() { @SuppressWarnings( "unchecked" ) XulMenuList namedClusterMenu = (XulMenuList) container.getDocumentRoot().getElementById( "named-clusters" ); //$NON-NLS-1$ NamedCluster namedCluster = jobEntry.getNamedCluster(); if ( namedCluster != null && isKnownNamedCluster( namedCluster, controller ) ) { namedClusterMenu.setSelectedItem( namedCluster ); controller.getAdvancedConfiguration().setSelectedNamedCluster( namedCluster ); } } public JobEntryInterface open() { XulDialog dialog = (XulDialog) container.getDocumentRoot().getElementById( "job-entry-dialog" ); //$NON-NLS-1$ dialog.show(); return jobEntry; } private boolean isKnownNamedCluster( NamedCluster jobNameCluster, JobEntryHadoopJobExecutorController controller ) { boolean result = false; if ( jobNameCluster != null ) { String jncName = jobNameCluster.getName(); List nClusters = null; try { nClusters = controller.getNamedClusters(); } catch ( MetaStoreException e ) { logger.error( e.getMessage(), e ); } if ( jncName != null && nClusters != null ) { for ( NamedCluster nc : nClusters ) { if ( jncName != null && jncName.equals( nc.getName() ) ) { result = true; break; } } } } return result; } } ================================================ FILE: kettle-plugins/mapreduce/core/src/main/java/org/pentaho/big/data/kettle/plugins/mapreduce/ui/entry/pmr/JobEntryHadoopTransJobExecutorController.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. 
* * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.mapreduce.ui.entry.pmr; import org.apache.commons.lang.StringUtils; import org.eclipse.swt.SWT; import org.eclipse.swt.widgets.Composite; import org.eclipse.swt.widgets.FileDialog; import org.eclipse.swt.widgets.Shell; import org.eclipse.swt.widgets.Text; import org.pentaho.hadoop.shim.api.cluster.NamedCluster; import org.pentaho.hadoop.shim.api.cluster.NamedClusterService; import org.pentaho.big.data.kettle.plugins.mapreduce.entry.pmr.JobEntryHadoopTransJobExecutor; import org.pentaho.big.data.kettle.plugins.mapreduce.entry.UserDefinedItem; import org.pentaho.big.data.plugins.common.ui.HadoopClusterDelegateImpl; import org.pentaho.di.core.Const; import org.pentaho.di.core.exception.KettleException; import org.pentaho.di.core.plugins.JobEntryPluginType; import org.pentaho.di.core.plugins.PluginInterface; import org.pentaho.di.core.plugins.PluginRegistry; import org.pentaho.di.core.util.StringUtil; import org.pentaho.di.core.variables.VariableSpace; import org.pentaho.di.core.variables.Variables; import org.pentaho.di.core.vfs.KettleVFS; import org.pentaho.di.i18n.BaseMessages; import org.pentaho.di.job.JobMeta; import org.pentaho.di.repository.ObjectId; import org.pentaho.di.repository.Repository; import org.pentaho.di.trans.TransMeta; import org.pentaho.di.ui.core.PropsUI; import org.pentaho.di.ui.core.database.dialog.tags.ExtTextbox; import org.pentaho.di.ui.core.gui.WindowProperty; import org.pentaho.di.ui.repository.dialog.SelectObjectDialog; import org.pentaho.di.ui.spoon.Spoon; import org.pentaho.di.ui.util.HelpUtils; import org.pentaho.metastore.api.exceptions.MetaStoreException; import org.pentaho.ui.xul.components.XulTextbox; import org.pentaho.ui.xul.containers.XulDialog; import org.pentaho.ui.xul.impl.AbstractXulEventHandler; import org.pentaho.ui.xul.jface.tags.JfaceMenuList; import org.pentaho.ui.xul.util.AbstractModelList; import com.google.common.annotations.VisibleForTesting; import java.io.File; import java.util.List; public class JobEntryHadoopTransJobExecutorController extends AbstractXulEventHandler { private static final Class PKG = JobEntryHadoopTransJobExecutor.class; public static final String JOB_ENTRY_NAME = "jobEntryName"; //$NON-NLS-1$ public static final String HADOOP_JOB_NAME = "hadoopJobName"; //$NON-NLS-1$ public static final String MAP_TRANS = "mapTrans"; //$NON-NLS-1$ public static final String COMBINER_TRANS = "combinerTrans"; //$NON-NLS-1$ public static final String REDUCE_TRANS = "reduceTrans"; //$NON-NLS-1$ public static final String MAP_TRANS_INPUT_STEP_NAME = "mapTransInputStepName"; //$NON-NLS-1$ public static final String MAP_TRANS_OUTPUT_STEP_NAME = "mapTransOutputStepName"; //$NON-NLS-1$ public static final String COMBINER_TRANS_INPUT_STEP_NAME = "combinerTransInputStepName"; //$NON-NLS-1$ public static final String COMBINER_TRANS_OUTPUT_STEP_NAME = "combinerTransOutputStepName"; //$NON-NLS-1$ public static final String COMBINING_SINGLE_THREADED = "combiningSingleThreaded"; //$NON-NLS-1$ public static final String REDUCE_TRANS_INPUT_STEP_NAME = "reduceTransInputStepName"; //$NON-NLS-1$ public static final String REDUCE_TRANS_OUTPUT_STEP_NAME = "reduceTransOutputStepName"; //$NON-NLS-1$ public static final String REDUCING_SINGLE_THREADED = "reducingSingleThreaded"; //$NON-NLS-1$ public static final String SUPPRESS_OUTPUT_MAP_KEY = "suppressOutputOfMapKey"; public static final String 
SUPPRESS_OUTPUT_MAP_VALUE = "suppressOutputOfMapValue"; public static final String SUPPRESS_OUTPUT_KEY = "suppressOutputOfKey"; public static final String SUPPRESS_OUTPUT_VALUE = "suppressOutputOfValue"; public static final String MAP_OUTPUT_KEY_CLASS = "mapOutputKeyClass"; //$NON-NLS-1$ public static final String MAP_OUTPUT_VALUE_CLASS = "mapOutputValueClass"; //$NON-NLS-1$ public static final String OUTPUT_KEY_CLASS = "outputKeyClass"; //$NON-NLS-1$ public static final String OUTPUT_VALUE_CLASS = "outputValueClass"; //$NON-NLS-1$ public static final String INPUT_FORMAT_CLASS = "inputFormatClass"; //$NON-NLS-1$ public static final String OUTPUT_FORMAT_CLASS = "outputFormatClass"; //$NON-NLS-1$ public static final String INPUT_PATH = "inputPath"; //$NON-NLS-1$ public static final String OUTPUT_PATH = "outputPath"; //$NON-NLS-1$ public static final String CLEAN_OUTPUT_PATH = "cleanOutputPath"; //$NON-NLS-1$ public static final String BLOCKING = "blocking"; //$NON-NLS-1$ public static final String LOGGING_INTERVAL = "loggingInterval"; //$NON-NLS-1$ public static final String HDFS_HOSTNAME = "hdfsHostname"; //$NON-NLS-1$ public static final String HDFS_PORT = "hdfsPort"; //$NON-NLS-1$ public static final String JOB_TRACKER_HOSTNAME = "jobTrackerHostname"; //$NON-NLS-1$ public static final String JOB_TRACKER_PORT = "jobTrackerPort"; //$NON-NLS-1$ public static final String NUM_MAP_TASKS = "numMapTasks"; //$NON-NLS-1$ public static final String NUM_REDUCE_TASKS = "numReduceTasks"; //$NON-NLS-1$ public static final String USER_DEFINED = "userDefined"; //$NON-NLS-1$ public static final String LOCAL = "local"; public static final String REPOSITORY = "repository"; private final NamedClusterService namedClusterService; private final HadoopClusterDelegateImpl ncDelegate; private String jobEntryName; private String hadoopJobName; private boolean suppressOutputMapKey; private boolean suppressOutputMapValue; private boolean suppressOutputKey; private boolean suppressOutputValue; private String inputFormatClass; private String outputFormatClass; private String inputPath; private String outputPath; private boolean cleanOutputPath; private String numMapTasks = "1"; private String numReduceTasks = "1"; private boolean blocking; private String loggingInterval = "60"; private String mapTrans = ""; private String combinerTrans = ""; private boolean combiningSingleThreaded; private String reduceTrans = ""; private boolean reducingSingleThreaded; private String mapTransInputStepName = ""; private String mapTransOutputStepName = ""; private String combinerTransInputStepName = ""; private String combinerTransOutputStepName = ""; private String reduceTransInputStepName = ""; private String reduceTransOutputStepName = ""; private static String storageType; private List namedClusters; protected Shell shell; private Repository rep; private JobMeta jobMeta; private NamedCluster selectedNamedCluster; private JobEntryHadoopTransJobExecutor jobEntry; private AbstractModelList userDefined = new AbstractModelList(); public JobEntryHadoopTransJobExecutorController( HadoopClusterDelegateImpl ncDelegate, NamedClusterService namedClusterService ) throws Throwable { this.ncDelegate = ncDelegate; this.namedClusterService = namedClusterService; } protected VariableSpace getVariableSpace() { if ( Spoon.getInstance().getActiveTransformation() != null ) { return Spoon.getInstance().getActiveTransformation(); } else if ( Spoon.getInstance().getActiveJob() != null ) { return Spoon.getInstance().getActiveJob(); } else { return new 
Variables(); } } public void accept() { ExtTextbox tempBox = (ExtTextbox) getXulDomContainer().getDocumentRoot().getElementById( "jobentry-hadoopjob-name" ); this.hadoopJobName = ( (Text) tempBox.getTextControl() ).getText(); tempBox = (ExtTextbox) getXulDomContainer().getDocumentRoot().getElementById( "jobentry-map-transformation" ); this.mapTrans = ( (Text) tempBox.getTextControl() ).getText(); tempBox = (ExtTextbox) getXulDomContainer().getDocumentRoot().getElementById( "jobentry-map-input-stepname" ); this.mapTransInputStepName = ( (Text) tempBox.getTextControl() ).getText(); tempBox = (ExtTextbox) getXulDomContainer().getDocumentRoot().getElementById( "jobentry-map-output-stepname" ); this.mapTransOutputStepName = ( (Text) tempBox.getTextControl() ).getText(); tempBox = (ExtTextbox) getXulDomContainer().getDocumentRoot().getElementById( "jobentry-combiner-transformation" ); this.combinerTrans = ( (Text) tempBox.getTextControl() ).getText(); tempBox = (ExtTextbox) getXulDomContainer().getDocumentRoot().getElementById( "jobentry-combiner-input-stepname" ); this.combinerTransInputStepName = ( (Text) tempBox.getTextControl() ).getText(); tempBox = (ExtTextbox) getXulDomContainer().getDocumentRoot().getElementById( "jobentry-combiner-output-stepname" ); this.combinerTransOutputStepName = ( (Text) tempBox.getTextControl() ).getText(); tempBox = (ExtTextbox) getXulDomContainer().getDocumentRoot().getElementById( "jobentry-reduce-transformation" ); this.reduceTrans = ( (Text) tempBox.getTextControl() ).getText(); tempBox = (ExtTextbox) getXulDomContainer().getDocumentRoot().getElementById( "jobentry-reduce-input-stepname" ); this.reduceTransInputStepName = ( (Text) tempBox.getTextControl() ).getText(); tempBox = (ExtTextbox) getXulDomContainer().getDocumentRoot().getElementById( "jobentry-reduce-output-stepname" ); this.reduceTransOutputStepName = ( (Text) tempBox.getTextControl() ).getText(); tempBox = (ExtTextbox) getXulDomContainer().getDocumentRoot().getElementById( "input-path" ); this.inputPath = ( (Text) tempBox.getTextControl() ).getText(); tempBox = (ExtTextbox) getXulDomContainer().getDocumentRoot().getElementById( "output-path" ); this.outputPath = ( (Text) tempBox.getTextControl() ).getText(); tempBox = (ExtTextbox) getXulDomContainer().getDocumentRoot().getElementById( "classes-input-format" ); this.inputFormatClass = ( (Text) tempBox.getTextControl() ).getText(); tempBox = (ExtTextbox) getXulDomContainer().getDocumentRoot().getElementById( "classes-output-format" ); this.outputFormatClass = ( (Text) tempBox.getTextControl() ).getText(); JfaceMenuList ncBox = (JfaceMenuList) getXulDomContainer().getDocumentRoot().getElementById( "named-clusters" ); try { selectedNamedCluster = namedClusterService.read( ncBox.getSelectedItem(), jobMeta.getMetaStore() ); } catch ( MetaStoreException e ) { openErrorDialog( BaseMessages.getString( PKG, "Dialog.Error" ), e.getMessage() ); } tempBox = (ExtTextbox) getXulDomContainer().getDocumentRoot().getElementById( "num-map-tasks" ); this.numMapTasks = ( (Text) tempBox.getTextControl() ).getText(); tempBox = (ExtTextbox) getXulDomContainer().getDocumentRoot().getElementById( "num-reduce-tasks" ); this.numReduceTasks = ( (Text) tempBox.getTextControl() ).getText(); tempBox = (ExtTextbox) getXulDomContainer().getDocumentRoot().getElementById( "logging-interval" ); this.loggingInterval = ( (Text) tempBox.getTextControl() ).getText(); String validationErrors = ""; if ( StringUtil.isEmpty( jobEntryName ) ) { validationErrors += BaseMessages.getString( 
PKG, "JobEntryHadoopTransJobExecutor.JobEntryName.Error" ) + "\n"; } if ( selectedNamedCluster == null ) { validationErrors += BaseMessages.getString( PKG, "JobEntryHadoopTransJobExecutor.NamedClusterNotProvided.Error" ) + "\n"; } if ( StringUtil.isEmpty( hadoopJobName ) ) { validationErrors += BaseMessages.getString( PKG, "JobEntryHadoopTransJobExecutor.HadoopJobName.Error" ) + "\n"; } if ( !StringUtils.isEmpty( numReduceTasks ) ) { String reduceS = getVariableSpace().environmentSubstitute( numReduceTasks ); try { int numR = Integer.parseInt( reduceS ); if ( numR < 0 ) { validationErrors += BaseMessages.getString( PKG, "JobEntryHadoopTransJobExecutor.NumReduceTasks.Error" ) + "\n"; } } catch ( NumberFormatException e ) { // omit } } if ( !StringUtils.isEmpty( numMapTasks ) ) { String mapS = getVariableSpace().environmentSubstitute( numMapTasks ); try { int numM = Integer.parseInt( mapS ); if ( numM < 0 ) { validationErrors += BaseMessages.getString( PKG, "JobEntryHadoopTransJobExecutor.NumMapTasks.Error" ) + "\n"; } } catch ( NumberFormatException e ) { // omit } } if ( !StringUtil.isEmpty( validationErrors ) ) { openErrorDialog( BaseMessages.getString( PKG, "Dialog.Error" ), validationErrors ); // show validation errors dialog return; } // common/simple jobEntry.setName( jobEntryName ); jobEntry.setHadoopJobName( hadoopJobName ); jobEntry.setMapTrans( mapTrans ); jobEntry.setMapInputStepName( mapTransInputStepName ); jobEntry.setMapOutputStepName( mapTransOutputStepName ); jobEntry.setCombinerTrans( combinerTrans ); jobEntry.setCombinerInputStepName( combinerTransInputStepName ); jobEntry.setCombinerOutputStepName( combinerTransOutputStepName ); jobEntry.setCombiningSingleThreaded( combiningSingleThreaded ); jobEntry.setReduceTrans( reduceTrans ); jobEntry.setReduceInputStepName( reduceTransInputStepName ); jobEntry.setReduceOutputStepName( reduceTransOutputStepName ); jobEntry.setReducingSingleThreaded( reducingSingleThreaded ); // advanced config jobEntry.setBlocking( isBlocking() ); jobEntry.setLoggingInterval( loggingInterval ); jobEntry.setInputPath( getInputPath() ); jobEntry.setInputFormatClass( getInputFormatClass() ); jobEntry.setOutputPath( getOutputPath() ); jobEntry.setCleanOutputPath( isCleanOutputPath() ); jobEntry.setSuppressOutputOfMapKey( isSuppressOutputOfMapKey() ); jobEntry.setSuppressOutputOfMapValue( isSuppressOutputOfMapValue() ); jobEntry.setSuppressOutputOfKey( isSuppressOutputOfKey() ); jobEntry.setSuppressOutputOfValue( isSuppressOutputOfValue() ); jobEntry.setOutputFormatClass( getOutputFormatClass() ); jobEntry.setNamedCluster( selectedNamedCluster ); jobEntry.setNumMapTasks( getNumMapTasks() ); jobEntry.setNumReduceTasks( getNumReduceTasks() ); jobEntry.setUserDefined( userDefined ); jobEntry.setChanged(); cancel(); } @SuppressWarnings( { "rawtypes" } ) public void init() throws Throwable { if ( jobEntry != null ) { // common/simple setName( jobEntry.getName() ); setJobEntryName( jobEntry.getName() ); setHadoopJobName( jobEntry.getHadoopJobName() ); // set variables VariableSpace varSpace = getVariableSpace(); ExtTextbox tempBox; tempBox = (ExtTextbox) getXulDomContainer().getDocumentRoot().getElementById( "jobentry-hadoopjob-name" ); tempBox.setVariableSpace( varSpace ); tempBox = (ExtTextbox) getXulDomContainer().getDocumentRoot().getElementById( "jobentry-map-transformation" ); tempBox.setVariableSpace( varSpace ); tempBox = (ExtTextbox) getXulDomContainer().getDocumentRoot().getElementById( "jobentry-map-input-stepname" ); tempBox.setVariableSpace( 
varSpace ); tempBox = (ExtTextbox) getXulDomContainer().getDocumentRoot().getElementById( "jobentry-map-output-stepname" ); tempBox.setVariableSpace( varSpace ); tempBox = (ExtTextbox) getXulDomContainer().getDocumentRoot().getElementById( "jobentry-combiner-transformation" ); tempBox.setVariableSpace( varSpace ); tempBox = (ExtTextbox) getXulDomContainer().getDocumentRoot().getElementById( "jobentry-combiner-input-stepname" ); tempBox.setVariableSpace( varSpace ); tempBox = (ExtTextbox) getXulDomContainer().getDocumentRoot().getElementById( "jobentry-combiner-output-stepname" ); tempBox.setVariableSpace( varSpace ); tempBox = (ExtTextbox) getXulDomContainer().getDocumentRoot().getElementById( "jobentry-reduce-transformation" ); tempBox.setVariableSpace( varSpace ); tempBox = (ExtTextbox) getXulDomContainer().getDocumentRoot().getElementById( "jobentry-reduce-input-stepname" ); tempBox.setVariableSpace( varSpace ); tempBox = (ExtTextbox) getXulDomContainer().getDocumentRoot().getElementById( "jobentry-reduce-output-stepname" ); tempBox.setVariableSpace( varSpace ); tempBox = (ExtTextbox) getXulDomContainer().getDocumentRoot().getElementById( "input-path" ); tempBox.setVariableSpace( varSpace ); tempBox = (ExtTextbox) getXulDomContainer().getDocumentRoot().getElementById( "output-path" ); tempBox.setVariableSpace( varSpace ); tempBox = (ExtTextbox) getXulDomContainer().getDocumentRoot().getElementById( "classes-input-format" ); tempBox.setVariableSpace( varSpace ); tempBox = (ExtTextbox) getXulDomContainer().getDocumentRoot().getElementById( "classes-output-format" ); tempBox.setVariableSpace( varSpace ); tempBox = (ExtTextbox) getXulDomContainer().getDocumentRoot().getElementById( "num-map-tasks" ); tempBox.setVariableSpace( varSpace ); tempBox = (ExtTextbox) getXulDomContainer().getDocumentRoot().getElementById( "num-reduce-tasks" ); tempBox.setVariableSpace( varSpace ); tempBox = (ExtTextbox) getXulDomContainer().getDocumentRoot().getElementById( "logging-interval" ); tempBox.setVariableSpace( varSpace ); setCombinerTransInputStepName( jobEntry.getCombinerInputStepName() ); setCombinerTransOutputStepName( jobEntry.getCombinerOutputStepName() ); setCombiningSingleThreaded( jobEntry.isCombiningSingleThreaded() ); // Load the map transformation into the UI if ( jobEntry.getMapTrans() != null || rep == null ) { setMapTrans( jobEntry.getMapTrans() ); } else if ( jobEntry.getMapRepositoryReference() != null ) { // Load the repository directory and file for displaying to the user try { TransMeta transMeta = rep.loadTransformation( jobEntry.getMapRepositoryReference(), null ); if ( transMeta != null && transMeta.getRepositoryDirectory() != null ) { setMapTrans( buildRepositoryPath( transMeta.getRepositoryDirectory().getPath(), transMeta.getName() ) ); } } catch ( KettleException e ) { // The transformation cannot be loaded from the repository setMapTrans( null ); } } else { setMapTrans( buildRepositoryPath( jobEntry.getMapRepositoryDir(), jobEntry.getMapRepositoryFile() ) ); } setMapTransInputStepName( jobEntry.getMapInputStepName() ); setMapTransOutputStepName( jobEntry.getMapOutputStepName() ); // Load the combiner transformation into the UI if ( jobEntry.getCombinerTrans() != null || rep == null ) { setCombinerTrans( jobEntry.getCombinerTrans() ); } else if ( jobEntry.getCombinerRepositoryReference() != null ) { // Load the repository directory and file for displaying to the user try { TransMeta transMeta = rep.loadTransformation( jobEntry.getCombinerRepositoryReference(), null ); if ( 
transMeta != null && transMeta.getRepositoryDirectory() != null ) { setCombinerTrans( buildRepositoryPath( transMeta.getRepositoryDirectory().getPath(), transMeta.getName() ) ); } } catch ( KettleException e ) { // The transformation cannot be loaded from the repository setCombinerTrans( null ); } } else { setCombinerTrans( buildRepositoryPath( jobEntry.getCombinerRepositoryDir(), jobEntry.getCombinerRepositoryFile() ) ); } // Load the reduce transformation into the UI if ( jobEntry.getReduceTrans() != null || rep == null ) { setReduceTrans( jobEntry.getReduceTrans() ); } else if ( jobEntry.getReduceRepositoryReference() != null ) { // Load the repository directory and file for displaying to the user try { TransMeta transMeta = rep.loadTransformation( jobEntry.getReduceRepositoryReference(), null ); if ( transMeta != null && transMeta.getRepositoryDirectory() != null ) { setReduceTrans( buildRepositoryPath( transMeta.getRepositoryDirectory().getPath(), transMeta.getName() ) ); } } catch ( KettleException e ) { // The transformation cannot be loaded from the repository setReduceTrans( null ); } } else { setReduceTrans( buildRepositoryPath( jobEntry.getReduceRepositoryDir(), jobEntry.getReduceRepositoryFile() ) ); } setReduceTransInputStepName( jobEntry.getReduceInputStepName() ); setReduceTransOutputStepName( jobEntry.getReduceOutputStepName() ); setReducingSingleThreaded( jobEntry.isReducingSingleThreaded() ); userDefined.clear(); if ( jobEntry.getUserDefined() != null ) { userDefined.addAll( jobEntry.getUserDefined() ); } setBlocking( jobEntry.isBlocking() ); setLoggingInterval( jobEntry.getLoggingInterval() ); setInputPath( jobEntry.getInputPath() ); setInputFormatClass( jobEntry.getInputFormatClass() ); setOutputPath( jobEntry.getOutputPath() ); setCleanOutputPath( jobEntry.isCleanOutputPath() ); setSuppressOutputOfMapKey( jobEntry.getSuppressOutputOfMapKey() ); setSuppressOutputOfMapValue( jobEntry.getSuppressOutputOfMapValue() ); setSuppressOutputOfKey( jobEntry.getSuppressOutputOfKey() ); setSuppressOutputOfValue( jobEntry.getSuppressOutputOfValue() ); setOutputFormatClass( jobEntry.getOutputFormatClass() ); selectedNamedCluster = jobEntry.getNamedCluster(); setNumMapTasks( jobEntry.getNumMapTasks() ); setNumReduceTasks( jobEntry.getNumReduceTasks() ); if ( Spoon.getInstance().getRepository() != null ) { storageType = REPOSITORY; } else { storageType = LOCAL; } } } public void setShell( Shell shell ) { this.shell = shell; } public void closeErrorDialog() { XulDialog errorDialog = (XulDialog) getXulDomContainer().getDocumentRoot().getElementById( "hadoop-error-dialog" ); errorDialog.hide(); } public void setRepository( Repository rep ) { this.rep = rep; } public void setJobMeta( JobMeta jobMeta ) { this.jobMeta = jobMeta; } public void cancel() { XulDialog xulDialog = (XulDialog) getXulDomContainer().getDocumentRoot().getElementById( "job-entry-dialog" ); Shell shell = (Shell) xulDialog.getRootObject(); if ( !shell.isDisposed() ) { WindowProperty winprop = new WindowProperty( shell ); PropsUI.getInstance().setScreen( winprop ); ( (Composite) xulDialog.getManagedObject() ).dispose(); shell.dispose(); } } private interface StringResultSetter { public void set( String val ); } private interface ObjectIdResultSetter { public void set( ObjectId val ); } public void mapTransBrowse() { if ( storageType.equalsIgnoreCase( LOCAL ) ) { //$NON-NLS-1$ browseLocalFilesystem( JobEntryHadoopTransJobExecutorController.this::setMapTrans, mapTrans ); } else if ( storageType.equalsIgnoreCase( REPOSITORY 
) ) { //$NON-NLS-1$ browseRepository( JobEntryHadoopTransJobExecutorController.this::setMapTrans ); } } public void combinerTransBrowse() { if ( storageType.equalsIgnoreCase( LOCAL ) ) { //$NON-NLS-1$ browseLocalFilesystem( JobEntryHadoopTransJobExecutorController.this::setCombinerTrans, mapTrans ); } else if ( storageType.equalsIgnoreCase( REPOSITORY ) ) { //$NON-NLS-1$ browseRepository( JobEntryHadoopTransJobExecutorController.this::setCombinerTrans ); } } public void reduceTransBrowse() { if ( storageType.equalsIgnoreCase( LOCAL ) ) { //$NON-NLS-1$ browseLocalFilesystem( JobEntryHadoopTransJobExecutorController.this::setReduceTrans, mapTrans ); } else if ( storageType.equalsIgnoreCase( REPOSITORY ) ) { //$NON-NLS-1$ browseRepository( JobEntryHadoopTransJobExecutorController.this::setReduceTrans ); } } public void browseLocalFilesystem( StringResultSetter setter, String originalTransformationName ) { Shell shell = getJobEntryDialog(); FileDialog dialog = new FileDialog( shell, SWT.OPEN ); dialog.setFilterExtensions( Const.STRING_TRANS_FILTER_EXT ); dialog.setFilterNames( Const.getTransformationFilterNames() ); String prevName = jobEntry.environmentSubstitute( originalTransformationName ); String parentFolder = null; Spoon spoon = Spoon.getInstance(); try { parentFolder = KettleVFS.getFilename( KettleVFS.getInstance( spoon.getExecutionBowl() ) .getFileObject( jobEntry.environmentSubstitute( jobEntry.getFilename() ) ).getParent() ); } catch ( Exception e ) { // not that important } if ( !StringUtils.isEmpty( prevName ) ) { try { if ( KettleVFS.getInstance( spoon.getExecutionBowl() ).fileExists( prevName ) ) { dialog.setFilterPath( KettleVFS.getFilename( KettleVFS.getInstance( spoon.getExecutionBowl() ) .getFileObject( prevName ).getParent() ) ); } else { if ( !prevName.endsWith( ".ktr" ) ) { prevName = "${" + Const.INTERNAL_VARIABLE_JOB_FILENAME_DIRECTORY + "}/" + Const.trim( originalTransformationName ) + ".ktr"; } if ( KettleVFS.getInstance( spoon.getExecutionBowl() ).fileExists( prevName ) ) { setter.set( prevName ); return; } } } catch ( Exception e ) { dialog.setFilterPath( parentFolder ); } } else if ( !StringUtils.isEmpty( parentFolder ) ) { dialog.setFilterPath( parentFolder ); } String fname = dialog.open(); if ( fname != null ) { File file = new File( fname ); String name = file.getName(); String parentFolderSelection = file.getParentFile().toString(); if ( !StringUtils.isEmpty( parentFolder ) && parentFolder.equals( parentFolderSelection ) ) { setter.set( "${" + Const.INTERNAL_VARIABLE_JOB_FILENAME_DIRECTORY + "}/" + name ); } else { setter.set( fname ); } } } private void browseRepository( StringResultSetter transSetter ) { if ( rep != null ) { Shell shell = getJobEntryDialog(); SelectObjectDialog sod = new SelectObjectDialog( shell, rep, true, false ); String transname = sod.open(); if ( transname != null ) { if ( transSetter != null ) { transSetter.set( buildRepositoryPath( sod.getDirectory().getPath(), sod.getObjectName() ) ); } } } } /** * This method exists for consistency * * @param dir * Null is unacceptable input, a blank string will be returned * @param file * Null is unacceptable input, a blank string will be returned * @return */ private String buildRepositoryPath( String dir, String file ) { if ( dir == null || file == null ) { return ""; } if ( dir.endsWith( "/" ) ) { return dir + file; } return dir + "/" + file; } public void newUserDefinedItem() { userDefined.add( new UserDefinedItem() ); } public AbstractModelList getUserDefined() { return userDefined; } 
@Override public String getName() { return "jobEntryController"; //$NON-NLS-1$ } public String getJobEntryName() { return jobEntryName; } public void setJobEntryName( String jobEntryName ) { String previousVal = this.jobEntryName; String newVal = jobEntryName; this.jobEntryName = jobEntryName; firePropertyChange( JobEntryHadoopTransJobExecutorController.JOB_ENTRY_NAME, previousVal, newVal ); } public String getHadoopJobName() { return hadoopJobName; } public void setHadoopJobName( String hadoopJobName ) { String previousVal = this.hadoopJobName; String newVal = hadoopJobName; this.hadoopJobName = hadoopJobName; firePropertyChange( JobEntryHadoopTransJobExecutorController.HADOOP_JOB_NAME, previousVal, newVal ); } public String getMapTrans() { return mapTrans; } public void setMapTrans( String mapTrans ) { String previousVal = this.mapTrans; String newVal = mapTrans; this.mapTrans = mapTrans; firePropertyChange( JobEntryHadoopTransJobExecutorController.MAP_TRANS, previousVal, newVal ); } public String getCombinerTrans() { return combinerTrans; } public void setCombinerTrans( String combinerTrans ) { String previousVal = this.combinerTrans; String newVal = combinerTrans; this.combinerTrans = combinerTrans; firePropertyChange( JobEntryHadoopTransJobExecutorController.COMBINER_TRANS, previousVal, newVal ); } public String getReduceTrans() { return reduceTrans; } public void setReduceTrans( String reduceTrans ) { String previousVal = this.reduceTrans; String newVal = reduceTrans; this.reduceTrans = reduceTrans; firePropertyChange( JobEntryHadoopTransJobExecutorController.REDUCE_TRANS, previousVal, newVal ); } public String getMapTransInputStepName() { return mapTransInputStepName; } public void setMapTransInputStepName( String mapTransInputStepName ) { String previousVal = this.mapTransInputStepName; String newVal = mapTransInputStepName; this.mapTransInputStepName = mapTransInputStepName; firePropertyChange( JobEntryHadoopTransJobExecutorController.MAP_TRANS_INPUT_STEP_NAME, previousVal, newVal ); } public String getMapTransOutputStepName() { return mapTransOutputStepName; } public void setMapTransOutputStepName( String mapTransOutputStepName ) { String previousVal = this.mapTransOutputStepName; String newVal = mapTransOutputStepName; this.mapTransOutputStepName = mapTransOutputStepName; firePropertyChange( JobEntryHadoopTransJobExecutorController.MAP_TRANS_OUTPUT_STEP_NAME, previousVal, newVal ); } public String getCombinerTransInputStepName() { return combinerTransInputStepName; } public void setCombinerTransInputStepName( String combinerTransInputStepName ) { String previousVal = this.combinerTransInputStepName; String newVal = combinerTransInputStepName; this.combinerTransInputStepName = combinerTransInputStepName; firePropertyChange( JobEntryHadoopTransJobExecutorController.COMBINER_TRANS_INPUT_STEP_NAME, previousVal, newVal ); } public String getCombinerTransOutputStepName() { return combinerTransOutputStepName; } public void setCombinerTransOutputStepName( String combinerTransOutputStepName ) { String previousVal = this.combinerTransOutputStepName; String newVal = combinerTransOutputStepName; this.combinerTransOutputStepName = combinerTransOutputStepName; firePropertyChange( JobEntryHadoopTransJobExecutorController.COMBINER_TRANS_OUTPUT_STEP_NAME, previousVal, newVal ); } public String getReduceTransInputStepName() { return reduceTransInputStepName; } public void setReduceTransInputStepName( String reduceTransInputStepName ) { String previousVal = this.reduceTransInputStepName; String 
newVal = reduceTransInputStepName; this.reduceTransInputStepName = reduceTransInputStepName; firePropertyChange( JobEntryHadoopTransJobExecutorController.REDUCE_TRANS_INPUT_STEP_NAME, previousVal, newVal ); } public String getReduceTransOutputStepName() { return reduceTransOutputStepName; } public void setReduceTransOutputStepName( String reduceTransOutputStepName ) { String previousVal = this.reduceTransOutputStepName; String newVal = reduceTransOutputStepName; this.reduceTransOutputStepName = reduceTransOutputStepName; firePropertyChange( JobEntryHadoopTransJobExecutorController.REDUCE_TRANS_OUTPUT_STEP_NAME, previousVal, newVal ); } public void invertBlocking() { setBlocking( !isBlocking() ); } public JobEntryHadoopTransJobExecutor getJobEntry() { return jobEntry; } public void setJobEntry( JobEntryHadoopTransJobExecutor jobEntry ) { this.jobEntry = jobEntry; } public void invertSuppressOutputOfMapKey() { setSuppressOutputOfMapKey( !isSuppressOutputOfMapKey() ); } public boolean isSuppressOutputOfMapKey() { return this.suppressOutputMapKey; } public void setSuppressOutputOfMapKey( boolean suppress ) { boolean previousVal = this.suppressOutputMapKey; boolean newVal = suppress; this.suppressOutputMapKey = suppress; firePropertyChange( SUPPRESS_OUTPUT_MAP_KEY, previousVal, newVal ); } public void invertSuppressOutputOfMapValue() { setSuppressOutputOfMapValue( !isSuppressOutputOfMapValue() ); } public boolean isSuppressOutputOfMapValue() { return this.suppressOutputMapValue; } public void setSuppressOutputOfMapValue( boolean suppress ) { boolean previousVal = this.suppressOutputMapValue; boolean newVal = suppress; this.suppressOutputMapValue = suppress; firePropertyChange( SUPPRESS_OUTPUT_MAP_VALUE, previousVal, newVal ); } public void invertSuppressOutputOfKey() { setSuppressOutputOfKey( !isSuppressOutputOfKey() ); } public boolean isSuppressOutputOfKey() { return this.suppressOutputKey; } public void setSuppressOutputOfKey( boolean suppress ) { boolean previousVal = this.suppressOutputKey; boolean newVal = suppress; this.suppressOutputKey = suppress; firePropertyChange( SUPPRESS_OUTPUT_KEY, previousVal, newVal ); } public void invertSuppressOutputOfValue() { setSuppressOutputOfValue( !isSuppressOutputOfValue() ); } public boolean isSuppressOutputOfValue() { return this.suppressOutputValue; } public void setSuppressOutputOfValue( boolean suppress ) { boolean previousVal = this.suppressOutputValue; boolean newVal = suppress; this.suppressOutputValue = suppress; firePropertyChange( SUPPRESS_OUTPUT_VALUE, previousVal, newVal ); } public String getInputFormatClass() { return inputFormatClass; } public void setInputFormatClass( String inputFormatClass ) { String previousVal = this.inputFormatClass; String newVal = inputFormatClass; this.inputFormatClass = inputFormatClass; firePropertyChange( INPUT_FORMAT_CLASS, previousVal, newVal ); } public String getOutputFormatClass() { return outputFormatClass; } public void setOutputFormatClass( String outputFormatClass ) { String previousVal = this.outputFormatClass; String newVal = outputFormatClass; this.outputFormatClass = outputFormatClass; firePropertyChange( OUTPUT_FORMAT_CLASS, previousVal, newVal ); } public String getInputPath() { return inputPath; } public void setInputPath( String inputPath ) { String previousVal = this.inputPath; String newVal = inputPath; this.inputPath = inputPath; firePropertyChange( INPUT_PATH, previousVal, newVal ); } public String getOutputPath() { return outputPath; } public void setOutputPath( String outputPath ) { 
String previousVal = this.outputPath; String newVal = outputPath; this.outputPath = outputPath; firePropertyChange( OUTPUT_PATH, previousVal, newVal ); } public void invertCleanOutputPath() { setCleanOutputPath( !isCleanOutputPath() ); } public boolean isCleanOutputPath() { return cleanOutputPath; } public void setCleanOutputPath( boolean cleanOutputPath ) { boolean old = this.cleanOutputPath; this.cleanOutputPath = cleanOutputPath; firePropertyChange( CLEAN_OUTPUT_PATH, old, this.cleanOutputPath ); } public boolean isBlocking() { return blocking; } public void setBlocking( boolean blocking ) { boolean previousVal = this.blocking; boolean newVal = blocking; this.blocking = blocking; firePropertyChange( BLOCKING, previousVal, newVal ); } public void setReducingSingleThreaded( boolean reducingSingleThreaded ) { boolean previousVal = this.reducingSingleThreaded; boolean newVal = reducingSingleThreaded; this.reducingSingleThreaded = reducingSingleThreaded; firePropertyChange( REDUCING_SINGLE_THREADED, previousVal, newVal ); } public String getLoggingInterval() { return loggingInterval; } public void setLoggingInterval( String loggingInterval ) { String previousVal = this.loggingInterval; String newVal = loggingInterval; this.loggingInterval = loggingInterval; firePropertyChange( LOGGING_INTERVAL, previousVal, newVal ); } public String getNumMapTasks() { return numMapTasks; } public void setNumMapTasks( String numMapTasks ) { String previousVal = this.numMapTasks; String newVal = numMapTasks; this.numMapTasks = numMapTasks; firePropertyChange( NUM_MAP_TASKS, previousVal, newVal ); } public String getNumReduceTasks() { return numReduceTasks; } public void setNumReduceTasks( String numReduceTasks ) { String previousVal = this.numReduceTasks; String newVal = numReduceTasks; this.numReduceTasks = numReduceTasks; firePropertyChange( NUM_REDUCE_TASKS, previousVal, newVal ); } public List getNamedClusters() throws MetaStoreException { return namedClusterService.list( jobMeta.getMetaStore() ); } public void setNamedClusters( List namedClusters ) { this.namedClusters = namedClusters; } public void openErrorDialog( String title, String message ) { XulDialog errorDialog = (XulDialog) getXulDomContainer().getDocumentRoot().getElementById( "hadoop-error-dialog" ); errorDialog.setTitle( title ); XulTextbox errorMessage = (XulTextbox) getXulDomContainer().getDocumentRoot().getElementById( "hadoop-error-message" ); errorMessage.setValue( message ); errorDialog.show(); } public void invertReducingSingleThreaded() { setReducingSingleThreaded( !isReducingSingleThreaded() ); } public boolean isReducingSingleThreaded() { return reducingSingleThreaded; } public void invertCombiningSingleThreaded() { setCombiningSingleThreaded( !isCombiningSingleThreaded() ); } public boolean isCombiningSingleThreaded() { return combiningSingleThreaded; } public void setCombiningSingleThreaded( boolean combiningSingleThreaded ) { boolean old = this.combiningSingleThreaded; this.combiningSingleThreaded = combiningSingleThreaded; firePropertyChange( COMBINING_SINGLE_THREADED, old, this.combiningSingleThreaded ); } public void help() { Shell shell = getJobEntryDialog(); PluginInterface plugin = PluginRegistry.getInstance().findPluginWithId( JobEntryPluginType.class, jobEntry.getPluginId() ); HelpUtils.openHelpDialog( shell, plugin ); } public void editNamedCluster() throws MetaStoreException { if ( isSelectedNamedCluster() ) { String newNcName = ncDelegate.editNamedCluster( null, getSelectedNamedCluster(), getJobEntryDialog() ); if ( 
newNcName != null ) { //cancel button on editing pressed, clusters not changed namedClustersChanged(); selectedNamedClusterChanged( getNamedClusterName( getSelectedNamedCluster() ), newNcName ); } } } public void newNamedCluster() throws MetaStoreException { String newNcName = ncDelegate.newNamedCluster( jobMeta, null, getJobEntryDialog() ); if ( newNcName != null ) { //cancel button on editing pressed, clusters not changed namedClustersChanged(); selectedNamedClusterChanged( getNamedClusterName( getSelectedNamedCluster() ), newNcName ); } } private Shell getJobEntryDialog() { XulDialog xulDialog = (XulDialog) getXulDomContainer().getDocumentRoot().getElementById( "job-entry-dialog" ); Shell shell = (Shell) xulDialog.getRootObject(); return shell; } private String getNamedClusterName( NamedCluster namedCluster ) { return namedCluster != null ? namedCluster.getName() : null; } /** * Reports the named clusters list has been changed. * * @throws MetaStoreException * if the exception occurs */ @VisibleForTesting void namedClustersChanged() throws MetaStoreException { firePropertyChange( "namedClusters", null, getNamedClusters() ); } /** * Reports that the selected named cluster has been changed. * * @param ncVal * the old value of the selected named cluster * @param newNcVal * the new value of the selected named cluster * @throws MetaStoreException * if the exception occurs */ @VisibleForTesting void selectedNamedClusterChanged( String ncVal, String newNcVal ) throws MetaStoreException { if ( newNcVal != null ) { ncVal = newNcVal; } if ( ncVal != null ) { for ( NamedCluster nc : getNamedClusters() ) { if ( nc.getName().equals( ncVal ) ) { firePropertyChange( "selectedNamedCluster", null, nc ); return; } } } } public void setSelectedNamedCluster( NamedCluster namedCluster ) { this.selectedNamedCluster = namedCluster; } public NamedCluster getSelectedNamedCluster() { return this.selectedNamedCluster; } public boolean isSelectedNamedCluster() { return this.selectedNamedCluster != null; } } ================================================ FILE: kettle-plugins/mapreduce/core/src/main/java/org/pentaho/big/data/kettle/plugins/mapreduce/ui/entry/pmr/JobEntryHadoopTransJobExecutorDialog.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. 
* * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.mapreduce.ui.entry.pmr; import org.dom4j.DocumentException; import org.eclipse.swt.widgets.Shell; import org.pentaho.di.core.annotations.PluginDialog; import org.pentaho.hadoop.shim.api.cluster.NamedCluster; import org.pentaho.big.data.kettle.plugins.mapreduce.entry.pmr.JobEntryHadoopTransJobExecutor; import org.pentaho.big.data.plugins.common.ui.HadoopClusterDelegateImpl; import org.pentaho.di.i18n.BaseMessages; import org.pentaho.di.job.JobMeta; import org.pentaho.di.job.entry.JobEntryDialogInterface; import org.pentaho.di.job.entry.JobEntryInterface; import org.pentaho.di.repository.Repository; import org.pentaho.di.ui.job.entry.JobEntryDialog; import org.pentaho.di.ui.spoon.Spoon; import org.pentaho.metastore.api.exceptions.MetaStoreException; import org.pentaho.ui.xul.XulDomContainer; import org.pentaho.ui.xul.XulException; import org.pentaho.ui.xul.XulRunner; import org.pentaho.ui.xul.binding.Binding.Type; import org.pentaho.ui.xul.binding.BindingConvertor; import org.pentaho.ui.xul.binding.BindingFactory; import org.pentaho.ui.xul.binding.DefaultBindingFactory; import org.pentaho.ui.xul.components.XulMenuList; import org.pentaho.ui.xul.components.XulTextbox; import org.pentaho.ui.xul.containers.XulDialog; import org.pentaho.ui.xul.containers.XulTree; import org.pentaho.ui.xul.swt.SwtXulLoader; import org.pentaho.ui.xul.swt.SwtXulRunner; import java.util.Collections; import java.util.Enumeration; import java.util.List; import java.util.ResourceBundle; @PluginDialog( id = "HadoopTransJobExecutorPlugin", image = "HDT.svg", pluginType = PluginDialog.PluginType.JOBENTRY, documentationUrl = "pdi-job-entries-reference-overview/pentaho-mapreduce" ) public class JobEntryHadoopTransJobExecutorDialog extends JobEntryDialog implements JobEntryDialogInterface { private static final Class CLZ = JobEntryHadoopTransJobExecutor.class; private JobEntryHadoopTransJobExecutor jobEntry; private final JobEntryHadoopTransJobExecutorController controller; private XulDomContainer container; private BindingFactory bf; private ResourceBundle bundle = new ResourceBundle() { @Override public Enumeration getKeys() { return null; } @Override protected Object handleGetObject( String key ) { return BaseMessages.getString( CLZ, key ); } }; public JobEntryHadoopTransJobExecutorDialog( Shell parent, JobEntryInterface jobEntry, Repository rep, JobMeta jobMeta ) throws XulException, DocumentException, Throwable { super( parent, jobEntry, rep, jobMeta ); this.jobEntry = (JobEntryHadoopTransJobExecutor) jobEntry; controller = new JobEntryHadoopTransJobExecutorController( new HadoopClusterDelegateImpl( Spoon .getInstance(), this.jobEntry.getNamedClusterService(), this.jobEntry.getRuntimeTestActionService(), this.jobEntry.getRuntimeTester() ), this.jobEntry.getNamedClusterService() ); SwtXulLoader swtXulLoader = new SwtXulLoader(); swtXulLoader.registerClassLoader( getClass().getClassLoader() ); swtXulLoader.register( "VARIABLETEXTBOX", "org.pentaho.di.ui.core.database.dialog.tags.ExtTextbox" ); swtXulLoader.setOuterContext( shell ); container = swtXulLoader.loadXul( "org/pentaho/big/data/kettle/plugins/mapreduce/ui/entry/JobEntryHadoopTransJobExecutorDialog.xul", bundle ); //$NON-NLS-1$ final XulRunner runner = new SwtXulRunner(); runner.addContainer( container ); container.addEventHandler( controller ); bf = new DefaultBindingFactory(); bf.setDocument( 
container.getDocumentRoot() ); bf.setBindingType( Type.BI_DIRECTIONAL ); bf.createBinding( "jobentry-name", "value", controller, JobEntryHadoopTransJobExecutorController.JOB_ENTRY_NAME ); //$NON-NLS-1$ //$NON-NLS-2$ bf.createBinding( "jobentry-hadoopjob-name", "value", controller, JobEntryHadoopTransJobExecutorController.HADOOP_JOB_NAME ); //$NON-NLS-1$ //$NON-NLS-2$ bf.createBinding( "jobentry-map-transformation", "value", controller, JobEntryHadoopTransJobExecutorController.MAP_TRANS ); //$NON-NLS-1$ //$NON-NLS-2$ bf.createBinding( "jobentry-combiner-transformation", "value", controller, JobEntryHadoopTransJobExecutorController.COMBINER_TRANS ); //$NON-NLS-1$ //$NON-NLS-2$ bf.createBinding( "jobentry-reduce-transformation", "value", controller, JobEntryHadoopTransJobExecutorController.REDUCE_TRANS ); //$NON-NLS-1$ //$NON-NLS-2$ bf.createBinding( "jobentry-map-input-stepname", "value", controller, JobEntryHadoopTransJobExecutorController.MAP_TRANS_INPUT_STEP_NAME ); //$NON-NLS-1$ //$NON-NLS-2$ bf.createBinding( "jobentry-map-output-stepname", "value", controller, JobEntryHadoopTransJobExecutorController.MAP_TRANS_OUTPUT_STEP_NAME ); //$NON-NLS-1$ //$NON-NLS-2$ bf.createBinding( "jobentry-combiner-input-stepname", "value", controller, JobEntryHadoopTransJobExecutorController.COMBINER_TRANS_INPUT_STEP_NAME ); //$NON-NLS-1$ //$NON-NLS-2$ bf.createBinding( "jobentry-combiner-output-stepname", "value", controller, JobEntryHadoopTransJobExecutorController.COMBINER_TRANS_OUTPUT_STEP_NAME ); //$NON-NLS-1$ //$NON-NLS-2$ bf.createBinding( "jobentry-combiner-single-threaded", "selected", controller, JobEntryHadoopTransJobExecutorController.COMBINING_SINGLE_THREADED ); //$NON-NLS-1$ //$NON-NLS-2$ bf.createBinding( "jobentry-reduce-input-stepname", "value", controller, JobEntryHadoopTransJobExecutorController.REDUCE_TRANS_INPUT_STEP_NAME ); //$NON-NLS-1$ //$NON-NLS-2$ bf.createBinding( "jobentry-reduce-output-stepname", "value", controller, JobEntryHadoopTransJobExecutorController.REDUCE_TRANS_OUTPUT_STEP_NAME ); //$NON-NLS-1$ //$NON-NLS-2$ bf.createBinding( "jobentry-reduce-single-threaded", "selected", controller, JobEntryHadoopTransJobExecutorController.REDUCING_SINGLE_THREADED ); //$NON-NLS-1$ //$NON-NLS-2$ bf.createBinding( "classes-suppress-output-map-key", "selected", controller, JobEntryHadoopTransJobExecutorController.SUPPRESS_OUTPUT_MAP_KEY ); //$NON-NLS-1$ //$NON-NLS-2$ bf.createBinding( "classes-suppress-output-map-value", "selected", controller, JobEntryHadoopTransJobExecutorController.SUPPRESS_OUTPUT_MAP_VALUE ); //$NON-NLS-1$ //$NON-NLS-2$ bf.createBinding( "classes-suppress-output-key", "selected", controller, JobEntryHadoopTransJobExecutorController.SUPPRESS_OUTPUT_KEY ); //$NON-NLS-1$ //$NON-NLS-2$ bf.createBinding( "classes-suppress-output-value", "selected", controller, JobEntryHadoopTransJobExecutorController.SUPPRESS_OUTPUT_VALUE ); //$NON-NLS-1$ //$NON-NLS-2$ bf.createBinding( "classes-input-format", "value", controller, JobEntryHadoopTransJobExecutorController.INPUT_FORMAT_CLASS ); //$NON-NLS-1$ //$NON-NLS-2$ bf.createBinding( "classes-output-format", "value", controller, JobEntryHadoopTransJobExecutorController.OUTPUT_FORMAT_CLASS ); //$NON-NLS-1$ //$NON-NLS-2$ /* * final BindingConvertor bindingConverter = new BindingConvertor() { * * public Integer sourceToTarget(String value) { return Integer.parseInt(value); } * * public String targetToSource(Integer value) { return value.toString(); } * * }; */ bf.createBinding( "num-map-tasks", "value", controller, 
JobEntryHadoopTransJobExecutorController.NUM_MAP_TASKS ); //$NON-NLS-1$ //$NON-NLS-2$ bf.createBinding( "num-reduce-tasks", "value", controller, JobEntryHadoopTransJobExecutorController.NUM_REDUCE_TASKS ); //$NON-NLS-1$ //$NON-NLS-2$ bf.createBinding( "blocking", "selected", controller, JobEntryHadoopTransJobExecutorController.BLOCKING ); //$NON-NLS-1$ //$NON-NLS-2$ bf.createBinding( "logging-interval", "value", controller, JobEntryHadoopTransJobExecutorController.LOGGING_INTERVAL ); //$NON-NLS-1$ //$NON-NLS-2$ bf.createBinding( "input-path", "value", controller, JobEntryHadoopTransJobExecutorController.INPUT_PATH ); //$NON-NLS-1$ //$NON-NLS-2$ bf.createBinding( "output-path", "value", controller, JobEntryHadoopTransJobExecutorController.OUTPUT_PATH ); //$NON-NLS-1$ //$NON-NLS-2$ bf.createBinding( "clean-output-path", "selected", controller, JobEntryHadoopTransJobExecutorController.CLEAN_OUTPUT_PATH ); //$NON-NLS-1$ //$NON-NLS-2$ XulTree variablesTree = (XulTree) container.getDocumentRoot().getElementById( "fields-table" ); //$NON-NLS-1$ bf.setBindingType( Type.ONE_WAY ); bf.createBinding( controller.getUserDefined(), "children", variablesTree, "elements" ); //$NON-NLS-1$//$NON-NLS-2$ bf.setBindingType( Type.BI_DIRECTIONAL ); XulTextbox loggingInterval = (XulTextbox) container.getDocumentRoot().getElementById( "logging-interval" ); //$NON-NLS-1$ loggingInterval.setValue( "" + controller.getLoggingInterval() ); //$NON-NLS-1$ XulTextbox mapTasks = (XulTextbox) container.getDocumentRoot().getElementById( "num-map-tasks" ); //$NON-NLS-1$ mapTasks.setValue( "" + controller.getNumMapTasks() ); //$NON-NLS-1$ XulTextbox reduceTasks = (XulTextbox) container.getDocumentRoot().getElementById( "num-reduce-tasks" ); //$NON-NLS-1$ reduceTasks.setValue( "" + controller.getNumReduceTasks() ); //$NON-NLS-1$ controller.setJobEntry( (JobEntryHadoopTransJobExecutor) jobEntry ); controller.setShell( parent ); controller.setRepository( rep ); controller.setJobMeta( jobMeta ); controller.init(); bf.createBinding( controller, "namedClusters", "named-clusters", "elements" ).fireSourceChanged(); bf.createBinding( "named-clusters", "selectedIndex", controller, "selectedNamedCluster", new BindingConvertor() { public NamedCluster sourceToTarget( final Integer index ) { List clusters = Collections.emptyList(); try { clusters = controller.getNamedClusters(); } catch ( MetaStoreException e ) { // Ignore } if ( index == -1 || clusters.isEmpty() ) { return null; } return clusters.get( index ); } public Integer targetToSource( final NamedCluster value ) { List clusters = Collections.emptyList(); try { clusters = controller.getNamedClusters(); } catch ( MetaStoreException e ) { // Ignore } return clusters.indexOf( value ); } } ).fireSourceChanged(); selectNamedCluster(); } private void selectNamedCluster() throws MetaStoreException { @SuppressWarnings( "unchecked" ) XulMenuList namedClusterMenu = (XulMenuList) container.getDocumentRoot().getElementById( "named-clusters" ); //$NON-NLS-1$ String cn = null; NamedCluster namedCluster = jobEntry.getNamedCluster(); if ( namedCluster != null ) { cn = namedCluster.getName(); } for ( NamedCluster nc : controller.getNamedClusters() ) { if ( cn != null && cn.equals( nc.getName() ) ) { namedClusterMenu.setSelectedItem( nc ); controller.setSelectedNamedCluster( nc ); } } } public JobEntryInterface open() { XulDialog dialog = (XulDialog) container.getDocumentRoot().getElementById( "job-entry-dialog" ); //$NON-NLS-1$ dialog.show(); return jobEntry; } } 
================================================ FILE: kettle-plugins/mapreduce/core/src/main/java/org/pentaho/big/data/kettle/plugins/mapreduce/ui/step/enter/HadoopEnterDialog.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.mapreduce.ui.step.enter; import org.eclipse.swt.widgets.Shell; import org.pentaho.di.core.annotations.PluginDialog; import org.pentaho.big.data.kettle.plugins.mapreduce.step.enter.HadoopEnterMeta; import org.pentaho.di.core.row.ValueMeta; import org.pentaho.di.core.util.StringUtil; import org.pentaho.di.i18n.BaseMessages; import org.pentaho.di.trans.TransMeta; import org.pentaho.di.trans.step.BaseStepMeta; import org.pentaho.di.trans.step.StepDialogInterface; import org.pentaho.di.ui.trans.step.BaseStepXulDialog; import org.pentaho.ui.xul.binding.Binding; import org.pentaho.ui.xul.components.XulMenuList; import org.pentaho.ui.xul.components.XulTextbox; import java.util.ArrayList; import java.util.List; @PluginDialog( id = "HadoopEnterPlugin", image = "MRI.svg", pluginType = PluginDialog.PluginType.STEP, documentationUrl = "pdi-transformation-steps-reference-overview/mapreduce-input" ) public class HadoopEnterDialog extends BaseStepXulDialog implements StepDialogInterface { private static final Class PKG = HadoopEnterMeta.class; private String workingStepname; private HadoopEnterMetaMapper metaMapper; private List typeList; public HadoopEnterDialog( Shell parent, Object in, TransMeta tr, String sname ) throws Throwable { super( "org/pentaho/big/data/kettle/plugins/mapreduce/ui/step/enter/dialog.xul", parent, (BaseStepMeta) in, tr, sname ); typeList = new ArrayList(); for ( String type : ValueMeta.getAllTypes() ) { typeList.add( type ); } init(); } public void init() throws Throwable { workingStepname = stepname; metaMapper = new HadoopEnterMetaMapper(); metaMapper.loadMeta( (HadoopEnterMeta) baseStepMeta ); bf.setBindingType( Binding.Type.ONE_WAY ); setTextBoxValue( "input-key-length", metaMapper.getInKeyLength() ); setTextBoxValue( "input-key-precision", metaMapper.getInKeyPrecision() ); setTextBoxValue( "input-value-length", metaMapper.getInValueLength() ); setTextBoxValue( "input-value-precision", metaMapper.getInValuePrecision() ); bf.createBinding( "step-name", "value", this, "stepName" ); bf.createBinding( this, "stepName", "step-name", "value" ).fireSourceChanged(); bf.createBinding( this, "types", "input-key-type", "elements" ).fireSourceChanged(); bf.createBinding( this, "types", "input-value-type", "elements" ).fireSourceChanged(); if ( metaMapper.getInKeyType() >= 0 ) { ( (XulMenuList) getXulDomContainer().getDocumentRoot().getElementById( "input-key-type" ) ) .setSelectedItem( ValueMeta.getTypeDesc( metaMapper.getInKeyType() ) ); } if ( metaMapper.getInValueType() >= 0 ) { ( (XulMenuList) getXulDomContainer().getDocumentRoot().getElementById( "input-value-type" ) ) .setSelectedItem( ValueMeta.getTypeDesc( metaMapper.getInValueType() ) ); } } @Override protected Class getClassForMessages() { return HadoopEnterMeta.class; } @Override public void onAccept() { metaMapper.setInKeyType( fetchValue( (XulMenuList) 
getXulDomContainer().getDocumentRoot().getElementById( "input-key-type" ) ) ); metaMapper.setInKeyLength( fetchValue( (XulTextbox) getXulDomContainer().getDocumentRoot().getElementById( "input-key-length" ) ) ); metaMapper.setInKeyPrecision( fetchValue( (XulTextbox) getXulDomContainer().getDocumentRoot().getElementById( "input-key-precision" ) ) ); metaMapper.setInValueType( fetchValue( (XulMenuList) getXulDomContainer().getDocumentRoot().getElementById( "input-value-type" ) ) ); metaMapper.setInValueLength( fetchValue( (XulTextbox) getXulDomContainer().getDocumentRoot().getElementById( "input-value-length" ) ) ); metaMapper.setInValuePrecision( fetchValue( (XulTextbox) getXulDomContainer().getDocumentRoot().getElementById( "input-value-precision" ) ) ); if ( !workingStepname.equals( stepname ) ) { stepname = workingStepname; baseStepMeta.setChanged(); } metaMapper.saveMeta( (HadoopEnterMeta) baseStepMeta ); dispose(); } private int fetchValue( XulTextbox textbox ) { int result = -1; if ( textbox != null && !StringUtil.isEmpty( textbox.getValue() ) ) { try { result = Integer.parseInt( textbox.getValue() ); } catch ( NumberFormatException e ) { log.logError( BaseMessages.getString( "HadoopEnter.Error.ParseInteger", textbox.getValue() ) ); } } return result; } private int fetchValue( XulMenuList menulist ) { int result = -1; if ( menulist != null && menulist.getValue() != null ) { result = ValueMeta.getType( menulist.getValue() ); } return result; } private void setTextBoxValue( String textbox, int value ) { String v = ""; if ( value >= 0 ) { v = Integer.toString( value ); } ( (XulTextbox) getXulDomContainer().getDocumentRoot().getElementById( textbox ) ).setValue( v ); } @Override public void onCancel() { setStepName( null ); dispose(); } public void setStepName( String stepname ) { workingStepname = stepname; } public String getStepName() { return workingStepname; } public List getTypes() { return typeList; } } ================================================ FILE: kettle-plugins/mapreduce/core/src/main/java/org/pentaho/big/data/kettle/plugins/mapreduce/ui/step/enter/HadoopEnterMetaMapper.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. 
* * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.mapreduce.ui.step.enter; import org.pentaho.big.data.kettle.plugins.mapreduce.step.enter.HadoopEnterMeta; import org.pentaho.ui.xul.XulEventSourceAdapter; public class HadoopEnterMetaMapper extends XulEventSourceAdapter { private class FieldPositions { private int key; private int value; public FieldPositions( String[] fieldnames ) { setKeyIndex( -1 ); setValueIndex( -1 ); // Determine the key and value field indices if ( fieldnames != null && fieldnames.length == 2 ) { for ( int index = 0; index < fieldnames.length; index++ ) { if ( fieldnames[index].equals( HadoopEnterMeta.KEY_FIELDNAME ) ) { setKeyIndex( index ); } else if ( fieldnames[index].equals( HadoopEnterMeta.VALUE_FIELDNAME ) ) { setValueIndex( index ); } } } } public void setKeyIndex( int key ) { this.key = key; } public int getKeyIndex() { return key; } public void setValueIndex( int value ) { this.value = value; } public int getValueIndex() { return value; } public boolean isValid() { return ( ( getKeyIndex() >= 0 && getValueIndex() >= 0 ) && getKeyIndex() != getValueIndex() ); } } public static String IN_KEY_TYPE = "in-key-type"; public static String IN_KEY_LENGTH = "in-key-length"; public static String IN_KEY_PRECISION = "in-key-precision"; public static String IN_VALUE_TYPE = "in-value-type"; public static String IN_VALUE_LENGTH = "in-value-length"; public static String IN_VALUE_PRECISION = "in-value-precision"; private int inKeyType = -1; private int inKeyLength = -1; private int inKeyPrecision = -1; private int inValueType = -1; private int inValueLength = -1; private int inValuePrecision = -1; public void setInKeyType( int arg ) { int previousVal = inKeyType; inKeyType = arg; firePropertyChange( IN_KEY_TYPE, previousVal, inKeyType ); } public void setInKeyLength( int arg ) { int previousVal = inKeyLength; inKeyLength = arg; firePropertyChange( IN_KEY_LENGTH, previousVal, inKeyLength ); } public void setInKeyPrecision( int arg ) { int previousVal = inKeyPrecision; inKeyPrecision = arg; firePropertyChange( IN_KEY_PRECISION, previousVal, inKeyPrecision ); } public void setInValueType( int arg ) { int previousVal = inValueType; inValueType = arg; firePropertyChange( IN_VALUE_TYPE, previousVal, inValueType ); } public void setInValueLength( int arg ) { int previousVal = inValueLength; inValueLength = arg; firePropertyChange( IN_VALUE_LENGTH, previousVal, inValueLength ); } public void setInValuePrecision( int arg ) { int previousVal = inValuePrecision; inValuePrecision = arg; firePropertyChange( IN_VALUE_PRECISION, previousVal, inValuePrecision ); } public int getInKeyType() { return inKeyType; } public int getInKeyLength() { return inKeyLength; } public int getInKeyPrecision() { return inKeyPrecision; } public int getInValueType() { return inValueType; } public int getInValueLength() { return inValueLength; } public int getInValuePrecision() { return inValuePrecision; } /** * Load data into the MetaMapper from the HadoopExitMeta * * @param meta */ public void loadMeta( HadoopEnterMeta meta ) { FieldPositions fields = new FieldPositions( meta.getFieldname() ); if ( !fields.isValid() ) { // We require both the key and value fields to be present return; } int[] type = meta.getType(); int[] length = meta.getLength(); int[] precision = meta.getPrecision(); setInKeyType( type[fields.getKeyIndex()] ); setInKeyLength( length[fields.getKeyIndex()] ); setInKeyPrecision( 
precision[fields.getKeyIndex()] ); setInValueType( type[fields.getValueIndex()] ); setInValueLength( length[fields.getValueIndex()] ); setInValuePrecision( precision[fields.getValueIndex()] ); } /** * Save data from the MetaMapper into the HadoopEnterMeta * * @param meta */ public void saveMeta( HadoopEnterMeta meta ) { // Ensure the key / value field names are present FieldPositions fields = new FieldPositions( meta.getFieldname() ); if ( !fields.isValid() ) { // Replace the field names with the key / value names meta.allocate( 2 ); fields.setKeyIndex( 0 ); fields.setValueIndex( 1 ); ( meta.getFieldname() )[fields.getKeyIndex()] = HadoopEnterMeta.KEY_FIELDNAME; ( meta.getFieldname() )[fields.getValueIndex()] = HadoopEnterMeta.VALUE_FIELDNAME; meta.setChanged(); } int[] type = new int[2]; int[] length = new int[2]; int[] precision = new int[2]; // Set Types if ( getInKeyType() >= 0 ) { type[fields.getKeyIndex()] = getInKeyType(); } if ( getInValueType() >= 0 ) { type[fields.getValueIndex()] = getInValueType(); } int[] metaType = meta.getType(); if ( metaType == null || metaType.length != 2 ) { meta.setChanged(); } for ( int index = 0; index < type.length; index++ ) { if ( type[index] != metaType[index] ) { meta.setChanged( true ); } } meta.setType( type ); // Set Lengths if ( getInKeyLength() >= 0 ) { length[fields.getKeyIndex()] = getInKeyLength(); } if ( getInValueLength() >= 0 ) { length[fields.getValueIndex()] = getInValueLength(); } int[] metaLength = meta.getLength(); if ( metaLength == null || metaLength.length != 2 ) { meta.setChanged(); } for ( int index = 0; index < length.length; index++ ) { if ( length[index] != metaLength[index] ) { meta.setChanged( true ); } } meta.setLength( length ); // Set Precisions if ( getInKeyPrecision() >= 0 ) { precision[fields.getKeyIndex()] = getInKeyPrecision(); } if ( getInValuePrecision() >= 0 ) { precision[fields.getValueIndex()] = getInValuePrecision(); } int[] metaPrecision = meta.getPrecision(); if ( metaPrecision == null || metaPrecision.length != 2 ) { meta.setChanged(); } for ( int index = 0; index < precision.length; index++ ) { if ( precision[index] != metaPrecision[index] ) { meta.setChanged( true ); } } meta.setPrecision( precision ); } } ================================================ FILE: kettle-plugins/mapreduce/core/src/main/java/org/pentaho/big/data/kettle/plugins/mapreduce/ui/step/exit/HadoopExitDialog.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. 
* * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.mapreduce.ui.step.exit; import org.eclipse.swt.widgets.Shell; import org.pentaho.di.core.annotations.PluginDialog; import org.pentaho.big.data.kettle.plugins.mapreduce.step.exit.HadoopExit; import org.pentaho.big.data.kettle.plugins.mapreduce.step.exit.HadoopExitMeta; import org.pentaho.di.core.exception.KettleStepException; import org.pentaho.di.core.row.RowMetaInterface; import org.pentaho.di.core.row.ValueMeta; import org.pentaho.di.core.row.ValueMetaInterface; import org.pentaho.di.core.util.StringUtil; import org.pentaho.di.trans.TransMeta; import org.pentaho.di.trans.step.BaseStepMeta; import org.pentaho.di.trans.step.StepDialogInterface; import org.pentaho.di.ui.trans.step.BaseStepXulDialog; import org.pentaho.ui.xul.binding.Binding; import org.pentaho.ui.xul.components.XulMenuList; import java.util.ArrayList; import java.util.List; @PluginDialog( id = "HadoopExitPlugin", image = "MRO.svg", pluginType = PluginDialog.PluginType.STEP, documentationUrl = "pdi-transformation-steps-reference-overview/mapreduce-output" ) public class HadoopExitDialog extends BaseStepXulDialog implements StepDialogInterface { @SuppressWarnings( "unused" ) private static final Class PKG = HadoopExit.class; private XulMenuList outKeyFieldnames; private XulMenuList outValueFieldnames; private HadoopExitMetaMapper metaMapper; private String workingStepname; private List outKeyFields = new ArrayList(); private List outValueFields = new ArrayList(); public HadoopExitDialog( Shell parent, Object in, TransMeta tr, String sname ) throws Throwable { super( "org/pentaho/big/data/kettle/plugins/mapreduce/ui/step/exit/dialog.xul", parent, (BaseStepMeta) in, tr, sname ); init(); } public void init() throws Throwable { workingStepname = stepname; metaMapper = new HadoopExitMetaMapper(); metaMapper.loadMeta( (HadoopExitMeta) baseStepMeta ); // Get input fields to generate drop down lists RowMetaInterface inputRow = null; try { inputRow = transMeta.getPrevStepFields( stepMeta ); } catch ( KettleStepException e ) { // No previous step found, leave list empty } // Seed the lists with the previously selected fields: This is done first so the last selection is at the top if ( !StringUtil.isEmpty( metaMapper.getOutKeyFieldname() ) ) { outKeyFields.add( new ValueMeta( metaMapper.getOutKeyFieldname() ) ); } if ( !StringUtil.isEmpty( metaMapper.getOutValueFieldname() ) ) { outValueFields.add( new ValueMeta( metaMapper.getOutValueFieldname() ) ); } if ( inputRow != null ) { for ( ValueMetaInterface field : inputRow.getValueMetaList() ) { // Avoid adding duplicates if ( StringUtil.isEmpty( metaMapper.getOutKeyFieldname() ) || !metaMapper.getOutKeyFieldname().equals( field.getName() ) ) { outKeyFields.add( new ValueMeta( field.getName() ) ); } // Avoid adding duplicates if ( StringUtil.isEmpty( metaMapper.getOutValueFieldname() ) || !metaMapper.getOutValueFieldname().equals( field.getName() ) ) { outValueFields.add( new ValueMeta( field.getName() ) ); } } } // Populate outKey menulist bf.setBindingType( Binding.Type.ONE_WAY ); bf.createBinding( "step-name", "value", this, "stepName" ); bf.createBinding( this, "stepName", "step-name", "value" ).fireSourceChanged(); bf.createBinding( this, "outKeyFields", "output-key-fieldname", "elements" ).fireSourceChanged(); bf.createBinding( this, "outValueFields", "output-value-fieldname", "elements" ).fireSourceChanged(); outKeyFieldnames = 
(XulMenuList) getXulDomContainer().getDocumentRoot().getElementById( "output-key-fieldname" ); outValueFieldnames = (XulMenuList) getXulDomContainer().getDocumentRoot().getElementById( "output-value-fieldname" ); if ( ( outKeyFieldnames != null ) && ( outKeyFieldnames.getElements().size() > 0 ) ) { outKeyFieldnames.setSelectedIndex( 0 ); } if ( ( outValueFieldnames != null ) && ( outValueFieldnames.getElements().size() > 0 ) ) { outValueFieldnames.setSelectedIndex( 0 ); } } @Override protected Class getClassForMessages() { return HadoopExit.class; } @Override public void onAccept() { metaMapper.setOutKeyFieldname( outKeyFieldnames.getValue() ); metaMapper.setOutValueFieldname( outValueFieldnames.getValue() ); if ( !workingStepname.equals( stepname ) ) { stepname = workingStepname; baseStepMeta.setChanged(); } metaMapper.saveMeta( (HadoopExitMeta) baseStepMeta ); dispose(); } @Override public void onCancel() { setStepName( null ); dispose(); } public void setStepName( String stepname ) { workingStepname = stepname; } public String getStepName() { return workingStepname; } public List getOutKeyFields() { return outKeyFields; } public List getOutValueFields() { return outValueFields; } } ================================================ FILE: kettle-plugins/mapreduce/core/src/main/java/org/pentaho/big/data/kettle/plugins/mapreduce/ui/step/exit/HadoopExitMetaMapper.java ================================================ /*! ****************************************************************************** * * Pentaho * * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com * * Use of this software is governed by the Business Source License included * in the LICENSE.TXT file. * * Change Date: 2029-07-20 ******************************************************************************/ package org.pentaho.big.data.kettle.plugins.mapreduce.ui.step.exit; import org.pentaho.big.data.kettle.plugins.mapreduce.step.exit.HadoopExitMeta; import org.pentaho.ui.xul.XulEventSourceAdapter; public class HadoopExitMetaMapper extends XulEventSourceAdapter { public static String OUT_KEY_FIELDNAME = "out-key-fieldname"; public static String OUT_VALUE_FIELDNAME = "out-value-fieldname"; protected String outKeyFieldname; protected String outValueFieldname; public void setOutKeyFieldname( String arg ) { String previousVal = outKeyFieldname; outKeyFieldname = arg; firePropertyChange( OUT_KEY_FIELDNAME, previousVal, outKeyFieldname ); } public String getOutKeyFieldname() { return outKeyFieldname; } public void setOutValueFieldname( String arg ) { String previousVal = outValueFieldname; outValueFieldname = arg; firePropertyChange( OUT_VALUE_FIELDNAME, previousVal, outValueFieldname ); } public String getOutValueFieldname() { return outValueFieldname; } /** * Load data into the MetaMapper from the HadoopExitMeta * * @param meta */ public void loadMeta( HadoopExitMeta meta ) { setOutKeyFieldname( meta.getOutKeyFieldname() ); setOutValueFieldname( meta.getOutValueFieldname() ); } /** * Save data from the MetaMapper into the HadoopExitMeta * * @param meta */ public void saveMeta( HadoopExitMeta meta ) { // Set outKey if ( meta.getOutKeyFieldname() == null && getOutKeyFieldname() != null ) { meta.setOutKeyFieldname( getOutKeyFieldname() ); meta.setChanged(); } else if ( meta.getOutKeyFieldname() != null && !meta.getOutKeyFieldname().equals( getOutKeyFieldname() ) ) { meta.setOutKeyFieldname( getOutKeyFieldname() ); meta.setChanged(); } // Set outValue if ( meta.getOutValueFieldname() == null && getOutValueFieldname() 
!= null ) { meta.setOutValueFieldname( getOutValueFieldname() ); meta.setChanged(); } else if ( meta.getOutValueFieldname() != null && !meta.getOutValueFieldname().equals( getOutValueFieldname() ) ) { meta.setOutValueFieldname( getOutValueFieldname() ); meta.setChanged(); } } } ================================================ FILE: kettle-plugins/mapreduce/core/src/main/resources/OSGI-INF/blueprint/blueprint.xml ================================================ ================================================ FILE: kettle-plugins/mapreduce/core/src/main/resources/org/pentaho/big/data/kettle/plugins/mapreduce/entry/hadoop/messages/messages_en_US.properties ================================================ HadoopJobExecutorPlugin.Name=Hadoop job executor HadoopJobExecutorPlugin.Description=Execute MapReduce jobs in Hadoop JobEntryDialog.Title=Hadoop job executor JobEntry.Name.Label=Entry Name: JobEntryHadoopJobExecutor.Name.Label=Hadoop Job Name: JobEntryHadoopJobExecutor.JarUrl.Label=Jar: JobEntryHadoopJobExecutor.JarUrl.Browse=Browse... JobEntryHadoopJobExecutor.Driver.Class.Label=Driver Class: JobEntryHadoopJobExecutor.ModeSimple.Label=Simple JobEntryHadoopJobExecutor.ModeAdvanced.Label=Advanced JobEntryHadoopJobExecutor.Configuration.Label=Configuration JobEntryHadoopJobExecutor.ModeSimple.AssumptionsText.Label=Assumptions text here... JobEntryHadoopJobExecutor.ModeSimple.CommandLineArguments.Label=Command Line Arguments: JobEntryHadoopJobExecutor.ModeAdvanced.Blocking.Label=Enable Blocking: JobEntryHadoopJobExecutor.ModeAdvanced.Logging.Interval.Label=Logging Interval: JobEntryHadoopJobExecutor.ModeAdvanced.Tab.Classes.Label=Job Setup JobEntryHadoopJobExecutor.ModeAdvanced.Tab.Paths.Label=Cluster JobEntryHadoopJobExecutor.ModeAdvanced.Tab.UserDefined.Label=User Defined JobEntryHadoopJobExecutor.ModeAdvanced.Tab.Classes.OutputKeyClass.Label=Output Key Class: JobEntryHadoopJobExecutor.ModeAdvanced.Tab.Classes.OutputValueClass.Label=Output Value Class: JobEntryHadoopJobExecutor.ModeAdvanced.Tab.Classes.MapperClass.Label=Mapper Class: JobEntryHadoopJobExecutor.ModeAdvanced.Tab.Classes.CombinerClass.Label=Combiner Class: JobEntryHadoopJobExecutor.ModeAdvanced.Tab.Classes.ReducerClass.Label=Reducer Class: JobEntryHadoopJobExecutor.ModeAdvanced.Tab.Classes.InputFormat.Label=Input Format: JobEntryHadoopJobExecutor.ModeAdvanced.Tab.Classes.OutputFormat.Label=Output Format: JobEntryHadoopJobExecutor.ModeAdvanced.Tab.Paths.WorkingDirectory.Label=Working Directory: JobEntryHadoopJobExecutor.ModeAdvanced.Tab.Paths.HDFSHostname.Label=HDFS Hostname: JobEntryHadoopJobExecutor.ModeAdvanced.Tab.Paths.HDFSPort.Label=HDFS Port: JobEntryHadoopJobExecutor.ModeAdvanced.Tab.Paths.JobTrackerHostname.Label=Job Tracker Hostname: JobEntryHadoopJobExecutor.ModeAdvanced.Tab.Paths.JobTrackerPort.Label=Job Tracker Port: JobEntryHadoopJobExecutor.ModeAdvanced.Tab.Paths.InputPath.Label=Input Path: JobEntryHadoopJobExecutor.ModeAdvanced.Tab.Paths.OutputPath.Label=Output Path: JobEntryHadoopJobExecutor.ModeAdvanced.Tab.Paths.NamedCluster.Label=Hadoop Cluster: JobEntryHadoopJobExecutor.ModeAdvanced.Tab.Paths.NamedCluster.Edit=Edit... JobEntryHadoopJobExecutor.ModeAdvanced.Tab.Paths.NamedCluster.New=New... 
JobEntryHadoopJobExecutor.ModeAdvanced.Tab.UserDefined.NameColumn.Label=Name JobEntryHadoopJobExecutor.ModeAdvanced.Tab.UserDefined.ValueColumn.Label=Value JobEntryHadoopJobExecutor.ResolvedJar=Using jar path: {0} JobEntryHadoopJobExecutor.RunningPercent=Setup Complete: {0} Mapper Completion: {1} Reducer Completion: {2} JobEntryHadoopJobExecutor.TaskDetails=[{0}] -- Task: {1} Attempt: {2} Event: {3} {4} JobEntryHadoopJobExecutor.FailedToOpenLogFile=Unable to open file appender for file [{0}],{1} JobEntryHadoopJobExecutor.SimpleMode=Running Hadoop Job in Simple Mode JobEntryHadoopJobExecutor.AdvancedMode=Running Hadoop Job in Advanced Mode JobEntryHadoopJobExecutor.ErrorExecutingClass=Error executing class {0}. JobEntryHadoopJobExecutor.FailedToExecuteClass=Failed to execute class {0} successfully. Exited with status {1}. JobEntryHadoopJobExecutor.Blocking=Waiting for execution of {0} to finish... JobEntryHadoopJobExecutor.ModeAdvanced.NumMapTasks.Label=Number of Mapper Tasks: JobEntryHadoopJobExecutor.ModeAdvanced.NumReduceTasks.Label=Number of Reducer Tasks: JobEntryHadoopJobExecutor.Error.JarDoesNotExist=Specified jar [{0}] does not exist. JobEntryHadoopJobExecutor.SecurityManagerUpdatedDuringExecution=Security Manager updated during execution of the Hadoop Job Executor. Unable to restore previous Security Manager. JobEntryHadoopJobExecutor.JobEntryName.Error=Job Entry name missing. JobEntryHadoopJobExecutor.NamedClusterNotProvided.Error=Hadoop cluster not selected. JobEntryHadoopJobExecutor.NamedClusterPropertyMissing.Error=Selected Hadoop cluster is missing required settings. JobEntryHadoopJobExecutor.HadoopJobName.Error=Hadoop Job name missing. Dialog.Help=Help Dialog.Accept=OK Dialog.Cancel=Cancel Dialog.Error=Error NoSystemExit=JVM will not halt at this time. Runtime.exit() prevented. ErrorParsingLogInterval=Can't parse logging interval '{0}'. Using {1} seconds. JobEntryHadoopJobExecutor.ErrorDriverClassNotSpecified=Driver Class not specified. JobEntryHadoopJobExecutor.ErrorMultipleDriverClasses=Multiple Driver Classes found. Please select one. JobEntryHadoopJobExecutor.UsingDriverClass=Using Driver Class {0}. ================================================ FILE: kettle-plugins/mapreduce/core/src/main/resources/org/pentaho/big/data/kettle/plugins/mapreduce/entry/hadoop/messages/messages_ko_KR.properties ================================================ Dialog.Accept=\uD655\uC778 Dialog.Cancel=\uCDE8\uC18C Dialog.Error =\uC624\uB958 JobEntry.Name.Label=Job \uC5D4\uB4DC\uB9AC \uC774\uB984: JobEntryHadoopJobExecutor.AdvancedMode=Hodoop Job\uC744 \uACE0\uAE09 \uBAA8\uB4DC\uC5D0\uC11C \uC2E4\uD589 JobEntryHadoopJobExecutor.Configuration.Label=\uC124\uC815 JobEntryHadoopJobExecutor.Error.JarDoesNotExist=\uC9C0\uC815\uD55C jar [{0}] \uD30C\uC77C\uC774 \uC874\uC7AC\uD558\uC9C0 \uC54A\uC2B5\uB2C8\uB2E4. JobEntryHadoopJobExecutor.JarUrl.Browse=\uCC3E\uC544\uBCF4\uAE30... 
JobEntryHadoopJobExecutor.JarUrl.Label=Jar: JobEntryHadoopJobExecutor.ModeAdvanced.Label=\uACE0\uAE09 JobEntryHadoopJobExecutor.ModeAdvanced.Logging.Interval.Label=\uB85C\uAE45 \uC8FC\uAE30 JobEntryHadoopJobExecutor.ModeAdvanced.NumMapTasks.Label=Mapper \uD0DC\uC2A4\uD06C \uC218: JobEntryHadoopJobExecutor.ModeAdvanced.NumReduceTasks.Label=Reducer \uD0DC\uC2A4\uD06C \uC218: JobEntryHadoopJobExecutor.ModeAdvanced.Tab.Paths.HDFSHostname.Label=HDFS \uD638\uC2A4\uD2B8: JobEntryHadoopJobExecutor.ModeAdvanced.Tab.Paths.HDFSPort.Label=HDFS \uD3EC\uD2B8: JobEntryHadoopJobExecutor.ModeAdvanced.Tab.Paths.InputPath.Label=Input \uACBD\uB85C: JobEntryHadoopJobExecutor.ModeAdvanced.Tab.Paths.JobTrackerHostname.Label=Job Tracker \uD638\uC2A4\uD2B8: JobEntryHadoopJobExecutor.ModeAdvanced.Tab.Paths.JobTrackerPort.Label=Job Tracker \uD3EC\uD2B8: JobEntryHadoopJobExecutor.ModeAdvanced.Tab.Paths.Label=\uD074\uB7EC\uC2A4\uD130 JobEntryHadoopJobExecutor.ModeAdvanced.Tab.Paths.OutputPath.Label=Output \uACBD\uB85C: JobEntryHadoopJobExecutor.ModeAdvanced.Tab.Paths.WorkingDirectory.Label=\uC791\uC5C5 \uB514\uB809\uD1A0\uB9AC: JobEntryHadoopJobExecutor.ModeAdvanced.Tab.UserDefined.Label=\uC0AC\uC6A9\uC790 \uC815\uC758 JobEntryHadoopJobExecutor.ModeAdvanced.Tab.UserDefined.NameColumn.Label=\uC774\uB984 JobEntryHadoopJobExecutor.ModeAdvanced.Tab.UserDefined.ValueColumn.Label=\uAC12 JobEntryHadoopJobExecutor.ModeSimple.CommandLineArguments.Label=\uBA85\uB839\uD589 \uC778\uC790: JobEntryHadoopJobExecutor.Name.Label =Hadoop Job \uC774\uB984: JobEntryHadoopJobExecutor.ResolvedJar =jar \uACBD\uB85C \uC0AC\uC6A9: {0} JobEntryHadoopJobExecutor.SimpleMode =Hadoop Job\uC744 \uC2EC\uD50C \uBAA8\uB4DC\uC5D0\uC11C \uC2E4\uD589 ================================================ FILE: kettle-plugins/mapreduce/core/src/main/resources/org/pentaho/big/data/kettle/plugins/mapreduce/entry/pmr/messages/messages_en_US.properties ================================================ HadoopTransJobExecutorPlugin.Name=Pentaho MapReduce HadoopTransJobExecutorPlugin.Description=Execute Transformation Based MapReduce Jobs in Hadoop JobEntryDialog.Title=Pentaho MapReduce JobEntryHadoopTransJobExecutor.ModeAdvanced.Tab.MapReduce.Label=MapReduce JobEntryHadoopTransJobExecutor.ModeAdvanced.Tab.MapReduceMapper.Label=Mapper JobEntryHadoopTransJobExecutor.ModeAdvanced.Tab.MapReduceReducer.Label=Reducer JobEntryHadoopTransJobExecutor.ModeAdvanced.Tab.MapReduceCombiner.Label=Combiner JobEntryHadoopTransJobExecutor.ModeAdvanced.Tab.JobSetup.Label=Job Setup JobEntryHadoopTransJobExecutor.ModeAdvanced.Tab.Cluster.Label=Cluster JobEntryHadoopTransJobExecutor.ModeAdvanced.Tab.UserDefined.Label=User Defined JobEntry.Name.Label=Entry Name: JobEntryHadoopTransJobExecutor.Name.Label=Hadoop job name: JobEntryHadoopTransJobExecutor.MapTrans.Label=Transformation: JobEntryHadoopTransJobExecutor.CombinerTrans.Label=Transformation: JobEntryHadoopTransJobExecutor.ReduceTrans.Label=Transformation: JobEntryHadoopTransJobExecutor.MapTrans.Browse=Browse... JobEntryHadoopTransJobExecutor.CombinerTrans.Browse=Browse... JobEntryHadoopTransJobExecutor.ReduceTrans.Browse=Browse... 
JobEntryHadoopTransJobExecutor.MapInputStepName.Label=Input step name: JobEntryHadoopTransJobExecutor.MapOutputStepName.Label=Output step name: JobEntryHadoopTransJobExecutor.CombinerInputStepName.Label=Input step name: JobEntryHadoopTransJobExecutor.CombinerOutputStepName.Label=Output step name: JobEntryHadoopTransJobExecutor.ReduceInputStepName.Label=Input step name: JobEntryHadoopTransJobExecutor.ReduceOutputStepName.Label=Output step name: JobEntryHadoopTransJobExecutor.ModeAdvanced.Blocking.Label=Enable blocking JobEntryHadoopTransJobExecutor.ModeAdvanced.Logging.Interval.Label=Logging interval: JobEntryHadoopTransJobExecutor.ModeAdvanced.Tab.SuppressMapOutputKey.Label=Ignore output of map key JobEntryHadoopTransJobExecutor.ModeAdvanced.Tab.SuppressMapOutputValue.Label=Ignore output of map value JobEntryHadoopTransJobExecutor.ModeAdvanced.Tab.SuppressOutputKey.Label=Ignore output of reduce key JobEntryHadoopTransJobExecutor.ModeAdvanced.Tab.SuppressOutputValue.Label=Ignore output of reduce value JobEntryHadoopTransJobExecutor.ModeAdvanced.Tab.Classes.MapOutputKeyClass.Label=Map Output Key Class: JobEntryHadoopTransJobExecutor.ModeAdvanced.Tab.Classes.MapOutputValueClass.Label=Map Output Value Class: JobEntryHadoopTransJobExecutor.ModeAdvanced.Tab.Classes.OutputKeyClass.Label=Output Key Class: JobEntryHadoopTransJobExecutor.ModeAdvanced.Tab.Classes.OutputValueClass.Label=Output Value Class: JobEntryHadoopTransJobExecutor.ModeAdvanced.Tab.Classes.InputFormat.Label=Input format: JobEntryHadoopTransJobExecutor.ModeAdvanced.Tab.Classes.OutputFormat.Label=Output format: JobEntryHadoopTransJobExecutor.ModeAdvanced.Tab.Paths.WorkingDirectory.Label=Working Directory: JobEntryHadoopTransJobExecutor.ModeAdvanced.Tab.Paths.HDFSHostname.Label=HDFS Hostname: JobEntryHadoopTransJobExecutor.ModeAdvanced.Tab.Paths.HDFSPort.Label=HDFS Port: JobEntryHadoopTransJobExecutor.ModeAdvanced.Tab.Paths.JobTrackerHostname.Label=Job Tracker Hostname: JobEntryHadoopTransJobExecutor.ModeAdvanced.Tab.Paths.JobTrackerPort.Label=Job Tracker Port: JobEntryHadoopTransJobExecutor.ModeAdvanced.Tab.Paths.InputPath.Label=Input path: JobEntryHadoopTransJobExecutor.ModeAdvanced.Tab.Paths.OutputPath.Label=Output path: JobEntryHadoopTransJobExecutor.ModeAdvanced.Tab.Paths.NamedCluster.Label=Hadoop cluster: JobEntryHadoopTransJobExecutor.ModeAdvanced.Tab.Paths.NamedCluster.Edit=Edit... JobEntryHadoopTransJobExecutor.ModeAdvanced.Tab.Paths.NamedCluster.New=New... 
JobEntryHadoopTransJobExecutor.ModeAdvanced.Tab.Paths.CleanOutputPath.Label=Remove output path before job JobEntryHadoopTransJobExecutor.ModeAdvanced.Tab.UserDefined.NameColumn.Label=Name JobEntryHadoopTransJobExecutor.ModeAdvanced.Tab.UserDefined.ValueColumn.Label=Value JobEntryHadoopTransJobExecutor.ResolvedJar=Using jar path: {0} JobEntryHadoopTransJobExecutor.RunningPercent=Setup Complete: {0} Mapper Completion: {1} Reducer Completion: {2} JobEntryHadoopTransJobExecutor.SimpleMode=Running Hadoop Job in Simple Mode JobEntryHadoopTransJobExecutor.AdvancedMode=Running Hadoop Job in Advanced Mode JobEntryHadoopTransJobExecutor.ModeAdvanced.NumMapTasks.Label=Number of mapper tasks: JobEntryHadoopTransJobExecutor.ModeAdvanced.NumReduceTasks.Label=Number of reducer tasks: JobEntryHadoopTransJobExecutor.FailedToOpenLogFile=Unable to open file appender for file [{0}],{1} JobEntryHadoopTransJobExecutor.TaskDetails=[{0}] -- Task: {1} Attempt: {2} Event: {3} {4} JobEntryHadoopTransJobExecutor.GroupBox.MapLabel=Map JobEntryHadoopTransJobExecutor.GroupBox.CombinerLabel=Combiner JobEntryHadoopTransJobExecutor.GroupBox.ReduceLabel=Reduce JobEntryHadoopTransJobExecutor.StorageType.Label=Look in: JobEntryHadoopTransJobExecutor.StorageType.Local=Local JobEntryHadoopTransJobExecutor.StorageType.Repository.Location=Repository by name JobEntryHadoopTransJobExecutor.StorageType.Repository.Reference=Repository by reference JobEntryHadoopTransJobExecutor.JobEntryName.Error=Job Entry name missing. JobEntryHadoopTransJobExecutor.NamedClusterNotProvided.Error=Hadoop cluster not selected. JobEntryHadoopTransJobExecutor.NamedClusterPropertyMissing.Error=Selected Hadoop cluster is missing required settings. JobEntryHadoopTransJobExecutor.HadoopJobName.Error=Hadoop Job name missing. JobEntryHadoopTransJobExecutor.NumReduceTasks.Error=Number of reducer tasks must be 0 or greater. JobEntryHadoopTransJobExecutor.NumMapTasks.Error=Number of map tasks must be 0 or greater. 
JobEntryHadoopTransJobExecutor.NoMapOutputKeyDefined.Error=No output key field defined for the mapper transformation JobEntryHadoopTransJobExecutor.NoMapOutputValueDefined.Error=No output value field defined for the mapper transformation JobEntryHadoopTransJobExecutor.NoOutputKeyDefined.Error=No output key field defined for the reducer transformation JobEntryHadoopTransJobExecutor.NoOutputValueDefined.Error=No output value field defined for the reducer transformation JobEntryHadoopTransJobExecutor.MapConfiguration.Error=Error in mapper configuration JobEntryHadoopTransJobExecutor.CombinerConfiguration.Error=Error in combiner configuration JobEntryHadoopTransJobExecutor.ReducerConfiguration.Error=Error in reducer configuration JobEntryHadoopTransJobExecutor.Message.DistroConfigMessage=Configuring for Hadoop distribution: {0} JobEntryHadoopTransJobExecutor.Message.MapOutputKeyMessage=Using {0} for the map output key JobEntryHadoopTransJobExecutor.Message.MapOutputValueMessage=Using {0} for the map output value JobEntryHadoopTransJobExecutor.Message.OutputKeyMessage=Using {0} for the output key JobEntryHadoopTransJobExecutor.Message.OutputValueMessage=Using {0} for the output value Dialog.Accept=OK Dialog.Cancel=Cancel Dialog.Error=Error Dialog.Help=Help HelpImage.Url=help_web.png JobEntryHadoopTransJobExecutor.ReduceSingleThreaded.Label=Use single threaded transformation engine JobEntryHadoopTransJobExecutor.CombinerSingleThreaded.Label=Use single threaded transformation engine JobEntryHadoopTransJobExecutor.ReferencedObject.Mapper=Mapper JobEntryHadoopTransJobExecutor.ReferencedObject.Combiner=Combiner JobEntryHadoopTransJobExecutor.ReferencedObject.Reducer=Reducer ================================================ FILE: kettle-plugins/mapreduce/core/src/main/resources/org/pentaho/big/data/kettle/plugins/mapreduce/step/enter/messages/messages_en_US.properties ================================================ HadoopEnterPlugin.Name=MapReduce input HadoopEnterPlugin.Description=Enter a Hadoop Mapper or Reducer transformation StepConfigruationDialog.Title=MapReduce input Step.Name.Label=Step name HadoopEnter.InKey.Label=Key field HadoopEnter.InValue.Label=Value field HadoopEnter.Type.Label=Type HadoopEnter.Length.Label=Length HadoopEnter.Precision.Label=Precision Dialog.Accept=OK Dialog.Cancel=Cancel Dialog.Help=Help HadoopEnter.Error.ParseInteger=The text {0} could not be parsed as an integer HadoopEnterPlugin.Injection.KEY_TYPE=The data type of the key field. HadoopEnterPlugin.Injection.KEY_LENGTH=The length of the key field. HadoopEnterPlugin.Injection.KEY_PRECISION=Specify how many digits after a decimal will be used for the key field. HadoopEnterPlugin.Injection.VALUE_TYPE=The data type of the value field. HadoopEnterPlugin.Injection.VALUE_LENGTH=The length of the value field. HadoopEnterPlugin.Injection.VALUE_PRECISION=Specify how many digits after a decimal will be used for the value field. 
================================================ FILE: kettle-plugins/mapreduce/core/src/main/resources/org/pentaho/big/data/kettle/plugins/mapreduce/step/exit/messages/messages_en_US.properties ================================================ HadoopExitPlugin.Name=MapReduce output HadoopExitPlugin.Description=Exit a Hadoop Mapper or Reducer transformation StepConfigruationDialog.Title=MapReduce output Step.Name.Label=Step name Dialog.Accept=OK Dialog.Cancel=Cancel Dialog.Help=Help HadoopExit.OutKey.Label=Key field HadoopExit.OutValue.Label=Value field HadoopExit.Linenr=Linenr {0} Error.InvalidKeyField=Key field does not exist on input stream: \"{0}\". Error.InvalidValueField=Value field does not exist on input stream: \"{0}\". HadoopExitPlugin.Injection.KEY_FIELD=The name of the key field. HadoopExitPlugin.Injection.VALUE_FIELD=The name of the value field. ================================================ FILE: kettle-plugins/mapreduce/core/src/main/resources/org/pentaho/big/data/kettle/plugins/mapreduce/ui/entry/JobEntryHadoopJobExecutorDialog.xul ================================================