Repository: linuxserver/davos Branch: master Commit: fd3a46bffa8d Files: 184 Total size: 424.6 KB Directory structure: gitextract_1q48vioe/ ├── .gitignore ├── LICENSE ├── README.md ├── build.gradle ├── conf/ │ ├── local/ │ │ ├── application.properties │ │ └── log4j2.xml │ └── release/ │ ├── application.properties │ └── log4j2.xml ├── docs/ │ ├── Makefile │ ├── make.bat │ └── source/ │ ├── conf.py │ ├── developers/ │ │ └── index.rst │ ├── faq/ │ │ └── index.rst │ ├── guides/ │ │ ├── appsettings.rst │ │ ├── gettingstarted/ │ │ │ ├── hosts.rst │ │ │ ├── index.rst │ │ │ └── schedules.rst │ │ ├── index.rst │ │ └── installation.rst │ ├── index.rst │ ├── reference/ │ │ ├── api.rst │ │ └── index.rst │ └── requirements.txt ├── gradle/ │ └── wrapper/ │ ├── gradle-wrapper.jar │ └── gradle-wrapper.properties ├── gradlew ├── gradlew.bat ├── src/ │ ├── cucumber/ │ │ ├── java/ │ │ │ └── io/ │ │ │ └── linuxserver/ │ │ │ └── davos/ │ │ │ └── bdd/ │ │ │ ├── ClientStepDefs.java │ │ │ ├── ScheduleStepDefs.java │ │ │ ├── ServerStepDefs.java │ │ │ └── helpers/ │ │ │ ├── FakeFTPServerFactory.java │ │ │ ├── FakeSFTPServerFactory.java │ │ │ └── Logging.java │ │ └── resources/ │ │ ├── Client.feature │ │ └── Schedule.feature │ ├── main/ │ │ ├── java/ │ │ │ └── io/ │ │ │ └── linuxserver/ │ │ │ └── davos/ │ │ │ ├── DavosApplication.java │ │ │ ├── Version.java │ │ │ ├── converters/ │ │ │ │ ├── Converter.java │ │ │ │ ├── HostConverter.java │ │ │ │ └── ScheduleConverter.java │ │ │ ├── delegation/ │ │ │ │ └── services/ │ │ │ │ ├── HostService.java │ │ │ │ ├── HostServiceImpl.java │ │ │ │ ├── ScheduleService.java │ │ │ │ ├── ScheduleServiceImpl.java │ │ │ │ ├── SettingsService.java │ │ │ │ └── SettingsServiceImpl.java │ │ │ ├── dto/ │ │ │ │ ├── ActionDTO.java │ │ │ │ ├── FTPFileDTO.java │ │ │ │ ├── FilterDTO.java │ │ │ │ ├── HostDTO.java │ │ │ │ ├── ScheduleDTO.java │ │ │ │ └── ScheduleProcessResponse.java │ │ │ ├── exception/ │ │ │ │ ├── HostInUseException.java │ │ │ │ ├── 
ScheduleAlreadyRunningException.java │ │ │ │ └── ScheduleNotRunningException.java │ │ │ ├── logging/ │ │ │ │ └── LoggingManager.java │ │ │ ├── persistence/ │ │ │ │ ├── dao/ │ │ │ │ │ ├── DefaultHostDAO.java │ │ │ │ │ ├── DefaultScheduleDAO.java │ │ │ │ │ ├── HostDAO.java │ │ │ │ │ └── ScheduleDAO.java │ │ │ │ ├── model/ │ │ │ │ │ ├── ActionModel.java │ │ │ │ │ ├── FilterModel.java │ │ │ │ │ ├── HostModel.java │ │ │ │ │ ├── ScannedFileModel.java │ │ │ │ │ └── ScheduleModel.java │ │ │ │ └── repository/ │ │ │ │ ├── HostRepository.java │ │ │ │ └── ScheduleRepository.java │ │ │ ├── schedule/ │ │ │ │ ├── RunnableSchedule.java │ │ │ │ ├── RunningSchedule.java │ │ │ │ ├── ScheduleConfiguration.java │ │ │ │ ├── ScheduleConfigurationFactory.java │ │ │ │ ├── ScheduleExecutor.java │ │ │ │ └── workflow/ │ │ │ │ ├── ConnectWorkflowStep.java │ │ │ │ ├── DisconnectWorkflowStep.java │ │ │ │ ├── DownloadFilesWorkflowStep.java │ │ │ │ ├── FilterFilesWorkflowStep.java │ │ │ │ ├── ScheduleWorkflow.java │ │ │ │ ├── WorkflowStep.java │ │ │ │ ├── actions/ │ │ │ │ │ ├── HttpAPICallAction.java │ │ │ │ │ ├── MoveFileAction.java │ │ │ │ │ ├── PostDownloadAction.java │ │ │ │ │ ├── PostDownloadExecution.java │ │ │ │ │ ├── PushbulletNotifyAction.java │ │ │ │ │ └── SNSNotifyAction.java │ │ │ │ ├── filter/ │ │ │ │ │ ├── FileFilter.java │ │ │ │ │ ├── ReferentialFileFilter.java │ │ │ │ │ └── TemporalFileFilter.java │ │ │ │ └── transfer/ │ │ │ │ ├── FTPTransfer.java │ │ │ │ ├── FilesAndFoldersTranferStrategy.java │ │ │ │ ├── FilesOnlyTransferStrategy.java │ │ │ │ ├── TransferStrategy.java │ │ │ │ └── TransferStrategyFactory.java │ │ │ ├── transfer/ │ │ │ │ └── ftp/ │ │ │ │ ├── FTPFile.java │ │ │ │ ├── FileTransferType.java │ │ │ │ ├── TransferProtocol.java │ │ │ │ ├── client/ │ │ │ │ │ ├── Client.java │ │ │ │ │ ├── ClientFactory.java │ │ │ │ │ ├── FTPClient.java │ │ │ │ │ ├── FTPSClient.java │ │ │ │ │ ├── SFTPClient.java │ │ │ │ │ └── UserCredentials.java │ │ │ │ ├── connection/ │ │ │ │ │ ├── 
Connection.java │ │ │ │ │ ├── ConnectionFactory.java │ │ │ │ │ ├── FTPConnection.java │ │ │ │ │ ├── SFTPConnection.java │ │ │ │ │ └── progress/ │ │ │ │ │ ├── ListenerFactory.java │ │ │ │ │ ├── ProgressListener.java │ │ │ │ │ └── SFTPProgressListener.java │ │ │ │ └── exception/ │ │ │ │ ├── ClientConnectionException.java │ │ │ │ ├── ClientDisconnectException.java │ │ │ │ ├── DeleteFileException.java │ │ │ │ ├── DownloadFailedException.java │ │ │ │ ├── FTPException.java │ │ │ │ └── FileListingException.java │ │ │ ├── util/ │ │ │ │ ├── FileStreamFactory.java │ │ │ │ ├── FileUtils.java │ │ │ │ └── PatternBuilder.java │ │ │ └── web/ │ │ │ ├── API.java │ │ │ ├── Filter.java │ │ │ ├── Host.java │ │ │ ├── Notifications.java │ │ │ ├── Pushbullet.java │ │ │ ├── SNS.java │ │ │ ├── Schedule.java │ │ │ ├── ScheduleCommand.java │ │ │ ├── Settings.java │ │ │ ├── Transfer.java │ │ │ ├── VersionChecker.java │ │ │ ├── controller/ │ │ │ │ ├── APIController.java │ │ │ │ ├── FragmentController.java │ │ │ │ ├── ViewController.java │ │ │ │ └── response/ │ │ │ │ ├── APIResponse.java │ │ │ │ └── APIResponseBuilder.java │ │ │ └── selectors/ │ │ │ ├── IntervalSelector.java │ │ │ ├── LogLevelSelector.java │ │ │ ├── MethodSelector.java │ │ │ ├── ProtocolSelector.java │ │ │ └── TransferSelector.java │ │ └── resources/ │ │ ├── static/ │ │ │ ├── browserconfig.xml │ │ │ ├── css/ │ │ │ │ └── davos.css │ │ │ ├── js/ │ │ │ │ └── davos.js │ │ │ └── manifest.json │ │ └── templates/ │ │ ├── fragments/ │ │ │ ├── api.html │ │ │ ├── filter.html │ │ │ ├── header.html │ │ │ ├── pushbullet.html │ │ │ ├── sns.html │ │ │ └── transfers.html │ │ └── v2/ │ │ ├── edit-host.html │ │ ├── edit-schedule.html │ │ ├── hosts.html │ │ ├── schedules.html │ │ └── settings.html │ └── test/ │ └── java/ │ └── io/ │ └── linuxserver/ │ └── davos/ │ ├── VersionTest.java │ ├── delegation/ │ │ └── services/ │ │ ├── ScheduleServiceImplTest.java │ │ └── SettingsServiceImplTest.java │ ├── persistence/ │ │ └── dao/ │ │ └── 
DefaultScheduleDAOTest.java │ ├── schedule/ │ │ ├── ScheduleConfigurationFactoryTest.java │ │ ├── ScheduleExecutorTest.java │ │ └── workflow/ │ │ ├── ConnectWorkflowStepTest.java │ │ ├── DisconnectWorkflowStepTest.java │ │ ├── DownloadFilesWorkflowStepTest.java │ │ ├── FilterFilesWorkflowStepTest.java │ │ ├── actions/ │ │ │ ├── HttpAPICallActionTest.java │ │ │ ├── MoveFileActionTest.java │ │ │ └── PushbulletNotifyActionTest.java │ │ ├── filter/ │ │ │ └── ReferentialFileFilterTest.java │ │ └── transfer/ │ │ ├── FilesAndFoldersTranferStrategyTest.java │ │ ├── FilesOnlyTransferStrategyTest.java │ │ ├── TransferStrategyFactoryTest.java │ │ └── TransferStrategyTest.java │ ├── transfer/ │ │ └── ftp/ │ │ ├── client/ │ │ │ ├── ClientFactoryTest.java │ │ │ ├── FTPClientTest.java │ │ │ ├── FTPSClientTest.java │ │ │ └── SFTPClientTest.java │ │ └── connection/ │ │ ├── FTPConnectionTest.java │ │ ├── SFTPConnectionTest.java │ │ └── progress/ │ │ ├── ListenerFactoryTest.java │ │ ├── ProgressListenerTest.java │ │ └── SFTPProgressListenerTest.java │ ├── util/ │ │ └── PatternBuilderTest.java │ └── web/ │ └── controller/ │ ├── APIControllerTest.java │ └── ViewControllerTest.java └── version.txt ================================================ FILE CONTENTS ================================================ ================================================ FILE: .gitignore ================================================ .gradle *.sw? 
.#* *# *~ /build /code .classpath .project .settings .metadata .factorypath .recommenders bin build lib/ target .factorypath .springBeans interpolated*.xml dependency-reduced-pom.xml build.log _site/ .*.md.html manifest.yml MANIFEST.MF settings.xml activemq-data overridedb.* *.iml *.ipr *.iws .idea .DS_Store .factorypath davos.log src/main/resources/application.properties src/main/resources/log4j2.xml .vscode/ ================================================ FILE: LICENSE ================================================ The MIT License (MIT) Copyright (c) 2015 LinuxServer.io Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
================================================ FILE: README.md ================================================ # davos [![Build Status](http://ci.linuxserver.io/buildStatus/icon?job=Software/Davos/davos_10_Unit_Tests)](http://ci.linuxserver.io/job/Software/job/Davos/job/davos_10_Unit_Tests/) [![Documentation Status](https://readthedocs.org/projects/davos/badge/?version=latest)](http://davos.readthedocs.io/en/latest) davos is an FTP download automation tool that allows you to scan various FTP servers for files you are interested in. This can be used to configure file feeds as part of a wider workflow. # Why use davos? A fair number of services still rely on "file-drops" to transport data from place to place. A common practice is to configure a cron job to periodically trigger FTP/SFTP programs to download those files. _davos_ is relatively similar, only it also adds a web UI to the whole process, making the management of these schedules easier. # How it works ## Hosts All periodic scans (Schedules) require a host to connect to. These can be added individually: ![https://raw.githubusercontent.com/linuxserver/davos/master/docs/host.png](https://raw.githubusercontent.com/linuxserver/davos/master/docs/host.png) ## Schedules Each schedule contains all of the required information pertaining to the files it is interested in. This includes the host it needs to connect to, where to look for the files, where to download them, and how often: ![https://raw.githubusercontent.com/linuxserver/davos/master/docs/schedule1.png](https://raw.githubusercontent.com/linuxserver/davos/master/docs/schedule1.png) It is also possible to limit what the schedule downloads by applying filters to each scan. _davos_ will only download files that match its list of given filters. If no filters are applied to a schedule, all files will be downloaded. Each schedule also keeps an internal record of what it scanned in the previous run, so it won't download the same file twice. 
![https://raw.githubusercontent.com/linuxserver/davos/master/docs/schedule2.png](https://raw.githubusercontent.com/linuxserver/davos/master/docs/schedule2.png) Once each file has been downloaded, _davos_ can also notify you via Pushbullet, as well as sending downstream requests to other services. This is particularly useful if another service makes use of the file _davos_ has just downloaded. ![https://raw.githubusercontent.com/linuxserver/davos/master/docs/schedule3.png](https://raw.githubusercontent.com/linuxserver/davos/master/docs/schedule3.png) ## Running Finally, schedules can be started or stopped at any point, using the schedules listing UI: ![https://raw.githubusercontent.com/linuxserver/davos/master/docs/list.PNG](https://raw.githubusercontent.com/linuxserver/davos/master/docs/list.PNG) # Changelog - **2.2.2** - Updated log4j dependency to 2.16.0, accounting for CVE-2021-44228 - **2.2.1** - Fixed bug where lastRunTime got reset whenever a change was made to a schedule. - General refactoring of code, plus added unit tests. - Allow $filename resolution in URLs of API calls. - **2.2.0** - The filter pattern matcher now resolves '*' to zero or more characters, rather than one or more. - The scanned items list can now be cleared. - Added a Last Run field to the scanned items modal. - Included [readthedocs](https://davos.readthedocs.io) documentation! - Added SNS capability to notifications area - Updated FTPS connections to run over Explicit TLS, rather than Implicit SSL - This may or may not break existing schedules that use FTPS prior to 2.2.0. - Improved some areas of DEBUG logging - Schedules page now automatically updates when files are downloading - Added identity file authentication for SFTP connections - Included a version checker to help prompt users when a new version is available - **Full disclosure**: This makes a GET request to GitHub to ascertain the latest release version. 
- **2.1.2** - Fixed NaN bug caused by empty files (Div/0) - Fixed recursive delete issue for directories in FTP and SFTP connections. - **2.1.1** - Fixed primitive issue on Schedule model for new fields - **2.1.0** - Mandatory filtering allows schedules to only download files when at least one filter has been set. - Form validation on Hosts and Schedule pages - New theme - Inverse filtering allows schedules to download files that DO NOT match provided filters. - "Test Connection" button added to Hosts page - Schedules can now delete the remote copy of each file once the download has completed. This is separate to the Post-download actions. - New intervals: "Every minute" and "Every 5 minutes" ================================================ FILE: build.gradle ================================================ import java.util.regex.Matcher; buildscript { ext { springBootVersion = '1.4.2.RELEASE' } repositories { mavenCentral() } dependencies { classpath("org.springframework.boot:spring-boot-gradle-plugin:${springBootVersion}") classpath('io.spring.gradle:dependency-management-plugin:0.6.1.RELEASE') } } plugins { id "com.github.samueltbrown.cucumber" version "0.9" } apply plugin: 'java' apply plugin: 'eclipse' apply plugin: 'idea' apply plugin: 'org.springframework.boot' apply plugin: 'io.spring.dependency-management' jar { baseName = 'davos' version = file('version.txt').text.trim() } sourceCompatibility = 1.8 targetCompatibility = 1.8 repositories { mavenCentral() } configurations { all*.exclude module : 'spring-boot-starter-logging' bddTestCompile testCompile.extendsFrom bddTestCompile } sourceSets { bbdTest { java { srcDir 'src/cucumber/java' } resources { srcDir 'src/cucumber/resources' } compileClasspath += test.output } } dependencies { compile 'org.springframework.boot:spring-boot-starter-web' compile 'org.springframework.boot:spring-boot-starter-thymeleaf' compile 'org.springframework.boot:spring-boot-starter-data-jpa' compile 
'org.springframework.boot:spring-boot-starter-jdbc' compile 'org.apache.logging.log4j:log4j-api:2.16.0' compile 'org.apache.logging.log4j:log4j-core:2.16.0' compile 'org.apache.logging.log4j:log4j-slf4j-impl:2.16.0' compile 'com.jcraft:jsch:0.1.50' compile 'joda-time:joda-time:2.3' compile 'commons-net:commons-net:3.3' compile 'commons-io:commons-io:2.4' compile 'org.apache.commons:commons-lang3:3.4' compile 'com.amazonaws:aws-java-sdk-sns:1.11.167' runtime 'com.h2database:h2' testCompile 'org.springframework.boot:spring-boot-starter-test' testCompile 'org.assertj:assertj-core:3.2.0' testCompile 'org.mockito:mockito-all:1.9.5' testCompile 'junit:junit:4.11' bddTestCompile 'org.mockftpserver:MockFtpServer:2.6' bddTestCompile 'org.apache.sshd:sshd-core:1.4.0' bddTestCompile 'info.cukes:cucumber-java:1.2.4' cucumberCompile 'info.cukes:cucumber-java:1.2.4' } eclipse { classpath { containers.remove('org.eclipse.jdt.launching.JRE_CONTAINER') containers 'org.eclipse.jdt.launching.JRE_CONTAINER/org.eclipse.jdt.internal.debug.ui.launcher.StandardVMType/JavaSE-1.8' } } task wrapper(type: Wrapper) { gradleVersion = '2.14' } task updateBuildVersion << { Matcher matcher = file('version.txt').text.trim() =~ /(.+)\.(.+)\.(.+)$/ if (matcher.find()) { String major = matcher.group(1) String minor = matcher.group(2) String patch = matcher.group(3).trim() int nextBuild = Integer.valueOf(patch) + 1 String updatedVersion = "${major}.${minor}.${nextBuild}" file('version.txt').text = "${updatedVersion}\n" } } task copyConfig(type: Copy) { def location = project.hasProperty('env') ? 
"$env" : 'local' from "conf/${location}" into "src/main/resources/" } cucumber { jvmOptions { maxHeapSize = '512m' environment 'ENV', 'staging' } } build.dependsOn copyConfig build.mustRunAfter copyConfig ================================================ FILE: conf/local/application.properties ================================================ davos.version=2.2.2 ================================================ FILE: conf/local/log4j2.xml ================================================ ================================================ FILE: conf/release/application.properties ================================================ spring.datasource.url=jdbc:h2:file:/config/db/davos2 spring.datasource.username=sa spring.datasource.password=sa spring.datasource.driverClassName=org.h2.Driver spring.jpa.hibernate.ddl-auto=update davos.version=2.2.2 ================================================ FILE: conf/release/log4j2.xml ================================================ ================================================ FILE: docs/Makefile ================================================ # Minimal makefile for Sphinx documentation # # You can set these variables from the command line. SPHINXOPTS = SPHINXBUILD = python -msphinx SPHINXPROJ = davos SOURCEDIR = source BUILDDIR = build # Put it first so that "make" without argument is like "make help". help: @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) .PHONY: help Makefile # Catch-all target: route all unknown targets to Sphinx using the new # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). 
%: Makefile @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) ================================================ FILE: docs/make.bat ================================================ @ECHO OFF pushd %~dp0 REM Command file for Sphinx documentation if "%SPHINXBUILD%" == "" ( set SPHINXBUILD=python -msphinx ) set SOURCEDIR=source set BUILDDIR=build set SPHINXPROJ=davos if "%1" == "" goto help %SPHINXBUILD% >NUL 2>NUL if errorlevel 9009 ( echo. echo.The Sphinx module was not found. Make sure you have Sphinx installed, echo.then set the SPHINXBUILD environment variable to point to the full echo.path of the 'sphinx-build' executable. Alternatively you may add the echo.Sphinx directory to PATH. echo. echo.If you don't have Sphinx installed, grab it from echo.http://sphinx-doc.org/ exit /b 1 ) %SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% goto end :help %SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% :end popd ================================================ FILE: docs/source/conf.py ================================================ # -*- coding: utf-8 -*- # # davos documentation build configuration file, created by # sphinx-quickstart on Sat Jul 29 08:01:32 2017. # # This file is execfile()d with the current directory set to its # containing dir. # # Note that not all possible configuration values are present in this # autogenerated file. # # All configuration values have a default; values that are commented out # serve to show the default. # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. # # import os # import sys # sys.path.insert(0, os.path.abspath('.')) # -- General configuration ------------------------------------------------ # If your documentation needs a minimal Sphinx version, state it here. 
# # needs_sphinx = '1.0' # Add any Sphinx extension module names here, as strings. They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom # ones. extensions = [] # Add any paths that contain templates here, relative to this directory. templates_path = ['_templates'] # The suffix(es) of source filenames. # You can specify multiple suffix as a list of string: # # source_suffix = ['.rst', '.md'] source_suffix = '.rst' # The master toctree document. master_doc = 'index' # General information about the project. project = u'davos' copyright = u'2017, Josh Stark' author = u'Josh Stark' # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the # built documents. # # The short X.Y version. version = u'2.2' # The full version, including alpha/beta/rc tags. release = u'2.2.0' # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. # # This is also used if you do content translation via gettext catalogs. # Usually you set "language" from the command line for these cases. language = None # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. # This patterns also effect to html_static_path and html_extra_path exclude_patterns = [] # The name of the Pygments (syntax highlighting) style to use. pygments_style = 'sphinx' # If true, `todo` and `todoList` produce output, else they produce nothing. todo_include_todos = False # -- Options for HTML output ---------------------------------------------- # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. # #html_theme = 'alabaster' import sphinx_rtd_theme html_theme = "sphinx_rtd_theme" html_theme_path = [sphinx_rtd_theme.get_html_theme_path()] # Theme options are theme-specific and customize the look and feel of a theme # further. 
For a list of options available for each theme, see the # documentation. # # html_theme_options = {} # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". html_static_path = ['_static'] # Custom sidebar templates, must be a dictionary that maps document names # to template names. # # This is required for the alabaster theme # refs: http://alabaster.readthedocs.io/en/latest/installation.html#sidebars html_sidebars = { '**': [ 'about.html', 'navigation.html', 'relations.html', # needs 'show_related': True theme option to display 'searchbox.html', 'donate.html', ] } # -- Options for HTMLHelp output ------------------------------------------ # Output file base name for HTML help builder. htmlhelp_basename = 'davosdoc' # -- Options for LaTeX output --------------------------------------------- latex_elements = { # The paper size ('letterpaper' or 'a4paper'). # # 'papersize': 'letterpaper', # The font size ('10pt', '11pt' or '12pt'). # # 'pointsize': '10pt', # Additional stuff for the LaTeX preamble. # # 'preamble': '', # Latex figure (float) alignment # # 'figure_align': 'htbp', } # Grouping the document tree into LaTeX files. List of tuples # (source start file, target name, title, # author, documentclass [howto, manual, or own class]). latex_documents = [ (master_doc, 'davos.tex', u'davos Documentation', u'Josh Stark', 'manual'), ] # -- Options for manual page output --------------------------------------- # One entry per manual page. List of tuples # (source start file, name, description, authors, manual section). man_pages = [ (master_doc, 'davos', u'davos Documentation', [author], 1) ] # -- Options for Texinfo output ------------------------------------------- # Grouping the document tree into Texinfo files. 
List of tuples # (source start file, target name, title, author, # dir menu entry, description, category) texinfo_documents = [ (master_doc, 'davos', u'davos Documentation', author, 'davos', 'One line description of project.', 'Miscellaneous'), ] ================================================ FILE: docs/source/developers/index.rst ================================================ ########## Developers ########## If you wish to contribute to davos (and help me tidy up some of its rather messy code!), you will need to be able to build and run it locally. davos is written almost completely in Java using the Spring Framework, utilising the Thymeleaf rendering engine. The project is unit and integration tested using jUnit and Cucumber JVM, respectively. ***** Setup ***** Download and install the `Java 8 JDK `_. I'd also recommend using `Spring Tool Suite (STS) `_ as it is a prebuilt version of Eclipse IDE with all of the necessary plugins installed for working with a Spring application. ******** Building ******** .. note:: You do not need to pre-install Gradle for this application as it comes with Gradle Wrapper, which does all the work for you. To build the application, use Gradle: .. code-block:: text ./gradlew clean build -Penv={release|local} This will download all necessary dependencies, run tests, then package up the application. The resulting .jar file will be in ``build/libs``. If you pass through a ``-Penv=release`` when running this command, the packaged application will use the config under ``conf/release``, which tells davos to use a file-based database. By default (i.e. if you do not pass this switch through), it will use the ``conf/local`` configuration, which makes use of an in-memory database. *************** Running the app *************** It is recommended to build the app first before running, so you know your latest changes are built: .. 
code-block:: text ./gradlew clean build && java -jar build/libs/davos-2.2.0.jar *********** Development *********** Classpath --------- When using Eclipse (or STS), a separate Gradle command is required in order to update the project's classpath files so Eclipse is aware of the downloaded dependencies: .. code-block:: text ./gradlew cleanEclipse eclipse Code Structure -------------- The code of davos is split into four main sections: ``src/main/java`` The core functional code. This contains all logic for the workflow, API, connectivity, and object persistence (database). ``src/main/resources`` The front-end code, including all JavaScript, CSS, images, and Thymeleaf templates. ``src/test/java`` All unit tests for the core code ``src/cucumber/java`` Integration test code. This is separate to the main project code and does not get packaged into the released application. Running Tests ------------- To run all unit tests, use Gradle: .. code-block:: text ./gradlew test To run all integration tests: .. code-block:: text ./gradlew cucumber Managing the version -------------------- The version of the application is referenced in three files: * ``version.txt`` in the project root directory * ``conf/local/application.properties`` as a property called ``davos.version`` * ``conf/release/application.properties`` as a property called ``davos.version`` All three of these need to be updated if you are changing the version number. ================================================ FILE: docs/source/faq/index.rst ================================================ ### FAQ ### ********************************** Can davos be used to upload files? ********************************** No, davos only downloads files. There are currently no plans on implementing the ability to upload files as this will require a rework of the schedule workflow engine. ****************************** How many schedules can I have?
****************************** There is no theoretical limit to the number of schedules you can have. davos creates an initial thread pool of 10 worker threads, but this gets extended if more than 10 schedules are created. ************************** How many hosts can I have? ************************** Unlimited. ******************************************** Are host credentials hashed in the database? ******************************************** No, all host usernames and passwords are stored in plain text in the H2 database. This is because the application needs to query the hosts table every time a schedule runs, and would have no way to compare a hash with a valid password. *************************************************** How do I use an identity file for SFTP connections? *************************************************** On the Host configuration page for your Host, make sure **Use Identity File** is checked. Then enter the absolute path of the identity file. If you're running davos in a Docker container (recommended), the value of this should be something like "/config/id_rsa", assuming you are using an SSH private key called "id_rsa" and have placed it in your mapped host directory on your machine. Any form of private identity is applicable, for example if your host server uses .pem files for authentication, use "/config/my_identity.pem". .. note:: Remember, davos can't see files outside of its ``/download`` and ``/config`` directories when running in a Docker container. So remember to place your identity file(s) in the mapped directory on the host (e.g. ``/home/user/davos``). *************************************************************** I've just updated davos. The application is behaving strangely. *************************************************************** Some version updates include changes to the JavaScript sources for the website side of the application.
Modern browsers like Chrome tend to cache these types of sources for the sake of performance. It is likely your browser has not re-cached the latest version of the JavaScript code. To remedy this, hard-refresh the app: ``CTRL`` + ``F5``. **************************************** How can I use SNS to notify me by email? **************************************** To use SNS, you'll need an Amazon AWS Account. Once set up, you should go to **Services -> Simple Notification Service**, then **Create topic**. For Topic name, enter something like "davos-notifications", and click **Create topic**. The first thing you'll notice is that it has generated a **Topic ARN**. You'll need this for the notification configuration later. Now create a subscription to your topic by clicking on **Create subscription**, choosing "Email" as the Protocol, and your preferred email address as the Endpoint. Click **Create subscription**. You'll receive an email asking you to confirm the subscription request. Once your topic has been configured, you should create an IAM User that can publish messages to it. It is this user's credentials that davos needs to perform the publish. Go to **Services -> IAM**, then **Users**. Click **Add user**. For User name, enter something sensible, then select "Programmatic access" as the Access Type. Click **Next: Permissions**. This user should only have permission to publish to this topic, nothing more. So, under "Add user to group", click **Create group**, and then **Create policy**. .. note:: A user can be in many groups. Groups can have many policies. A policy is a set of permissions for access to various things in AWS. You should be directed to the policy creation tool. Select the Policy Generator and set the following: Effect Allow AWS Service Amazon SNS Actions Publish Amazon Resource Name (ARN) {YOUR_TOPIC_ARN} Then click **Add Statement**. You should see it added underneath. Click **Next Step**.
The generated policy will be shown to you on screen (it's formatted as JSON, and contains a ``Statement`` array). Update the Policy Name to something sensible (e.g. "DavosTopicPublishAccess") then click **Create Policy**. You'll be redirected back to the IAM console, but you can close this. Go back to the previous tab and under the Filter, type in the name of the policy you just created. Select it. Now, for the Group name, give it a sensible name (e.g. DavosNotifications), and click **Create group**. The group should now be selected under the IAM user console. Click **Next: Review**, make sure you're happy, and then click **Create user**. You should see a table showing the user's Access key ID and Secret access key. You'll need these for the SNS configuration in davos, so keep them safe somewhere (you can download a .csv with the credentials in). .. warning:: The Secret access key will only be shown once in the console, so make sure you store it somewhere safe. ================================================ FILE: docs/source/guides/appsettings.rst ================================================ ############ App Settings ############ Under **Settings -> App Settings**, you can configure the log level that davos will output to its log file. ******* Logging ******* All logs are written to ``davos.log``, located in the ``/config/logs`` directory. When mapping the ``/config`` directory in the container to a host directory, logs will be made available in that host directory. The log level can be changed at any time while the application is running. The available levels are: * DEBUG * INFO * WARN * ERROR The higher the level (``DEBUG`` is lowest, ``ERROR`` is highest), the fewer logs will be written. By default, davos logs at ``INFO`` level. If you are experiencing issues with davos and wish to understand the area of failure, change the level to ``DEBUG``. Under this setting, the most logs will be written. .. 
warning:: When setting the log level to ``DEBUG``, any secure credentials used in connections to the FTP host, or notification systems **will** be logged. ================================================ FILE: docs/source/guides/gettingstarted/hosts.rst ================================================ ##### Hosts ##### A Host configuration provides one or more Schedules with information pertaining to the FTP server to connect to when scanning for files. They are separate to the Schedule configuration to allow multiple Schedules to use the same Host configuration without the need for having to input the same data multiple times. Under **Settings -> New Host**, you will be prompted to enter all of the relevant information. Name *[REQUIRED]* The friendly name for this Host. This is what will be visible when creating a schedule, so make it indicative of the Host you're making. Protocol Which type of connection to be made. This has no bearing on how you configure the host, but will direct davos to build the specific client when connecting. Host Address *[REQUIRED]* The IP address (or hostname) of the server. Port FTP and FTPS are usually on ``21``, while SFTP is usually on ``22``. If your server has been configured to run on a separate port, this is where you reference it. Username *[REQUIRED]* Name of the user to connect as. Password Password of the user to connect as. Use Identity File Only available when ``SFTP`` is selected. Choose this if the SFTP server requires an identity file to authenticate the user. Identity File Displayed when ``Use Identity File`` is checked, replacing the ``Password`` field. Enter the location of the file. .. note:: The location of the identity file will be relative to the container's filesystem, so should ideally be under ``/config`` as this is the directory exposed by the Docker volume mapping. It is also possible to create, manage, and delete a Host via the HTTP API. See :doc:`../../reference/api` for more details. 
================================================ FILE: docs/source/guides/gettingstarted/index.rst ================================================ ############### Getting Started ############### This section aims to help you understand how davos is pieced together, and shows you how it can be configured to meet your needs. It is recommended that you follow the below guides. .. toctree:: :maxdepth: 1 hosts schedules ************ How it works ************ The Schedules in davos are powered by a basic workflow engine that runs a series of steps to ensure each run processes files properly. The order of this workflow is as follows: 1. Connect to the host. 2. List all files in the provided remote directory. 3. Filter all files in the remote directory so only the relevant ones remain. 4. Remove any files that have been previously scanned. 5. For each matched file, download it. Once downloaded, run any actions required by the schedule. 6. Store the list of scanned files against the Schedule. 7. Disconnect. There is no theoretical limit to the number of schedules you can have running at the same time, however it is advised you keep it below 10, as memory usage can become quite high. ================================================ FILE: docs/source/guides/gettingstarted/schedules.rst ================================================ ######### Schedules ######### A Schedule is the configuration that tells davos when to run, where to connect, what to look for, and what to do once it has finished downloading. Schedules are the heart of davos and are powered by its workflow engine. To create a new Schedule, go to **Settings -> New Schedule**. Schedules are split into multiple sections, each with their own part to play in the process. ******* General ******* This defines the metadata and connection information of the Schedule. The **General** section allows you to name the Schedule, as well as define how often it should run, and where files should be managed. 
Name *[REQUIRED]* The name of the Schedule. This should be relevant to the task this schedule is performing. E.g. "Nightly Feed" Interval How often the schedule should run. The rate at which the schedule runs begins when the schedule is started for the first time. So, if it is started at 14:05, with an interval of "Every 30 minutes", it will run again at 14:35, then 15:05, and so on. .. note:: If you change the interval for an already running Schedule, you'll need to restart it before the change takes effect. .. Host The Host configuration to use for this Schedule. It will default to the first Host in the list. You cannot create a Schedule if no Hosts have been created. Host Directory *[REQUIRED]* This is the directory on the host (relative to the connection entry point) that the Schedule should use for file scanning. Absolute paths are also compliant. Local Directory *[REQUIRED]* The directory where this schedule should place file downloads. .. note :: The local directory must be relative to the container's filesystem, so should be under ``/download``. .. Transfer Type This setting will inform the Schedule whether or not it should only download matching files (``FILE``), or if it should also scan matching directories (``RECURSIVE``). This can be useful if the server contains sub-directories that may match in a scan, but should not be downloaded. Start Automatically If checked, the Schedule will automatically start when davos is started. Useful if you have a restart policy enabled in Docker and your machine requires a restart. ********* Filtering ********* This is a process that allows you to narrow down file scanning so only relevant files are processed. Filters can be exceptionally useful for host directories that are used by multiple processes or contain large numbers of files. Mandatory If checked, the Schedule will only consider scanning files if at least one filter has been defined. 
If checked and no filters are defined, nothing will be scanned, so nothing will be downloaded. Invert The default behaviour is to match all files on the host with the defined filters. Checking this option will invert that behaviour, so all files *not* matching the defined filters will be downloaded. Filters A list of strings that will be used to scan the host directory. Each file on the host is compared to this list - if it matches at least one filter, it will be downloaded. Filters can also be wildcarded using ``?`` (single character) and ``*`` (multiple characters). For example, for a file called "my_file_name.txt": .. code-block:: text my?file?name.txt = MATCH my*name.txt = MATCH my_file.name.txt = NO MATCH *file_name* = MATCH *file_name = NO MATCH *************** File Management *************** davos also provides a way to tidy processed files upon completion. You can choose to either delete the file remotely once downloaded (effectively making it a *move* operation), and you can also move the file locally. Delete from Host If checked, all matched and downloaded files will be deleted from the Host. This logic will run after each individual download has completed. .. warning :: If the FTP user does not have permission to delete files on the Host, this step will fail and the Schedule will cancel the current run. A future run of the Schedule will skip all files previously scanned. .. Move Downloaded File The location to move each successfully downloaded file. This will occur after each individual download has completed. A common use-case for this feature is to separate in-progress files with completed files (i.e. ``/download/doing`` and ``/download/done``). .. note :: The "move to" directory must be relative to the container's filesystem, so should be under ``/download``. Advanced users may create additional volume mappings if need be. .. 
note :: If davos is unable to move the file, it will remain in its originating directory, and will continue on to the Schedule's next step without failure. ****************** Downstream Actions ****************** One of the unique aspects of davos in respect to FTP management is its ability to create hooks in to other applications that may be interested in the downloaded files. This may be useful when the download action is part of a wider workflow that must be continued outside of the scope of davos. Actions defined against a Schedule will run for each individually downloaded file *after* the File Management step previously mentioned has run. There are two types of Downstream Action: *Notifications* and *API Calls*. Notifications ============= Notifications are useful if you'd like to know whenever davos has successfully downloaded a file. Generally speaking, no further action is taken after a notification is sent, but SNS may be configured to include a subscriber to a topic that performs a further action. .. note:: There is no limit to the number of notifications you can have. Pushbullet ---------- You will need an account with `Pushbullet `_ in order to use this feature. In your Pushbullet account, create an Access Token. Access Token Your Pushbullet account's access token. This will be used to authenticate notification push requests to the Pushbullet API. Amazon SNS ------------------------- You will need an `Amazon AWS `_ account to use this feature. Topic Arn The Amazon Resource Name for an SNS Topic created under your AWS account. This will be the topic that notifications are sent to. Region The region that the topic was created under. While regions are not mandatory for Topic Arns, this will be used to authenticate your account and create an SNS client in the correct region. Access Key The access key for an IAM User under your AWS account. Secret Access Key The second half of authentication with AWS. This is the secret key for the same IAM User. .. 
warning:: Be careful with IAM User permissions! You should create a new IAM User with permissions only to publish messages to your notification topic, nothing more! See :doc:`../../faq/index` for more details on best practice regarding IAM Users. API Calls ========= API Calls are a great way to create hooks in to other applications via their own HTTP API. URL The URL of the API you wish to call Method Available options are *GET*, *POST*, *PUT* and *DELETE* Content-Type Informs the target API what type of body you're sending (if any), e.g. "application/json" Message Body The request payload being sent to the target API .. note:: If you need to reference the downloaded file in an HTTP request, use **$filename**. This will resolve to the file or folder that was matched and subsequently downloaded. ================================================ FILE: docs/source/guides/index.rst ================================================ ###### Guides ###### This section will run you through the aspects of the application itself, including installation, first time use, and the concept of schedules (what they consist of), hosts, and how they tie together. .. toctree:: :maxdepth: 1 installation gettingstarted/index appsettings ================================================ FILE: docs/source/guides/installation.rst ================================================ ############ Installation ############ .. note :: davos has been written with `Docker `_ at the forefront regarding installation and deployment. This means that you should consider using the pre-built Docker image that `LinuxServer have provided `_ for this application. *********** With Docker *********** This is the recommended method of installation and deployment. Install Docker -------------- Firstly, you'll need to install `Docker `_, a container engine that is used to fire up user-space virtual containers. 
I recommend using `Docker's official guide `_ on installing the latest version of Docker CE on your machine, as the steps differ depending on your platform. Build the container =================== Create a new container from LinuxServer's image. .. code-block:: text docker create \ --name=davos \ -v :/config \ -v :/download -e PGID= -e PUID= \ -p 8080:8080 \ linuxserver/davos Params ------ * ```` The folder on your machine where davos will place its configuration and log files. Typically this will be somewhere like ``/home/me/davos``, but it can be anywhere. * ```` The folder on your machine that davos can download files to. This is the volume mount point that davos is aware of for all file downloads. * ```` The id of the user you'd like davos to run as. All files downloaded by davos will be owned by this user. * ```` The id of the group you'd like to attribute to the user davos runs as. All files downloaded by davos will be owned by this group. .. warning:: Docker will run all containers as ``root`` by default. Omitting ``PUID`` and ``PGID`` is not recommended. Run the container ================= Once the container has been created, you can run it. .. code-block:: text docker start davos After about 30 seconds, the application will be running and will be accessible on ``http://localhost:8080``. If you are running davos on a remote server, substitute ``localhost`` with the server's IP address. ************** Without Docker ************** This is not the recommended method of installation and deployment, but has the potential for being the most configurable and flexible. Davos does not have any prebuilt binaries, so you'll need to get the source and build it yourself (another reason to use Docker instead). Get the source -------------- .. 
code-block:: text wget https://github.com/linuxserver/davos/archive/LatestRelease.zip unzip LatestRelease.zip -d davos Configure the application ------------------------- By default, davos is configured to place all of its configuration in ``/config``, which may not be preferable if you're running the application on bare metal. Firstly, reconfigure davos to use your own defined directory for its database. In ``conf/release/application.properties``, change ``spring.datasource.url``, e.g.: .. code-block:: text spring.datasource.url=jdbc:h2:file:/home/me/davos You'll also need to do the same in ``conf/release/log4j2.xml``, this time for the appender: .. code-block:: xml Build davos ----------- .. note:: davos requires the `Java 8 SDK `_ to build. Once you've updated the configuration locations, you can build the binary. .. code-block:: text ./gradlew build -Penv=release This will create "davos-|release|.jar" in ``build/libs``. You should move this somewhere more fitting for an executable (``/var/lib``, for example). It may also be worth renaming the .jar to "davos.jar", although this is not necessary. Run davos --------- .. note:: davos requires the `Java 8 JRE `_ to build. This is not required if you already have the SDK installed. To run the application, run the following command: .. code-block:: text java -jar davos.jar ================================================ FILE: docs/source/index.rst ================================================ ############################## davos: FTP Download Automation ############################## This is the documentation for `davos `_, a web-based tool for automating and managing file downloads over FTP, FTPS and SFTP. Davos was born from the idea that even today, FTP still has relevance in many different markets, but there weren't many web-based solutions that provided an easy way to manage the movement of files (outside of a command line cron job) from one place to another. 
For those new to davos, look through the :doc:`guides/installation` and :doc:`guides/gettingstarted/index` guides. They will run you though how to get and set up davos for the first time. davos also provides a basic HTTP API that can be used to hook in to the application to manage things like schedules, hosts, filters, and even to stop or start individual schedules. .. toctree:: :maxdepth: 2 :caption: Contents guides/index reference/index faq/index developers/index ================================================ FILE: docs/source/reference/api.rst ================================================ ### API ### davos provides an HTTP API that exposes Schedules and Hosts so they can be managed outside the scope of the web application. This API is also used by the web application's AJAX calls. .. warning:: This API is completely unauthenticated, so anyone on your network can use this ********* /schedule ********* POST ---- Creates a single Schedule. .. code-block:: text POST /api/v2/schedule HTTP 1.0 Host: localhost:8080 Content-Type: application/json Accept: application/json { "name": String, "interval": Integer, "host": Integer, "hostDirectory": String, "localDirectory": String, "transferType": String [ FILE | RECURSIVE ], "automatic": Boolean, "moveFileTo": String, "filtersMandatory": Boolean, "invertFilters": Boolean, "deleteHostFile": Boolean, "filters": [ { "value": String } ], "notifications": { "pushbullet": [ { "apiKey": String } ], "sns": [ { "topicArn": String, "region": String, "accessKey": String, "secretAccessKey": String } ] }, "apis": [ { "url": String, "method": String [ POST | GET | PUT | DELETE ], "contentType": String, "body": String } ] } For more information regarding what each field represents, see the :doc:`../guides/gettingstarted/schedules` documentation in :doc:`../guides/gettingstarted/index`. Response ======== See: :ref:`Schedule Response Syntax `. 
************** /schedule/{id} ************** GET --- Retrieves a single Schedule based on the supplied ``{id}``. .. code-block:: text GET /api/v2/schedule/{id} HTTP 1.0 Host: localhost:8080 Accept: application/json Response ======== See: :ref:`Schedule Response Syntax `. PUT --- Updates a single Schedule based on the given ``{id}``. All fields must be supplied, even if only a subset is being updated. Use a GET to first obtain the most up-to-date payload before performing a PUT. .. code-block:: text PUT /api/v2/schedule/{id} HTTP 1.0 Host: localhost:8080 Content-Type: application/json Accept: application/json { "name": String, "interval": Integer, "host": Integer, "hostDirectory": String, "localDirectory": String, "transferType": String [ FILE | RECURSIVE ], "automatic": Boolean, "moveFileTo": String, "filtersMandatory": Boolean, "invertFilters": Boolean, "deleteHostFile": Boolean, "filters": [ { "id": Integer, "value": String } ], "notifications": { "pushbullet": [ { "id": Integer, "apiKey": String } ], "sns": [ { "id": Integer, "topicArn": String, "region": String, "accessKey": String, "secretAccessKey": String } ] }, "apis": [ { "url": String, "method": String [ POST | GET | PUT | DELETE ], "contentType": String, "body": String } ] } .. note:: If you are updating a listed object, you must provide the object's ``id``. If you do not, the API will remove the old reference and create a new one. To add a new item to the list, provide the new item (without an ``id``) alongside the existing one. Response ======== See: :ref:`Schedule Response Syntax `. DELETE ------ Deletes a single Schedule with the given ``{id}``. .. code-block:: text DELETE /api/v2/schedule/{id} HTTP 1.0 Host: localhost:8080 Accept: application/json Response ======== .. 
code-block:: javascript { "status": String [ OK | Failed ], "body": String } *************************** /schedule/{id}/scannedFiles *************************** DELETE ------ Clears all items in the given Schedule's ``lastScannedFiles``. .. code-block:: text DELETE /api/v2/schedule/{id}/scannedFiles HTTP 1.0 Host: localhost:8080 Accept: application/json Response ======== .. code-block:: javascript { "status": String [ OK | Failed ], "body": String } ********************** /schedule/{id}/execute ********************** POST ---- Starts/Stops an existing Schedule. .. code-block:: text POST /api/v2/schedule/{id}/execute Host: localhost:8080 Content-Type: application/json Accept: application/json { "command": String [ START | STOP ] } Response ======== .. code-block:: javascript { "status": String [ OK | Failed ], "body": String } ***** /host ***** POST ---- Creates a new Host. .. code-block:: text POST /api/v2/host Host: localhost:8080 Content-Type: application/json Accept: application/json { "name": String, "address": String, "port": Integer, "protocol": String [ FTP | FTPS | SFTP ], "username": String, "password": String, "identityFile": String, "identityFileEnabled": Boolean } .. note:: If ``identityFileEnabled`` is set to TRUE, you must also provide ``identityFile``, otherwise provide ``password``. ********** /host/{id} ********** GET --- Retrieves a single Host based on the given ``{id}``. .. code-block:: text GET /api/v2/host/{id} Host: localhost:8080 Accept: application/json Response ======== See: :ref:`Host Response Syntax `. PUT --- Updates a Host with the given ``{id}``. .. code-block:: text POST /api/v2/host/{id} Host: localhost:8080 Content-Type: application/json Accept: application/json { "name": String, "address": String, "port": Integer, "protocol": String [ FTP | FTPS | SFTP ], "username": String, "password": String, "identityFile": String, "identityFileEnabled": Boolean } .. 
note:: If ``identityFileEnabled`` is set to TRUE, you must also provide ``identityFile``, otherwise provide ``password``. Response ======== See: :ref:`Host Response Syntax `. DELETE ------ Deletes a single Host with the given ``{id}``. .. code-block:: text DELETE /api/v2/host/{id} HTTP 1.0 Host: localhost:8080 Accept: application/json Response ======== .. code-block:: javascript { "status": String [ OK | Failure ], "body": String } .. warning:: If the Host you are attempting to delete is being used by an active Schedule, the DELETE call will fail. *************** /testConnection *************** POST ---- Allows you to assert whether or not the provided payload contains valid Host information. .. code-block:: text POST /api/v2/testConnection Host: localhost:8080 Content-Type: application/json { "id": Integer, "name": String, "address": String, "port": Integer, "protocol": String [ FTP | FTPS | SFTP ], "username": String, "password": String, "identityFile": String, "identityFileEnabled": Boolean } Response ======== .. code-block:: javascript { "status": String [ OK | Failed ], "body": String } ************* /settings/log ************* POST ---- Changes the logging level of the application's core code. Unlike other POST calls, there is no payload body. The level is passed in as a request parameter. level The level to change the logging to. Available options are DEBUG, INFO, WARN, ERROR, FATAL .. code-block:: text POST /api/v2/settings/log?level={LEVEL} Host: localhost:8080 Accept: application/json Response ======== .. code-block:: javascript { "status": String [ OK | Failed ], "body": String } ********* Responses ********* .. _schedule-response: Schedule Response Syntax ------------------------ .. 
code-block:: javascript { "status": String [ OK ], "body": { "id": Integer, "name": String, "interval": Integer, "host": Integer, "hostDirectory": String, "localDirectory": String, "transferType": String [ FILE | RECURSIVE ], "automatic": Boolean, "moveFileTo": String, "running": Boolean, "filtersMandatory": Boolean, "invertFilters": Boolean, "lastRunTime": String, "deleteHostFile": Boolean, "lastScannedFiles": [ String ], "filters": [ { "id": Integer, "value": String } ], "notifications": { "pushbullet": [ { "id": Integer, "apiKey": String } ], "sns": [ { "id": Integer, "topicArn": String, "region": String, "accessKey": String, "secretAccessKey": String } ] }, "transfers": [ { "fileName": String, "fileSize": Integer, "directory": Boolean, "progress": { "percentageComplete": Double, "transferSpeed": Double }, "status": String [ DOWNLOADING | SKIPPED | PENDING | FINISHED ] } ], "apis": [ { "id": Integer, "url": String, "method": String [ POST | GET | PUT | DELETE ], "contentType": String, "body": String } ] } } .. note:: ``running``, ``lastScannedFiles``, ``lastRunTime`` and ``transfers`` are immutable metadata fields and can't be used in PUT or POST requests. If supplied, they will be ignored. .. host References the ``id`` of the linked host. running Descibes whether or not the Schedule is running. lastRunTime The time recorded when the Schedule last *finished* running. lastScannedFiles A list of Strings that represent the files/folders found in the last run of the schedule. transfers A list of transfer objects that describe all files being actioned. This list will only be populated when the Schedule is running and is actively downloading. .. _host-response: Host Response Syntax -------------------- Success ======= .. 
code-block:: javascript { "status": String [ OK ], "body": { "id": Integer, "name": String, "address": String, "port": Integer, "protocol": String [ FTP | FTPS | SFTP ], "username": String, "password": String, "identityFile": String, "identityFileEnabled": Boolean } } Failure ======= .. code-block:: javascript { "status": String [ Failed ], "body": String } ================================================ FILE: docs/source/reference/index.rst ================================================ ######### Reference ######### .. toctree:: :maxdepth: 1 api ================================================ FILE: docs/source/requirements.txt ================================================ sphinx_rtd_theme ================================================ FILE: gradle/wrapper/gradle-wrapper.properties ================================================ #Fri Nov 11 19:22:20 GMT 2016 distributionBase=GRADLE_USER_HOME distributionPath=wrapper/dists zipStoreBase=GRADLE_USER_HOME zipStorePath=wrapper/dists distributionUrl=https\://services.gradle.org/distributions/gradle-2.14-bin.zip ================================================ FILE: gradlew ================================================ #!/usr/bin/env bash ############################################################################## ## ## Gradle start up script for UN*X ## ############################################################################## # Attempt to set APP_HOME # Resolve links: $0 may be a link PRG="$0" # Need this for relative symlinks. while [ -h "$PRG" ] ; do ls=`ls -ld "$PRG"` link=`expr "$ls" : '.*-> \(.*\)$'` if expr "$link" : '/.*' > /dev/null; then PRG="$link" else PRG=`dirname "$PRG"`"/$link" fi done SAVED="`pwd`" cd "`dirname \"$PRG\"`/" >/dev/null APP_HOME="`pwd -P`" cd "$SAVED" >/dev/null APP_NAME="Gradle" APP_BASE_NAME=`basename "$0"` # Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. 
DEFAULT_JVM_OPTS="" # Use the maximum available, or set MAX_FD != -1 to use that value. MAX_FD="maximum" warn ( ) { echo "$*" } die ( ) { echo echo "$*" echo exit 1 } # OS specific support (must be 'true' or 'false'). cygwin=false msys=false darwin=false nonstop=false case "`uname`" in CYGWIN* ) cygwin=true ;; Darwin* ) darwin=true ;; MINGW* ) msys=true ;; NONSTOP* ) nonstop=true ;; esac CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar # Determine the Java command to use to start the JVM. if [ -n "$JAVA_HOME" ] ; then if [ -x "$JAVA_HOME/jre/sh/java" ] ; then # IBM's JDK on AIX uses strange locations for the executables JAVACMD="$JAVA_HOME/jre/sh/java" else JAVACMD="$JAVA_HOME/bin/java" fi if [ ! -x "$JAVACMD" ] ; then die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME Please set the JAVA_HOME variable in your environment to match the location of your Java installation." fi else JAVACMD="java" which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. Please set the JAVA_HOME variable in your environment to match the location of your Java installation." fi # Increase the maximum file descriptors if we can. if [ "$cygwin" = "false" -a "$darwin" = "false" -a "$nonstop" = "false" ] ; then MAX_FD_LIMIT=`ulimit -H -n` if [ $? -eq 0 ] ; then if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then MAX_FD="$MAX_FD_LIMIT" fi ulimit -n $MAX_FD if [ $? 
-ne 0 ] ; then warn "Could not set maximum file descriptor limit: $MAX_FD" fi else warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT" fi fi # For Darwin, add options to specify how the application appears in the dock if $darwin; then GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\"" fi # For Cygwin, switch paths to Windows format before running java if $cygwin ; then APP_HOME=`cygpath --path --mixed "$APP_HOME"` CLASSPATH=`cygpath --path --mixed "$CLASSPATH"` JAVACMD=`cygpath --unix "$JAVACMD"` # We build the pattern for arguments to be converted via cygpath ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null` SEP="" for dir in $ROOTDIRSRAW ; do ROOTDIRS="$ROOTDIRS$SEP$dir" SEP="|" done OURCYGPATTERN="(^($ROOTDIRS))" # Add a user-defined pattern to the cygpath arguments if [ "$GRADLE_CYGPATTERN" != "" ] ; then OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)" fi # Now convert the arguments - kludge to limit ourselves to /bin/sh i=0 for arg in "$@" ; do CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -` CHECK2=`echo "$arg"|egrep -c "^-"` ### Determine if an option if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then ### Added a condition eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"` else eval `echo args$i`="\"$arg\"" fi i=$((i+1)) done case $i in (0) set -- ;; (1) set -- "$args0" ;; (2) set -- "$args0" "$args1" ;; (3) set -- "$args0" "$args1" "$args2" ;; (4) set -- "$args0" "$args1" "$args2" "$args3" ;; (5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;; (6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;; (7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;; (8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;; (9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;; esac fi # Split up the JVM_OPTS And GRADLE_OPTS values into an array, following the shell quoting and 
substitution rules function splitJvmOpts() { JVM_OPTS=("$@") } eval splitJvmOpts $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS JVM_OPTS[${#JVM_OPTS[*]}]="-Dorg.gradle.appname=$APP_BASE_NAME" exec "$JAVACMD" "${JVM_OPTS[@]}" -classpath "$CLASSPATH" org.gradle.wrapper.GradleWrapperMain "$@" ================================================ FILE: gradlew.bat ================================================ @if "%DEBUG%" == "" @echo off @rem ########################################################################## @rem @rem Gradle startup script for Windows @rem @rem ########################################################################## @rem Set local scope for the variables with windows NT shell if "%OS%"=="Windows_NT" setlocal set DIRNAME=%~dp0 if "%DIRNAME%" == "" set DIRNAME=. set APP_BASE_NAME=%~n0 set APP_HOME=%DIRNAME% @rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. set DEFAULT_JVM_OPTS= @rem Find java.exe if defined JAVA_HOME goto findJavaFromJavaHome set JAVA_EXE=java.exe %JAVA_EXE% -version >NUL 2>&1 if "%ERRORLEVEL%" == "0" goto init echo. echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. echo. echo Please set the JAVA_HOME variable in your environment to match the echo location of your Java installation. goto fail :findJavaFromJavaHome set JAVA_HOME=%JAVA_HOME:"=% set JAVA_EXE=%JAVA_HOME%/bin/java.exe if exist "%JAVA_EXE%" goto init echo. echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME% echo. echo Please set the JAVA_HOME variable in your environment to match the echo location of your Java installation. goto fail :init @rem Get command-line arguments, handling Windows variants if not "%OS%" == "Windows_NT" goto win9xME_args if "%@eval[2+2]" == "4" goto 4NT_args :win9xME_args @rem Slurp the command line arguments. 
set CMD_LINE_ARGS= set _SKIP=2 :win9xME_args_slurp if "x%~1" == "x" goto execute set CMD_LINE_ARGS=%* goto execute :4NT_args @rem Get arguments from the 4NT Shell from JP Software set CMD_LINE_ARGS=%$ :execute @rem Setup the command line set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar @rem Execute Gradle "%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %CMD_LINE_ARGS% :end @rem End local scope for the variables with windows NT shell if "%ERRORLEVEL%"=="0" goto mainEnd :fail rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of rem the _cmd.exe /c_ return code! if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1 exit /b 1 :mainEnd if "%OS%"=="Windows_NT" endlocal :omega ================================================ FILE: src/cucumber/java/io/linuxserver/davos/bdd/ClientStepDefs.java ================================================ package io.linuxserver.davos.bdd; import static org.assertj.core.api.Assertions.assertThat; import java.io.File; import java.util.List; import org.apache.commons.io.FileUtils; import cucumber.api.java.After; import cucumber.api.java.en.Then; import cucumber.api.java.en.When; import io.linuxserver.davos.bdd.helpers.FakeFTPServerFactory; import io.linuxserver.davos.bdd.helpers.FakeSFTPServerFactory; import io.linuxserver.davos.transfer.ftp.FTPFile; import io.linuxserver.davos.transfer.ftp.client.Client; import io.linuxserver.davos.transfer.ftp.client.FTPClient; import io.linuxserver.davos.transfer.ftp.client.SFTPClient; import io.linuxserver.davos.transfer.ftp.client.UserCredentials; import io.linuxserver.davos.transfer.ftp.connection.Connection; import io.linuxserver.davos.transfer.ftp.connection.progress.ProgressListener; public class ClientStepDefs { private static final String TMP = FileUtils.getTempDirectoryPath(); private Connection connection; private Client client; private ProgressListener 
progressListener; @After("@Client") public void after() { client.disconnect(); } @When("^davos connects to the server$") public void davos_connects_to_the_server() throws Throwable { client = new FTPClient(); client.setCredentials(new UserCredentials("user", "password")); client.setHost("localhost"); client.setPort(FakeFTPServerFactory.getPort()); connection = client.connect(); } @When("^davos connects to the SFTP server$") public void davos_connects_to_the_SFTP_server() throws Throwable { client = new SFTPClient(); client.setCredentials(new UserCredentials("user", "password")); client.setHost("localhost"); client.setPort(FakeSFTPServerFactory.getPort()); connection = client.connect(); } @When("^deletes an SFTP directory$") public void deletes_an_SFTP_directory() throws Throwable { connection.deleteRemoteFile(new FTPFile("toDelete", 0, "/", 0, true)); } @Then("^the SFTP directory is deleted on the server$") public void the_SFTP_directory_is_deleted_on_the_server() throws Throwable { assertThat(new File(TMP + "/toDelete").exists()).isFalse(); } @Then("^listing the files will show the correct files$") public void listing_the_files_will_show_the_correct_files() throws Throwable { List files = connection.listFiles("/tmp"); assertThat(files).hasSize(3); assertThat(files.get(0).getName()).isEqualTo("file3.txt"); assertThat(files.get(1).getName()).isEqualTo("file2.txt"); assertThat(files.get(2).getName()).isEqualTo("file1.txt"); } @When("^downloads a file$") public void downloads_a_file() throws Throwable { connection.download(new FTPFile("file2.txt", "hello world".getBytes().length, "/tmp/", 0, false), TMP); } @Then("^the file is located in the specified local directory$") public void the_file_is_located_in_the_specified_local_directory() throws Throwable { File file = new File(TMP + "/file2.txt"); assertThat(file.exists()).isTrue(); file.delete(); } @When("^initialises a Progress Listener for that connection$") public void 
initialises_a_Progress_Listener_for_that_connection() throws Throwable { progressListener = new CountingFTPProgressListener(); connection.setProgressListener(progressListener); } @Then("^the Progress Listener will have its values updated$") public void the_Progress_Listener_will_have_its_values_updated() throws Throwable { assertThat(progressListener.getProgress()).isEqualTo(100); assertThat(((CountingFTPProgressListener) progressListener).getTimesCalled()).isEqualTo(11); } @When("^deletes a directory$") public void deletes_a_directory() throws Throwable { connection.deleteRemoteFile(new FTPFile("toDelete", 0, "/tmp", 0, true)); } @Then("^the directory is deleted on the server$") public void the_directory_is_deleted_on_the_server() throws Throwable { assertThat(FakeFTPServerFactory.checkFileExists("/tmp/toDelete")).isFalse(); } class CountingFTPProgressListener extends ProgressListener { int timesCalled; @Override public void setBytesWritten(long byteCount) { super.setBytesWritten(byteCount); timesCalled++; } public int getTimesCalled() { return timesCalled; } } } ================================================ FILE: src/cucumber/java/io/linuxserver/davos/bdd/ScheduleStepDefs.java ================================================ package io.linuxserver.davos.bdd; import static org.assertj.core.api.Assertions.assertThat; import java.io.File; import java.util.List; import org.apache.commons.io.FileUtils; import cucumber.api.java.en.Given; import cucumber.api.java.en.Then; import cucumber.api.java.en.When; import io.linuxserver.davos.bdd.helpers.FakeFTPServerFactory; import io.linuxserver.davos.persistence.dao.ScheduleDAO; import io.linuxserver.davos.persistence.model.FilterModel; import io.linuxserver.davos.persistence.model.HostModel; import io.linuxserver.davos.persistence.model.ScheduleModel; import io.linuxserver.davos.schedule.RunnableSchedule; import io.linuxserver.davos.transfer.ftp.TransferProtocol; public class ScheduleStepDefs { private static final String 
TMP = FileUtils.getTempDirectoryPath(); private ScheduleModel scheduleModel; @Given("^a schedule exists for that server, with filters$") public void a_schedule_exists_for_that_server_with_filters() throws Throwable { createBasicSchedule(); FilterModel filter1 = new FilterModel(); filter1.value = "file2*"; scheduleModel.filters.add(filter1); FilterModel filter2 = new FilterModel(); filter2.value = "file3*"; scheduleModel.filters.add(filter2); } @Given("^the schedule is set to delete host files$") public void the_schedule_is_set_to_delete_host_files() throws Throwable { scheduleModel.setDeleteHostFile(true); } @Given("^the schedule is set to invert filters$") public void the_schedule_is_set_to_invert_filters() throws Throwable { scheduleModel.setInvertFilters(true); } @Given("^the schedule is set to have mandatory filters$") public void the_schedule_is_set_to_have_mandatory_filters() throws Throwable { scheduleModel.setFiltersMandatory(true); } @Given("^a schedule exists for that server, without filters$") public void a_schedule_exists_for_that_server() throws Throwable { createBasicSchedule(); } @When("^that schedule is run$") public void that_schedule_is_run() throws Throwable { new RunnableSchedule(1L, new CucumberScheduleConfigurationDAO()).run(); } @Then("^no files are downloaded$") public void no_files_are_downloaded() throws Throwable { File file1 = new File(TMP + "/file1.txt"); File file2 = new File(TMP + "/file2.txt"); File file3 = new File(TMP + "/file3.txt"); assertThat(file1.exists()).isFalse(); assertThat(file2.exists()).isFalse(); assertThat(file3.exists()).isFalse(); } @Then("^all files not matching the filters are downloaded$") public void all_files_not_matching_the_filters_are_downloaded() throws Throwable { File file1 = new File(TMP + "/file1.txt"); File file2 = new File(TMP + "/file2.txt"); File file3 = new File(TMP + "/file3.txt"); assertThat(file1.exists()).isTrue(); assertThat(file2.exists()).isFalse(); assertThat(file3.exists()).isFalse(); 
file1.delete(); } @Then("^those files are deleted on the host$") public void those_files_are_deleted_on_the_host() throws Throwable { assertThat(FakeFTPServerFactory.checkFileExists("/tmp/file1.txt")).isTrue(); assertThat(FakeFTPServerFactory.checkFileExists("/tmp/file2.txt")).isFalse(); assertThat(FakeFTPServerFactory.checkFileExists("/tmp/file3.txt")).isFalse(); } @Then("^only the filtered files are downloaded$") public void only_the_filtered_files_are_downloaded() throws Throwable { File file1 = new File(TMP + "/file1.txt"); File file2 = new File(TMP + "/file2.txt"); File file3 = new File(TMP + "/file3.txt"); assertThat(file1.exists()).isFalse(); assertThat(file2.exists()).isTrue(); assertThat(file3.exists()).isTrue(); file2.delete(); file3.delete(); } private void createBasicSchedule() { scheduleModel = new ScheduleModel(); scheduleModel.host = new HostModel(); scheduleModel.host.address = "localhost"; scheduleModel.host.port = FakeFTPServerFactory.getPort(); scheduleModel.host.username = "user"; scheduleModel.host.password = "password"; scheduleModel.host.protocol = TransferProtocol.FTP; scheduleModel.remoteFilePath = "/tmp"; scheduleModel.localFilePath = TMP; } class CucumberScheduleConfigurationDAO implements ScheduleDAO { @Override public List getAll() { return null; } @Override public ScheduleModel fetchSchedule(Long id) { return scheduleModel; } @Override public ScheduleModel updateConfig(ScheduleModel model) { return null; } @Override public List fetchSchedulesUsingHost(Long hostId) { // TODO Auto-generated method stub return null; } @Override public void updateScannedFilesOnSchedule(Long id, List newlyScannedFiles) { // TODO Auto-generated method stub } @Override public void deleteSchedule(Long id) { // TODO Auto-generated method stub } } } ================================================ FILE: src/cucumber/java/io/linuxserver/davos/bdd/ServerStepDefs.java ================================================ package io.linuxserver.davos.bdd; import static 
org.assertj.core.api.Assertions.assertThat; import java.io.File; import java.io.IOException; import org.apache.commons.io.FileUtils; import cucumber.api.java.After; import cucumber.api.java.en.Given; import io.linuxserver.davos.bdd.helpers.FakeFTPServerFactory; import io.linuxserver.davos.bdd.helpers.FakeSFTPServerFactory; public class ServerStepDefs { private static final String TMP = FileUtils.getTempDirectoryPath(); @Given("^there is an FTP server running$") public void there_is_an_FTP_server_running() throws Throwable { FakeFTPServerFactory.setup(); } @Given("^the FTP server has a directory with contents$") public void the_FTP_server_has_a_directory_with_contents() throws Throwable { FakeFTPServerFactory.addDirectoryWithNameAndNumberOfFiles("toDelete", 3); } @Given("^the FTP server has a directory without contents$") public void the_FTP_server_has_a_directory_without_contents() throws Throwable { FakeFTPServerFactory.addDirectoryWithNameAndNumberOfFiles("toDelete", 0); } @Given("^there is an SFTP server running$") public void there_is_an_SFTP_server_running() throws Throwable { FakeSFTPServerFactory.setup(); } @Given("^the SFTP server has a directory with contents$") public void the_SFTP_server_has_a_directory_with_contents() throws Throwable { FakeSFTPServerFactory.addDirectoryWithNameAndNumberOfFiles("toDelete", 3); assertThat(new File(TMP + "/toDelete").exists()).isTrue(); assertThat(new File(TMP + "/toDelete/file0").exists()).isTrue(); assertThat(new File(TMP + "/toDelete/file1").exists()).isTrue(); assertThat(new File(TMP + "/toDelete/file2").exists()).isTrue(); } @Given("^the SFTP server has a directory without contents$") public void the_SFTP_server_has_a_directory_without_contents() throws Throwable { FakeSFTPServerFactory.addDirectoryWithNameAndNumberOfFiles("toDelete", 0); } @After("@Server") public void after() { FakeFTPServerFactory.stop(); } @After("@SFTPServer") public void afterSFTP() throws IOException { FakeSFTPServerFactory.stop(); } } 
================================================ FILE: src/cucumber/java/io/linuxserver/davos/bdd/helpers/FakeFTPServerFactory.java ================================================ package io.linuxserver.davos.bdd.helpers; import org.mockftpserver.fake.FakeFtpServer; import org.mockftpserver.fake.UserAccount; import org.mockftpserver.fake.filesystem.DirectoryEntry; import org.mockftpserver.fake.filesystem.FileEntry; import org.mockftpserver.fake.filesystem.FileSystem; import org.mockftpserver.fake.filesystem.UnixFakeFileSystem; public class FakeFTPServerFactory { private static FakeFtpServer server; public static int getPort() { return server.getServerControlPort(); } public static FakeFtpServer setup() { server = new FakeFtpServer(); server.addUserAccount(new UserAccount("user", "password", "/tmp")); server.setServerControlPort(0); FileSystem fileSystem = new UnixFakeFileSystem(); fileSystem.add(new DirectoryEntry("/tmp")); fileSystem.add(new FileEntry("/tmp/file1.txt", "hello world")); fileSystem.add(new FileEntry("/tmp/file2.txt", "hello world")); fileSystem.add(new FileEntry("/tmp/file3.txt", "hello world")); server.setFileSystem(fileSystem); server.start(); return server; } public static boolean checkFileExists(String filePath) { return server.getFileSystem().exists(filePath); } public static void addDirectoryWithNameAndNumberOfFiles(String name, int numberOfFiles) { server.getFileSystem().add(new DirectoryEntry("/tmp/" + name)); int i; for (i = 0; i < numberOfFiles; i++) server.getFileSystem().add(new FileEntry("/tmp/" + name + "/file" + i)); } public static void stop() { server.stop(); } } ================================================ FILE: src/cucumber/java/io/linuxserver/davos/bdd/helpers/FakeSFTPServerFactory.java ================================================ package io.linuxserver.davos.bdd.helpers; import java.io.File; import java.io.IOException; import java.nio.file.Path; import java.nio.file.Paths; import java.util.Collections; import 
org.apache.commons.io.FileUtils; import org.apache.sshd.common.NamedFactory; import org.apache.sshd.common.file.virtualfs.VirtualFileSystemFactory; import org.apache.sshd.common.session.Session; import org.apache.sshd.server.Command; import org.apache.sshd.server.SshServer; import org.apache.sshd.server.auth.password.PasswordAuthenticator; import org.apache.sshd.server.auth.password.PasswordChangeRequiredException; import org.apache.sshd.server.keyprovider.SimpleGeneratorHostKeyProvider; import org.apache.sshd.server.scp.ScpCommandFactory; import org.apache.sshd.server.session.ServerSession; import org.apache.sshd.server.shell.ProcessShellFactory; import org.apache.sshd.server.subsystem.sftp.SftpSubsystemFactory; public class FakeSFTPServerFactory { private static final String TMP = FileUtils.getTempDirectoryPath(); private static final String USERNAME = "user"; private static final String PASSWORD = "password"; private static SshServer sshd; public static void setup() throws IOException { SftpSubsystemFactory factory = new SftpSubsystemFactory.Builder().build(); sshd = SshServer.setUpDefaultServer(); sshd.setKeyPairProvider(new SimpleGeneratorHostKeyProvider()); sshd.setShellFactory(new ProcessShellFactory(new String[] { "/bin/sh", "-i", "-l" })); sshd.setCommandFactory(new ScpCommandFactory()); sshd.setSubsystemFactories(Collections.> singletonList(factory)); sshd.setPasswordAuthenticator(new PasswordAuthenticator() { @Override public boolean authenticate(String username, String password, ServerSession session) throws PasswordChangeRequiredException { return USERNAME.equals(username) && PASSWORD.equals(password); } }); sshd.setFileSystemFactory(new VirtualFileSystemFactory() { @Override protected Path computeRootDir(Session session) throws IOException { return Paths.get(TMP); } }); sshd.start(); } public static void stop() throws IOException { sshd.stop(); } public static void addDirectoryWithNameAndNumberOfFiles(String name, int numberOfFiles) throws IOException 
{ File directory = new File(TMP + "/" + name); directory.mkdirs(); int i; for (i = 0; i < numberOfFiles; i++) new File(TMP + "/" + name + "/file" + i).createNewFile(); } public static int getPort() { return sshd.getPort(); } } ================================================ FILE: src/cucumber/java/io/linuxserver/davos/bdd/helpers/Logging.java ================================================ package io.linuxserver.davos.bdd.helpers; import org.apache.logging.log4j.Level; import org.apache.logging.log4j.core.config.Configurator; import cucumber.api.java.Before; public class Logging { @Before public void before() { Configurator.setRootLevel(Level.ERROR); Configurator.setLevel("io.linuxserver", Level.ERROR); } } ================================================ FILE: src/cucumber/resources/Client.feature ================================================ @Client Feature: General client tests @Server Scenario: Connecting to the FTP server Given there is an FTP server running When davos connects to the server Then listing the files will show the correct files @Server Scenario: Downloading a file from the server Given there is an FTP server running When davos connects to the server And downloads a file Then the file is located in the specified local directory @Listener @Server Scenario: Download with FTP Progress Listener Given there is an FTP server running When davos connects to the server And initialises a Progress Listener for that connection And downloads a file Then the Progress Listener will have its values updated @Server Scenario: Deleting directories on the remote FTP server Given there is an FTP server running And the FTP server has a directory with contents When davos connects to the server And deletes a directory Then the directory is deleted on the server @Server Scenario: Deleting directories on the remote FTP server (empty) Given there is an FTP server running And the FTP server has a directory without contents When davos connects to the server And deletes a 
directory Then the directory is deleted on the server @SFTPServer Scenario: Deleting directories on the remote SFTP server Given there is an SFTP server running And the SFTP server has a directory with contents When davos connects to the SFTP server And deletes an SFTP directory Then the SFTP directory is deleted on the server @SFTPServer Scenario: Deleting directories on the remote SFTP server (empty) Given there is an SFTP server running And the SFTP server has a directory without contents When davos connects to the SFTP server And deletes an SFTP directory Then the SFTP directory is deleted on the server ================================================ FILE: src/cucumber/resources/Schedule.feature ================================================ @Schedule @Server Feature: Scheduling Scenario: Finding files that match filters Given there is an FTP server running And a schedule exists for that server, with filters When that schedule is run Then only the filtered files are downloaded Scenario: Should delete files once matched and downloaded Given there is an FTP server running And a schedule exists for that server, with filters And the schedule is set to delete host files When that schedule is run Then only the filtered files are downloaded And those files are deleted on the host Scenario: Should download all files not matching filters if inverted Given there is an FTP server running And a schedule exists for that server, with filters And the schedule is set to invert filters When that schedule is run Then all files not matching the filters are downloaded Scenario: Should not download any files if filters are mandatory but not set Given there is an FTP server running And a schedule exists for that server, without filters And the schedule is set to have mandatory filters When that schedule is run Then no files are downloaded ================================================ FILE: src/main/java/io/linuxserver/davos/DavosApplication.java 
================================================
package io.linuxserver.davos;

import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;

/** Spring Boot entry point for the davos application. */
@SpringBootApplication
public class DavosApplication {

    public static void main(String[] args) {
        SpringApplication.run(DavosApplication.class, args);
    }
}

================================================
FILE: src/main/java/io/linuxserver/davos/Version.java
================================================
package io.linuxserver.davos;

/**
 * Simple semantic-style version (major.minor.patch) with ordering support.
 */
public class Version {

    private int major;
    private int minor;
    private int patch;

    public Version(int major, int minor, int patch) {
        this.major = major;
        this.minor = minor;
        this.patch = patch;
    }

    /**
     * Parses a "major.minor.patch" string.
     *
     * @throws NumberFormatException if any segment is not an integer
     * @throws ArrayIndexOutOfBoundsException if fewer than three segments are present
     */
    public Version(String version) {
        String[] bits = version.split("\\.");
        this.major = Integer.parseInt(bits[0]);
        this.minor = Integer.parseInt(bits[1]);
        this.patch = Integer.parseInt(bits[2]);
    }

    public int getMajor() {
        return major;
    }

    public int getMinor() {
        return minor;
    }

    public int getPatch() {
        return patch;
    }

    /**
     * Returns true when this version is strictly newer than the given one,
     * comparing major, then minor, then patch.
     */
    public boolean isNewerThan(Version version) {
        // BUGFIX: previously a lower major version could still be reported as
        // newer (e.g. 1.5.0 vs 2.0.0) because the major comparison never
        // short-circuited to false before minor/patch were inspected.
        if (this.major > version.major)
            return true;
        else if (this.major < version.major)
            return false;
        if (this.minor > version.minor)
            return true;
        else if (this.minor < version.minor)
            return false;
        return this.patch > version.patch;
    }

    @Override
    public String toString() {
        return new StringBuilder().append(major).append(".").append(minor).append(".").append(patch).toString();
    }
}

================================================
FILE: src/main/java/io/linuxserver/davos/converters/Converter.java
================================================
package io.linuxserver.davos.converters;

public interface Converter {

    T convertTo(S source);

    S convertFrom(T source);
}

================================================
FILE: src/main/java/io/linuxserver/davos/converters/HostConverter.java
================================================
package io.linuxserver.davos.converters;

import org.springframework.stereotype.Component;
import
io.linuxserver.davos.persistence.model.HostModel; import io.linuxserver.davos.transfer.ftp.TransferProtocol; import io.linuxserver.davos.web.Host; import io.linuxserver.davos.web.selectors.ProtocolSelector; @Component public class HostConverter implements Converter { @Override public Host convertTo(HostModel source) { Host host = new Host(); host.setId(source.id); host.setName(source.name); host.setAddress(source.address); host.setPort(source.port); host.setUsername(source.username); host.setPassword(source.password); host.setProtocol(ProtocolSelector.valueOf(source.protocol.toString())); host.setIdentityFileEnabled(source.isIdentityFileEnabled()); host.setIdentityFile(source.identityFile); return host; } @Override public HostModel convertFrom(Host source) { HostModel model = new HostModel(); model.id = source.getId(); model.name = source.getName(); model.address = source.getAddress(); model.port = source.getPort(); model.username = source.getUsername(); model.password = source.getPassword(); model.protocol = TransferProtocol.valueOf(source.getProtocol().toString()); model.setIdentityFileEnabled(source.isIdentityFileEnabled()); model.identityFile = source.getIdentityFile(); return model; } } ================================================ FILE: src/main/java/io/linuxserver/davos/converters/ScheduleConverter.java ================================================ package io.linuxserver.davos.converters; import static java.util.stream.Collectors.toList; import org.apache.commons.lang3.StringUtils; import org.joda.time.DateTime; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.stereotype.Component; import io.linuxserver.davos.persistence.model.ActionModel; import io.linuxserver.davos.persistence.model.FilterModel; import io.linuxserver.davos.persistence.model.ScheduleModel; import io.linuxserver.davos.transfer.ftp.FileTransferType; import io.linuxserver.davos.web.API; import io.linuxserver.davos.web.Filter; import 
io.linuxserver.davos.web.Pushbullet; import io.linuxserver.davos.web.SNS; import io.linuxserver.davos.web.Schedule; import io.linuxserver.davos.web.selectors.MethodSelector; import io.linuxserver.davos.web.selectors.TransferSelector; @Component public class ScheduleConverter implements Converter { private static final Logger LOGGER = LoggerFactory.getLogger(ScheduleConverter.class); @Override public Schedule convertTo(ScheduleModel source) { Schedule schedule = new Schedule(); schedule.setId(source.id); schedule.setInterval(source.interval); schedule.setLocalDirectory(source.localFilePath); schedule.setName(source.name); schedule.setHostDirectory(source.remoteFilePath); schedule.setAutomatic(source.getStartAutomatically()); schedule.setHost(source.host.id); schedule.setTransferType(TransferSelector.valueOf(source.transferType.toString())); schedule.setMoveFileTo(source.moveFileTo); schedule.getLastScannedFiles().addAll(source.scannedFiles.stream().map(f -> f.file).collect(toList())); schedule.setFiltersMandatory(source.getFiltersMandatory()); schedule.setDeleteHostFile(source.getDeleteHostFile()); schedule.setInvertFilters(source.getInvertFilters()); if (source.getLastRunTime() > 0) schedule.setLastRunTime(new DateTime(source.getLastRunTime()).toString("yyyy-MM-dd HH:mm:ss")); for (ActionModel action : source.actions) { if ("api".equals(action.actionType)) { API api = new API(); api.setId(action.id); api.setUrl(action.f1); api.setMethod(MethodSelector.valueOf(action.f2)); api.setContentType(action.f3); api.setBody(action.f4); schedule.getApis().add(api); } else if ("pushbullet".equals(action.actionType)) { Pushbullet notification = new Pushbullet(); notification.setId(action.id); notification.setApiKey(action.f1); schedule.getNotifications().getPushbullet().add(notification); } else if ("sns".equals(action.actionType)) { SNS sns = new SNS(); sns.setId(action.id); sns.setTopicArn(action.f1); sns.setRegion(action.f2); sns.setAccessKey(action.f3); 
sns.setSecretAccessKey(action.f4); schedule.getNotifications().getSns().add(sns); } } for (FilterModel filter : source.filters) { Filter filterDto = new Filter(); filterDto.setId(filter.id); filterDto.setValue(filter.value); schedule.getFilters().add(filterDto); } return schedule; } @Override public ScheduleModel convertFrom(Schedule source) { ScheduleModel model = new ScheduleModel(); model.id = source.getId(); model.name = source.getName(); model.interval = source.getInterval(); model.localFilePath = source.getLocalDirectory(); model.remoteFilePath = source.getHostDirectory(); model.setStartAutomatically(source.isAutomatic()); model.transferType = FileTransferType.valueOf(source.getTransferType().toString()); model.moveFileTo = source.getMoveFileTo(); model.setFiltersMandatory(source.isFiltersMandatory()); model.setInvertFilters(source.isInvertFilters()); model.setDeleteHostFile(source.isDeleteHostFile()); if (StringUtils.isNotBlank(source.getMoveFileTo())) { LOGGER.debug("Converting MoveTo to internal action: {}", source.getMoveFileTo()); ActionModel moveTo = new ActionModel(); moveTo.actionType = "move"; moveTo.f1 = source.getMoveFileTo(); moveTo.schedule = model; model.actions.add(moveTo); } for (Pushbullet action : source.getNotifications().getPushbullet()) { LOGGER.debug("Converting Pushbullet to internal action: {}", action.getApiKey()); ActionModel actionModel = new ActionModel(); actionModel.id = action.getId(); actionModel.actionType = "pushbullet"; actionModel.f1 = action.getApiKey(); actionModel.schedule = model; model.actions.add(actionModel); } for (SNS action : source.getNotifications().getSns()) { LOGGER.debug("Converting SNS to internal action: {}", action.getTopicArn()); ActionModel actionModel = new ActionModel(); actionModel.id = action.getId(); actionModel.actionType = "sns"; actionModel.f1 = action.getTopicArn(); actionModel.f2 = action.getRegion(); actionModel.f3 = action.getAccessKey(); actionModel.f4 = action.getSecretAccessKey(); 
actionModel.schedule = model; model.actions.add(actionModel); } for (API action : source.getApis()) { LOGGER.debug("Converting API to internal action: {}", action.getUrl()); ActionModel actionModel = new ActionModel(); actionModel.id = action.getId(); actionModel.actionType = "api"; actionModel.f1 = action.getUrl(); actionModel.f2 = action.getMethod().toString(); actionModel.f3 = action.getContentType(); actionModel.f4 = action.getBody(); actionModel.schedule = model; model.actions.add(actionModel); } for (Filter filter : source.getFilters()) { FilterModel filterModel = new FilterModel(); filterModel.id = filter.getId(); filterModel.value = filter.getValue(); filterModel.schedule = model; model.filters.add(filterModel); } return model; } } ================================================ FILE: src/main/java/io/linuxserver/davos/delegation/services/HostService.java ================================================ package io.linuxserver.davos.delegation.services; import java.util.List; import io.linuxserver.davos.web.Host; public interface HostService { List fetchAllHosts(); Host fetchHost(Long id); Host saveHost(Host host); void deleteHost(Long id); List fetchSchedulesUsingHost(Long id); void testConnection(Host host); } ================================================ FILE: src/main/java/io/linuxserver/davos/delegation/services/HostServiceImpl.java ================================================ package io.linuxserver.davos.delegation.services; import java.util.List; import java.util.stream.Collectors; import javax.annotation.Resource; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.stereotype.Component; import io.linuxserver.davos.converters.HostConverter; import io.linuxserver.davos.exception.HostInUseException; import io.linuxserver.davos.persistence.dao.HostDAO; import io.linuxserver.davos.persistence.dao.ScheduleDAO; import io.linuxserver.davos.persistence.model.HostModel; import 
io.linuxserver.davos.transfer.ftp.client.Client; import io.linuxserver.davos.transfer.ftp.client.ClientFactory; import io.linuxserver.davos.transfer.ftp.client.UserCredentials; import io.linuxserver.davos.transfer.ftp.client.UserCredentials.Identity; import io.linuxserver.davos.web.Host; @Component public class HostServiceImpl implements HostService { private static final Logger LOGGER = LoggerFactory.getLogger(HostServiceImpl.class); @Resource private HostDAO hostDAO; @Resource private ScheduleDAO scheduleDAO; @Resource private HostConverter hostConverter; @Override public Host fetchHost(Long id) { return toHost(hostDAO.fetchHost(id)); } @Override public Host saveHost(Host host) { HostModel model = hostConverter.convertFrom(host); return hostConverter.convertTo(hostDAO.saveHost(model)); } @Override public void deleteHost(Long id) { List schedulesUsingHost = fetchSchedulesUsingHost(id); if (schedulesUsingHost.isEmpty()) { hostDAO.deleteHost(id); } else { throw new HostInUseException("Host is being used by Schedules: " + schedulesUsingHost); } } @Override public List fetchAllHosts() { return hostDAO.fetchAllHosts().stream().map(this::toHost).collect(Collectors.toList()); } private Host toHost(HostModel model) { return hostConverter.convertTo(model); } @Override public List fetchSchedulesUsingHost(Long id) { return scheduleDAO.fetchSchedulesUsingHost(id).stream().map(s -> s.id).collect(Collectors.toList()); } @Override public void testConnection(Host host) { HostModel model = hostConverter.convertFrom(host); LOGGER.info("Attempting to test connection to host", model.address); Client client = new ClientFactory().getClient(model.protocol); LOGGER.debug("Credentials: {} : {}", model.username, model.password); UserCredentials userCredentials; if (model.isIdentityFileEnabled()) userCredentials = new UserCredentials(model.username, new Identity(model.identityFile)); else userCredentials = new UserCredentials(model.username, model.password); 
client.setCredentials(userCredentials); client.setHost(model.address); client.setPort(model.port); LOGGER.debug("Making connection on port {}", model.port); client.connect(); LOGGER.info("Connection successful."); client.disconnect(); LOGGER.debug("Disconnected"); } } ================================================ FILE: src/main/java/io/linuxserver/davos/delegation/services/ScheduleService.java ================================================ package io.linuxserver.davos.delegation.services; import java.util.List; import io.linuxserver.davos.web.Schedule; public interface ScheduleService { void startSchedule(Long id); void stopSchedule(Long id); void deleteSchedule(Long id); List fetchAllSchedules(); Schedule fetchSchedule(Long id); Schedule createSchedule(Schedule schedule); Schedule updateSchedule(Schedule schedule); void clearScannedFilesFromSchedule(Long id); } ================================================ FILE: src/main/java/io/linuxserver/davos/delegation/services/ScheduleServiceImpl.java ================================================ package io.linuxserver.davos.delegation.services; import static java.util.stream.Collectors.toList; import java.util.List; import javax.annotation.Resource; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.stereotype.Component; import io.linuxserver.davos.converters.HostConverter; import io.linuxserver.davos.converters.ScheduleConverter; import io.linuxserver.davos.persistence.dao.HostDAO; import io.linuxserver.davos.persistence.dao.ScheduleDAO; import io.linuxserver.davos.persistence.model.HostModel; import io.linuxserver.davos.persistence.model.ScheduleModel; import io.linuxserver.davos.schedule.ScheduleExecutor; import io.linuxserver.davos.schedule.workflow.transfer.FTPTransfer; import io.linuxserver.davos.web.Schedule; import io.linuxserver.davos.web.Transfer; import io.linuxserver.davos.web.Transfer.Progress; @Component public class ScheduleServiceImpl implements ScheduleService { 
private static final Logger LOGGER = LoggerFactory.getLogger(ScheduleServiceImpl.class);

    @Resource
    private ScheduleConverter scheduleConverter;

    @Resource
    private ScheduleExecutor scheduleExecutor;

    // NOTE(review): this converter is not referenced in the visible class body;
    // confirm it is genuinely unused before removing.
    @Resource
    private HostConverter hostConverter;

    @Resource
    private ScheduleDAO scheduleDAO;

    @Resource
    private HostDAO hostDAO;

    @Override
    public void startSchedule(Long id) {
        LOGGER.info("Starting schedule");
        scheduleExecutor.startSchedule(id);
        LOGGER.info("Schedule started");
    }

    @Override
    public void stopSchedule(Long id) {
        LOGGER.info("Stopping schedule");
        scheduleExecutor.stopSchedule(id);
        LOGGER.info("Schedule stopped");
    }

    /** Deletes a schedule, first stopping it if it is currently executing. */
    @Override
    public void deleteSchedule(Long id) {

        if (scheduleExecutor.isScheduleRunning(id)) {
            LOGGER.debug("Schedule is running, so will stop it before deleting");
            stopSchedule(id);
        }

        scheduleDAO.deleteSchedule(id);
    }

    @Override
    public List<Schedule> fetchAllSchedules() {
        return scheduleDAO.getAll().stream().map(this::toSchedule).collect(toList());
    }

    @Override
    public Schedule fetchSchedule(Long id) {
        return toSchedule(scheduleDAO.fetchSchedule(id));
    }

    @Override
    public Schedule createSchedule(Schedule schedule) {

        ScheduleModel model = scheduleConverter.convertFrom(schedule);
        model.host = getHostForSchedule(schedule.getHost());

        return scheduleConverter.convertTo(scheduleDAO.updateConfig(model));
    }

    /**
     * Updates an existing schedule, preserving the last-run time recorded
     * against the stored model.
     *
     * @throws IllegalArgumentException if the schedule carries no ID.
     */
    @Override
    public Schedule updateSchedule(Schedule schedule) {

        if (null == schedule.getId())
            // BUG FIX: corrected "Schdule" typo in the error message.
            throw new IllegalArgumentException("Schedule has no ID");

        ScheduleModel existingModel = scheduleDAO.fetchSchedule(schedule.getId());

        ScheduleModel model = scheduleConverter.convertFrom(schedule);
        model.host = getHostForSchedule(schedule.getHost());
        model.setLastRunTime(existingModel.getLastRunTime());

        return scheduleConverter.convertTo(scheduleDAO.updateConfig(model));
    }

    @Override
    public void clearScannedFilesFromSchedule(Long id) {

        ScheduleModel model = scheduleDAO.fetchSchedule(id);
        model.scannedFiles.clear();

        scheduleDAO.updateConfig(model);
    }

    /** Resolves the host model for a schedule, failing fast if it is missing. */
    private HostModel getHostForSchedule(Long id) {

        HostModel hostModel = hostDAO.fetchHost(id);

        if (null == hostModel) {
            LOGGER.info("Schedule is referencing a host that does not exist");
            throw new IllegalArgumentException("Host with id " + id + " does not exist.");
        }

        return hostModel;
    }

    /**
     * Converts the persistence model to the web model and, if the schedule is
     * currently executing, decorates it with its in-flight transfers.
     */
    private Schedule toSchedule(ScheduleModel model) {

        Schedule convertTo = scheduleConverter.convertTo(model);

        if (scheduleExecutor.isScheduleRunning(convertTo.getId())) {

            convertTo.setRunning(true);

            List<FTPTransfer> transfers = scheduleExecutor.getRunningSchedule(convertTo.getId()).getSchedule().getTransfers();
            convertTo.getTransfers().addAll(transfers.stream().map(this::toTransfer).collect(toList()));
        }

        return convertTo;
    }

    /** Maps a low-level FTP transfer onto the web-facing Transfer view. */
    private Transfer toTransfer(FTPTransfer ftpTransfer) {

        Transfer transfer = new Transfer();

        transfer.setFileName(ftpTransfer.getFile().getName());
        transfer.setFileSize(ftpTransfer.getFile().getSize());
        transfer.setDirectory(ftpTransfer.getFile().isDirectory());
        transfer.setStatus(ftpTransfer.getState().toString());

        // A listener is only attached once the transfer is actually moving
        // bytes; without one there is no progress to report.
        if (null != ftpTransfer.getListener()) {

            Progress progress = new Progress();
            progress.setPercentageComplete(ftpTransfer.getListener().getProgress());
            progress.setTransferSpeed(ftpTransfer.getListener().getTransferSpeed());

            transfer.setProgress(progress);
        }

        return transfer;
    }
}

================================================ FILE: src/main/java/io/linuxserver/davos/delegation/services/SettingsService.java ================================================
package io.linuxserver.davos.delegation.services;

import io.linuxserver.davos.Version;
import io.linuxserver.davos.web.selectors.LogLevelSelector;

/** Service facade for application-level settings: logging and version checks. */
public interface SettingsService {

    void setLoggingLevel(LogLevelSelector level);

    LogLevelSelector getCurrentLoggingLevel();

    Version retrieveRemoteVersion();
}

================================================ FILE: src/main/java/io/linuxserver/davos/delegation/services/SettingsServiceImpl.java ================================================
package io.linuxserver.davos.delegation.services;
import org.apache.logging.log4j.Level; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.http.HttpEntity; import org.springframework.http.HttpHeaders; import org.springframework.http.HttpMethod; import org.springframework.http.ResponseEntity; import org.springframework.http.converter.HttpMessageConversionException; import org.springframework.stereotype.Component; import org.springframework.web.client.RestClientException; import org.springframework.web.client.RestTemplate; import io.linuxserver.davos.Version; import io.linuxserver.davos.logging.LoggingManager; import io.linuxserver.davos.web.selectors.LogLevelSelector; @Component public class SettingsServiceImpl implements SettingsService { private static final Logger LOGGER = LoggerFactory.getLogger(SettingsServiceImpl.class); private LogLevelSelector currentLevel = LogLevelSelector.INFO; private RestTemplate restTemplate = new RestTemplate(); @Override public void setLoggingLevel(LogLevelSelector level) { currentLevel = level; LoggingManager.setLogLevel(Level.valueOf(level.toString())); } @Override public LogLevelSelector getCurrentLoggingLevel() { return currentLevel; } @Override public Version retrieveRemoteVersion() { try { String gitHubURL = "https://raw.githubusercontent.com/linuxserver/davos/LatestRelease/version.txt"; LOGGER.debug("Calling out to GitHub to check for new version ({})", gitHubURL); ResponseEntity response = restTemplate.exchange(gitHubURL, HttpMethod.GET, new HttpEntity(new HttpHeaders()), String.class); String body = response.getBody(); LOGGER.debug("GitHub responded with a {}, and body of {}", response.getStatusCode(), body); return new Version(body); } catch (RestClientException | HttpMessageConversionException e) { LOGGER.error("Unable to get version from GitHub: {}", e.getMessage(), e); LOGGER.debug("Defaulting remote version to zero"); return new Version(0, 0, 0); } } } ================================================ FILE: 
src/main/java/io/linuxserver/davos/dto/ActionDTO.java ================================================
package io.linuxserver.davos.dto;

import org.apache.commons.lang3.builder.ToStringBuilder;
import org.apache.commons.lang3.builder.ToStringStyle;

/** Web/API representation of a post-download action. f1-f4 are free-form action parameters. */
public class ActionDTO {

    public Long id;
    public String actionType;
    public String f1;
    public String f2;
    public String f3;
    public String f4;

    @Override
    public String toString() {
        return ToStringBuilder.reflectionToString(this, ToStringStyle.SHORT_PREFIX_STYLE);
    }
}

================================================ FILE: src/main/java/io/linuxserver/davos/dto/FTPFileDTO.java ================================================
package io.linuxserver.davos.dto;

/** Web/API representation of a single remote file listing entry. */
public class FTPFileDTO {

    public String name;
    public String extension;
    public String modified;
    public String path;
    public long size;
    public boolean directory;
}

================================================ FILE: src/main/java/io/linuxserver/davos/dto/FilterDTO.java ================================================
package io.linuxserver.davos.dto;

import org.apache.commons.lang3.builder.ToStringBuilder;
import org.apache.commons.lang3.builder.ToStringStyle;

/** Web/API representation of a filename filter attached to a schedule. */
public class FilterDTO {

    public Long id;
    public String value;

    @Override
    public String toString() {
        return ToStringBuilder.reflectionToString(this, ToStringStyle.SHORT_PREFIX_STYLE);
    }
}

================================================ FILE: src/main/java/io/linuxserver/davos/dto/HostDTO.java ================================================
package io.linuxserver.davos.dto;

import io.linuxserver.davos.transfer.ftp.TransferProtocol;

/** Web/API representation of a remote host. Protocol defaults to plain FTP. */
public class HostDTO {

    public String name;
    public String address;
    public int port;
    public String username;
    public String password;
    public TransferProtocol protocol = TransferProtocol.FTP;
}

================================================ FILE: src/main/java/io/linuxserver/davos/dto/ScheduleDTO.java ================================================
package io.linuxserver.davos.dto;

import java.util.ArrayList;
import java.util.List;

import org.apache.commons.lang3.builder.ToStringBuilder;
import org.apache.commons.lang3.builder.ToStringStyle;

import io.linuxserver.davos.transfer.ftp.FileTransferType;
import io.linuxserver.davos.transfer.ftp.TransferProtocol;

/** Web/API representation of a full schedule configuration. */
public class ScheduleDTO {

    public Long id;
    public String name;
    public boolean startAutomatically;
    public int interval;
    public TransferProtocol connectionType;
    public String hostName;
    public int port;
    public String username;
    public String password;
    public String remoteFilePath;
    public String localFilePath;
    public long lastRun;
    public boolean running;
    public FileTransferType transferType;

    // IDIOM FIX: raw types replaced with parameterised collections.
    public List<FilterDTO> filters = new ArrayList<>();
    public List<ActionDTO> actions = new ArrayList<>();

    @Override
    public String toString() {
        return ToStringBuilder.reflectionToString(this, ToStringStyle.SHORT_PREFIX_STYLE);
    }
}

================================================ FILE: src/main/java/io/linuxserver/davos/dto/ScheduleProcessResponse.java ================================================
package io.linuxserver.davos.dto;

/** Simple acknowledgement payload returned after processing a schedule request. */
public class ScheduleProcessResponse {

    public String message = "OK";
    public Long id;
}

================================================ FILE: src/main/java/io/linuxserver/davos/exception/HostInUseException.java ================================================
package io.linuxserver.davos.exception;

/** Thrown when a host cannot be deleted because schedules still reference it. */
public class HostInUseException extends RuntimeException {

    private static final long serialVersionUID = 618892455818185964L;

    public HostInUseException(String message) {
        super(message);
    }
}

================================================ FILE: src/main/java/io/linuxserver/davos/exception/ScheduleAlreadyRunningException.java ================================================
package io.linuxserver.davos.exception;

/** Thrown when a start is requested for a schedule that is already executing. */
public class ScheduleAlreadyRunningException extends RuntimeException {

    private static final long serialVersionUID = 1L;

    public ScheduleAlreadyRunningException() {
        super("The schedule is already running");
    }
}
================================================ FILE: src/main/java/io/linuxserver/davos/exception/ScheduleNotRunningException.java ================================================ package io.linuxserver.davos.exception; public class ScheduleNotRunningException extends RuntimeException { private static final long serialVersionUID = 1L; public ScheduleNotRunningException() { super("The schedule was not running"); } } ================================================ FILE: src/main/java/io/linuxserver/davos/logging/LoggingManager.java ================================================ package io.linuxserver.davos.logging; import org.apache.logging.log4j.Level; import org.apache.logging.log4j.core.config.Configurator; import org.slf4j.Logger; import org.slf4j.LoggerFactory; public class LoggingManager { private static final Logger LOGGER = LoggerFactory.getLogger(LoggingManager.class); public static void enableDebug() { Configurator.setLevel("io.linuxserver", Level.DEBUG); LOGGER.debug("DEBUG has been enabled"); } public static void disableDebug() { Configurator.setLevel("io.linuxserver", Level.INFO); LOGGER.info("DEBUG has been disabled. 
Back at INFO."); } public static void setLogLevel(Level level) { LOGGER.info("Logging level now set at {}", level); Configurator.setLevel("io.linuxserver", level); } } ================================================ FILE: src/main/java/io/linuxserver/davos/persistence/dao/DefaultHostDAO.java ================================================ package io.linuxserver.davos.persistence.dao; import java.util.List; import javax.annotation.Resource; import org.springframework.stereotype.Component; import io.linuxserver.davos.persistence.model.HostModel; import io.linuxserver.davos.persistence.repository.HostRepository; @Component public class DefaultHostDAO implements HostDAO { @Resource private HostRepository hostRepository; @Override public HostModel saveHost(HostModel host) { return hostRepository.save(host); } @Override public HostModel fetchHost(Long id) { return hostRepository.findOne(id); } @Override public List fetchAllHosts() { return hostRepository.findAll(); } @Override public void deleteHost(Long id) { hostRepository.delete(id); } } ================================================ FILE: src/main/java/io/linuxserver/davos/persistence/dao/DefaultScheduleDAO.java ================================================ package io.linuxserver.davos.persistence.dao; import static java.util.stream.Collectors.toList; import java.util.List; import javax.annotation.Resource; import org.joda.time.DateTime; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.stereotype.Component; import io.linuxserver.davos.persistence.model.ScannedFileModel; import io.linuxserver.davos.persistence.model.ScheduleModel; import io.linuxserver.davos.persistence.repository.ScheduleRepository; @Component public class DefaultScheduleDAO implements ScheduleDAO { private static final Logger LOGGER = LoggerFactory.getLogger(DefaultScheduleDAO.class); @Resource private ScheduleRepository configRepository; @Override public List getAll() { return configRepository.findAll(); } 
@Override public ScheduleModel fetchSchedule(Long id) { return configRepository.findOne(id); } @Override public ScheduleModel updateConfig(ScheduleModel model) { if (null != model.id) { LOGGER.debug("Getting original view of schedule to overlay scannedFiles. " + "These should only be updated by 'updateScannedFilesOnSchedule'"); ScheduleModel current = fetchSchedule(model.id); model.scannedFiles = current.scannedFiles; } LOGGER.debug("Saving model: {}, filters {}", model, model.filters); ScheduleModel savedModel = configRepository.save(model); LOGGER.debug("Schedule model has been saved. Returned values from DB are: {}", model); return savedModel; } @Override public List fetchSchedulesUsingHost(Long hostId) { List models = configRepository.findByHost_Id(hostId); LOGGER.debug("Found {} schedules using host {}", models.size(), hostId); return models; } @Override public void updateScannedFilesOnSchedule(Long id, List newlyScannedFiles) { ScheduleModel model = configRepository.findOne(id); model.scannedFiles.clear(); model.scannedFiles.addAll(newlyScannedFiles.stream().map(f -> toScannedFileModel(f, model)).collect(toList())); model.setLastRunTime(DateTime.now().getMillis()); configRepository.save(model); } private ScannedFileModel toScannedFileModel(String fileName, ScheduleModel model) { ScannedFileModel scannedFileModel = new ScannedFileModel(); scannedFileModel.file = fileName; scannedFileModel.schedule = model; return scannedFileModel; } @Override public void deleteSchedule(Long id) { configRepository.delete(id); LOGGER.info("Schedule has been deleted"); } } ================================================ FILE: src/main/java/io/linuxserver/davos/persistence/dao/HostDAO.java ================================================ package io.linuxserver.davos.persistence.dao; import java.util.List; import io.linuxserver.davos.persistence.model.HostModel; public interface HostDAO { HostModel saveHost(HostModel host); HostModel fetchHost(Long id); List fetchAllHosts(); void 
deleteHost(Long id); } ================================================ FILE: src/main/java/io/linuxserver/davos/persistence/dao/ScheduleDAO.java ================================================ package io.linuxserver.davos.persistence.dao; import java.util.List; import io.linuxserver.davos.persistence.model.ScheduleModel; public interface ScheduleDAO { List getAll(); List fetchSchedulesUsingHost(Long hostId); ScheduleModel fetchSchedule(Long id); ScheduleModel updateConfig(ScheduleModel model); void updateScannedFilesOnSchedule(Long id, List newlyScannedFiles); void deleteSchedule(Long id); } ================================================ FILE: src/main/java/io/linuxserver/davos/persistence/model/ActionModel.java ================================================ package io.linuxserver.davos.persistence.model; import javax.persistence.Column; import javax.persistence.Entity; import javax.persistence.GeneratedValue; import javax.persistence.Id; import javax.persistence.JoinColumn; import javax.persistence.ManyToOne; @Entity public class ActionModel { @Id @GeneratedValue public Long id; @Column public String actionType; @Column public String f1; @Column public String f2; @Column public String f3; @Column public String f4; @ManyToOne @JoinColumn(name = "action_schedule_id") public ScheduleModel schedule; @Override public String toString() { return "ActionModel [id=" + id + ", actionType=" + actionType + ", f1=" + f1 + ", f2=" + f2 + ", f3=" + f3 + ", f4=" + f4 + "]"; } } ================================================ FILE: src/main/java/io/linuxserver/davos/persistence/model/FilterModel.java ================================================ package io.linuxserver.davos.persistence.model; import javax.persistence.Column; import javax.persistence.Entity; import javax.persistence.GeneratedValue; import javax.persistence.Id; import javax.persistence.JoinColumn; import javax.persistence.ManyToOne; @Entity public class FilterModel { @Id @GeneratedValue public Long id; 
@Column public String value; @ManyToOne @JoinColumn(name = "filter_schedule_id") public ScheduleModel schedule; @Override public String toString() { return "FilterModel [id=" + id + ", value=" + value + "]"; } } ================================================ FILE: src/main/java/io/linuxserver/davos/persistence/model/HostModel.java ================================================ package io.linuxserver.davos.persistence.model; import java.util.ArrayList; import java.util.List; import javax.persistence.Column; import javax.persistence.Entity; import javax.persistence.GeneratedValue; import javax.persistence.Id; import javax.persistence.OneToMany; import org.hibernate.annotations.LazyCollection; import org.hibernate.annotations.LazyCollectionOption; import io.linuxserver.davos.transfer.ftp.TransferProtocol; @Entity public class HostModel { @Id @GeneratedValue public Long id; @Column public String name; @Column public String address; @Column public int port; @Column public TransferProtocol protocol = TransferProtocol.FTP; @Column public String username; @Column public String password; @Column public String identityFile; @Column private Boolean identityFileEnabled; public boolean isIdentityFileEnabled() { if (null == identityFileEnabled) return false; return identityFileEnabled; } public void setIdentityFileEnabled(boolean identityFileEnabled) { this.identityFileEnabled = identityFileEnabled; } @OneToMany(mappedBy = "host", orphanRemoval = false) @LazyCollection(LazyCollectionOption.TRUE) public List schedules = new ArrayList(); @Override public String toString() { return "HostModel [id=" + id + ", name=" + name + ", address=" + address + ", port=" + port + ", protocol=" + protocol + ", username=" + username + ", password=" + password + ", identityFile=" + identityFile + ", identityFileEnabled=" + identityFileEnabled + ", schedules=" + schedules + "]"; } } ================================================ FILE: 
src/main/java/io/linuxserver/davos/persistence/model/ScannedFileModel.java ================================================
package io.linuxserver.davos.persistence.model;

import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.Id;
import javax.persistence.JoinColumn;
import javax.persistence.ManyToOne;

/** JPA entity recording a file name already seen by a schedule's scan. */
@Entity
public class ScannedFileModel {

    @Id
    @GeneratedValue
    public Long id;

    @Column
    public String file;

    @ManyToOne
    @JoinColumn(name = "scanned_file_schedule_id")
    public ScheduleModel schedule;
}

================================================ FILE: src/main/java/io/linuxserver/davos/persistence/model/ScheduleModel.java ================================================
package io.linuxserver.davos.persistence.model;

import java.util.ArrayList;
import java.util.List;

import javax.persistence.CascadeType;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.FetchType;
import javax.persistence.GeneratedValue;
import javax.persistence.Id;
import javax.persistence.JoinColumn;
import javax.persistence.ManyToOne;
import javax.persistence.OneToMany;

import org.hibernate.annotations.LazyCollection;
import org.hibernate.annotations.LazyCollectionOption;

import io.linuxserver.davos.transfer.ftp.FileTransferType;

/**
 * JPA entity for a schedule's full configuration. Several flags are stored as
 * boxed types because older DB rows may hold NULL; their accessors default to
 * false/zero in that case.
 */
@Entity
public class ScheduleModel {

    @Id
    @GeneratedValue
    public Long id;

    @Column
    public String name;

    @Column
    private Boolean startAutomatically;

    @Column
    public int interval;

    @Column
    public String remoteFilePath;

    @Column
    public String localFilePath;

    @Column
    public String moveFileTo;

    @Column
    private Boolean filtersMandatory;

    @Column
    private Boolean deleteHostFile;

    @Column
    private Boolean invertFilters;

    @Column
    private Long lastRunTime;

    /** Millis-since-epoch of the last run; 0 if the schedule never ran. */
    public long getLastRunTime() {

        if (null != lastRunTime)
            return lastRunTime;

        return 0;
    }

    public Boolean getFiltersMandatory() {

        if (null != filtersMandatory)
            return filtersMandatory;

        return false;
    }

    public void setLastRunTime(long millis) {
        lastRunTime = millis;
    }

    public void setFiltersMandatory(boolean filtersMandatory) {
        this.filtersMandatory = filtersMandatory;
    }

    public Boolean getDeleteHostFile() {

        if (null != deleteHostFile)
            return deleteHostFile;

        return false;
    }

    public void setDeleteHostFile(boolean deleteHostFile) {
        this.deleteHostFile = deleteHostFile;
    }

    public Boolean getStartAutomatically() {

        if (null != startAutomatically)
            return startAutomatically;

        return false;
    }

    public void setStartAutomatically(boolean startAutomatically) {
        this.startAutomatically = startAutomatically;
    }

    public Boolean getInvertFilters() {

        if (null != invertFilters)
            return invertFilters;

        return false;
    }

    public void setInvertFilters(boolean invertFilters) {
        this.invertFilters = invertFilters;
    }

    @Column
    public FileTransferType transferType = FileTransferType.FILE;

    @ManyToOne(fetch = FetchType.EAGER)
    @JoinColumn(name = "schedule_host_id")
    public HostModel host;

    // IDIOM FIX: raw collection types replaced with parameterised ones.
    @OneToMany(orphanRemoval = true, mappedBy = "schedule", cascade = CascadeType.ALL)
    @LazyCollection(LazyCollectionOption.FALSE)
    public List<FilterModel> filters = new ArrayList<>();

    @OneToMany(orphanRemoval = true, mappedBy = "schedule", cascade = CascadeType.ALL)
    @LazyCollection(LazyCollectionOption.FALSE)
    public List<ActionModel> actions = new ArrayList<>();

    @OneToMany(orphanRemoval = true, mappedBy = "schedule", cascade = CascadeType.ALL)
    @LazyCollection(LazyCollectionOption.FALSE)
    public List<ScannedFileModel> scannedFiles = new ArrayList<>();

    @Override
    public String toString() {
        return "ScheduleModel [id=" + id + ", name=" + name + ", startAutomatically=" + startAutomatically
                + ", interval=" + interval + ", remoteFilePath=" + remoteFilePath + ", localFilePath=" + localFilePath
                + ", transferType=" + transferType + ", filters=" + filters + ", actions=" + actions + "]";
    }
}

================================================ FILE: src/main/java/io/linuxserver/davos/persistence/repository/HostRepository.java ================================================
package
io.linuxserver.davos.persistence.repository;

import java.util.List;

import org.springframework.data.repository.CrudRepository;

import io.linuxserver.davos.persistence.model.HostModel;

/** Spring Data repository for hosts. */
public interface HostRepository extends CrudRepository<HostModel, Long> {

    List<HostModel> findAll();
}

================================================ FILE: src/main/java/io/linuxserver/davos/persistence/repository/ScheduleRepository.java ================================================
package io.linuxserver.davos.persistence.repository;

import java.util.List;

import org.springframework.data.repository.CrudRepository;

import io.linuxserver.davos.persistence.model.ScheduleModel;

/** Spring Data repository for schedules. */
public interface ScheduleRepository extends CrudRepository<ScheduleModel, Long> {

    List<ScheduleModel> findAll();

    List<ScheduleModel> findByHost_Id(Long hostId);
}

================================================ FILE: src/main/java/io/linuxserver/davos/schedule/RunnableSchedule.java ================================================
package io.linuxserver.davos.schedule;

import static java.util.stream.Collectors.toList;

import java.util.List;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import io.linuxserver.davos.persistence.dao.ScheduleDAO;
import io.linuxserver.davos.persistence.model.ScheduleModel;
import io.linuxserver.davos.schedule.workflow.ScheduleWorkflow;
import io.linuxserver.davos.schedule.workflow.transfer.FTPTransfer;

/**
 * A single executable run of a schedule: loads the latest config from the DAO,
 * runs the workflow, and persists the newly scanned files afterwards.
 */
public class RunnableSchedule implements Runnable {

    private static final Logger LOGGER = LoggerFactory.getLogger(RunnableSchedule.class);

    private ScheduleDAO configurationDAO;
    private Long scheduleId;
    private ScheduleWorkflow scheduleWorkflow;

    public RunnableSchedule(Long scheduleId, ScheduleDAO configurationDAO) {
        this.scheduleId = scheduleId;
        this.configurationDAO = configurationDAO;
    }

    @Override
    public void run() {

        LOGGER.info("Starting schedule {}", scheduleId);

        // Config is re-fetched on every run so edits take effect without a restart.
        ScheduleModel model = configurationDAO.fetchSchedule(scheduleId);
        ScheduleConfiguration config = ScheduleConfigurationFactory.createConfig(model);

        scheduleWorkflow = new ScheduleWorkflow(config);

        LOGGER.debug("Setting last scanned files on workflow before starting.");
        scheduleWorkflow.getFilesFromLastScan().addAll(model.scannedFiles.stream().map(sf -> sf.file).collect(toList()));

        LOGGER.debug("Starting workflow");
        scheduleWorkflow.start();
        LOGGER.debug("Workflow finished");

        LOGGER.debug("Saving newly scanned files against schedule");
        configurationDAO.updateScannedFilesOnSchedule(scheduleId, scheduleWorkflow.getFilesFromLastScan());
    }

    // NOTE(review): scheduleWorkflow is only assigned inside run(); calling
    // this before the first run would throw NPE — confirm callers only use it
    // on running schedules.
    public List<FTPTransfer> getTransfers() {
        return scheduleWorkflow.getFilesToDownload();
    }
}

================================================ FILE: src/main/java/io/linuxserver/davos/schedule/RunningSchedule.java ================================================
package io.linuxserver.davos.schedule;

import java.util.concurrent.ScheduledFuture;

/** Pairs a scheduled future with the runnable it executes, for lifecycle control. */
public class RunningSchedule {

    private final ScheduledFuture<?> future;
    private final RunnableSchedule schedule;

    public RunningSchedule(ScheduledFuture<?> future, RunnableSchedule schedule) {
        this.future = future;
        this.schedule = schedule;
    }

    public ScheduledFuture<?> getFuture() {
        return future;
    }

    public RunnableSchedule getSchedule() {
        return schedule;
    }
}

================================================ FILE: src/main/java/io/linuxserver/davos/schedule/ScheduleConfiguration.java ================================================
package io.linuxserver.davos.schedule;

import java.util.ArrayList;
import java.util.List;

import io.linuxserver.davos.schedule.workflow.actions.PostDownloadAction;
import io.linuxserver.davos.transfer.ftp.FileTransferType;
import io.linuxserver.davos.transfer.ftp.TransferProtocol;
import io.linuxserver.davos.transfer.ftp.client.UserCredentials;

/**
 * Immutable-ish snapshot of everything a workflow run needs: connection
 * details, paths, transfer behaviour flags, plus mutable filter and action
 * lists populated by the factory.
 */
public class ScheduleConfiguration {

    private TransferProtocol connectionType;
    private String hostname;
    private int port;
    private UserCredentials credentials;
    private String remoteFilePath;
    private String localFilePath;
    private String scheduleName;

    private List<String> filters = new ArrayList<>();
    private List<PostDownloadAction> actions = new ArrayList<>();

    private FileTransferType transferType;
    private boolean filtersMandatory;
    private boolean invertFilters;
    private boolean deleteHostFile;

    public ScheduleConfiguration(final String scheduleName, final TransferProtocol protocol, final String hostname,
            final int port, final UserCredentials credentials, final String remoteFilePath, final String localFilePath,
            FileTransferType transferType, boolean filtersMandatory, boolean invertFilters, boolean deleteHostFile) {

        this.scheduleName = scheduleName;
        this.connectionType = protocol;
        this.hostname = hostname;
        this.port = port;
        this.credentials = credentials;
        this.localFilePath = localFilePath;
        this.remoteFilePath = remoteFilePath;
        this.transferType = transferType;
        this.filtersMandatory = filtersMandatory;
        this.invertFilters = invertFilters;
        this.deleteHostFile = deleteHostFile;
    }

    public TransferProtocol getConnectionType() {
        return connectionType;
    }

    public String getHostName() {
        return hostname;
    }

    public int getPort() {
        return port;
    }

    public UserCredentials getCredentials() {
        return credentials;
    }

    public String getRemoteFilePath() {
        return remoteFilePath;
    }

    public String getLocalFilePath() {
        return localFilePath;
    }

    public List<String> getFilters() {
        return filters;
    }

    public void setFilters(List<String> filters) {
        this.filters = filters;
    }

    public String getScheduleName() {
        return scheduleName;
    }

    public List<PostDownloadAction> getActions() {
        return actions;
    }

    public void setActions(List<PostDownloadAction> actions) {
        this.actions = actions;
    }

    public FileTransferType getTransferType() {
        return transferType;
    }

    public boolean isFiltersMandatory() {
        return filtersMandatory;
    }

    public boolean isInvertFilters() {
        return invertFilters;
    }

    public boolean isDeleteHostFile() {
        return deleteHostFile;
    }
}

================================================ FILE: src/main/java/io/linuxserver/davos/schedule/ScheduleConfigurationFactory.java ================================================
package io.linuxserver.davos.schedule;

import org.apache.commons.lang3.StringUtils;

import
io.linuxserver.davos.persistence.model.ActionModel; import io.linuxserver.davos.persistence.model.FilterModel; import io.linuxserver.davos.persistence.model.HostModel; import io.linuxserver.davos.persistence.model.ScheduleModel; import io.linuxserver.davos.schedule.workflow.actions.HttpAPICallAction; import io.linuxserver.davos.schedule.workflow.actions.MoveFileAction; import io.linuxserver.davos.schedule.workflow.actions.PushbulletNotifyAction; import io.linuxserver.davos.schedule.workflow.actions.SNSNotifyAction; import io.linuxserver.davos.transfer.ftp.client.UserCredentials; import io.linuxserver.davos.transfer.ftp.client.UserCredentials.Identity; public class ScheduleConfigurationFactory { public static ScheduleConfiguration createConfig(ScheduleModel model) { ScheduleConfiguration config = new ScheduleConfiguration(model.name, model.host.protocol, model.host.address, model.host.port, buildCredentials(model.host), model.remoteFilePath, model.localFilePath, model.transferType, model.getFiltersMandatory(), model.getInvertFilters(), model.getDeleteHostFile()); if (StringUtils.isNotBlank(model.moveFileTo)) config.getActions().add(new MoveFileAction(config.getLocalFilePath(), model.moveFileTo)); if (null != model.filters) addFilters(model, config); if (null != model.actions) addActions(model, config); return config; } private static UserCredentials buildCredentials(HostModel host) { if (host.isIdentityFileEnabled()) return new UserCredentials(host.username, new Identity(host.identityFile)); return new UserCredentials(host.username, host.password); } private static void addActions(ScheduleModel model, ScheduleConfiguration config) { for (ActionModel action : model.actions) { if ("pushbullet".equals(action.actionType)) config.getActions().add(new PushbulletNotifyAction(action.f1)); if ("sns".equals(action.actionType)) config.getActions().add(new SNSNotifyAction(action.f2, action.f1, action.f3, action.f4)); if ("api".equals(action.actionType)) 
config.getActions().add(new HttpAPICallAction(action.f1, action.f2, action.f3, action.f4)); } } private static void addFilters(ScheduleModel model, ScheduleConfiguration config) { for (FilterModel filter : model.filters) config.getFilters().add(filter.value); } } ================================================ FILE: src/main/java/io/linuxserver/davos/schedule/ScheduleExecutor.java ================================================ package io.linuxserver.davos.schedule; import java.util.HashMap; import java.util.Map; import java.util.concurrent.Executors; import java.util.concurrent.ScheduledExecutorService; import java.util.concurrent.ScheduledFuture; import java.util.concurrent.TimeUnit; import javax.annotation.PostConstruct; import javax.annotation.Resource; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.stereotype.Component; import io.linuxserver.davos.exception.ScheduleAlreadyRunningException; import io.linuxserver.davos.exception.ScheduleNotRunningException; import io.linuxserver.davos.persistence.dao.ScheduleDAO; import io.linuxserver.davos.persistence.model.ScheduleModel; @Component public class ScheduleExecutor { private static final Logger LOGGER = LoggerFactory.getLogger(ScheduleExecutor.class); private Map runningSchedules = new HashMap<>(); @Resource private ScheduleDAO scheduleConfigurationDAO; private ScheduledExecutorService scheduledExecutorService; public ScheduleExecutor() { this.scheduledExecutorService = Executors.newScheduledThreadPool(10); } public boolean isScheduleRunning(Long id) { return runningSchedules.containsKey(id); } public RunningSchedule getRunningSchedule(Long id) { return runningSchedules.get(id); } @PostConstruct public void runAutomaticStartupSchedules() { LOGGER.info("Initialising automatic startup schedules"); for (ScheduleModel model : scheduleConfigurationDAO.getAll()) { if (model.getStartAutomatically()) { RunnableSchedule runnable = new RunnableSchedule(model.id, 
scheduleConfigurationDAO); ScheduledFuture runningSchedule = scheduledExecutorService.scheduleAtFixedRate(runnable, 0, model.interval, TimeUnit.MINUTES); runningSchedules.put(model.id, new RunningSchedule(runningSchedule, runnable)); } } LOGGER.info("Automatic startup schedules should now be running"); } public void startSchedule(Long id) throws ScheduleAlreadyRunningException { if (!runningSchedules.containsKey(id)) { ScheduleModel model = scheduleConfigurationDAO.fetchSchedule(id); RunnableSchedule runnable = new RunnableSchedule(model.id, scheduleConfigurationDAO); LOGGER.info("Starting schedule {}", id); ScheduledFuture runningSchedule = scheduledExecutorService.scheduleAtFixedRate(runnable, 0, model.interval, TimeUnit.MINUTES); runningSchedules.put(model.id, new RunningSchedule(runningSchedule, runnable)); } else { throw new ScheduleAlreadyRunningException(); } } public void stopSchedule(Long id) throws ScheduleNotRunningException { if (runningSchedules.containsKey(id)) { LOGGER.info("Stopping schedule {}", id); ScheduledFuture future = runningSchedules.get(id).getFuture(); if (!future.isCancelled()) { future.cancel(true); runningSchedules.remove(id); LOGGER.info("Schedule should now be stopped"); } } else { throw new ScheduleNotRunningException(); } } } ================================================ FILE: src/main/java/io/linuxserver/davos/schedule/workflow/ConnectWorkflowStep.java ================================================ package io.linuxserver.davos.schedule.workflow; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import io.linuxserver.davos.transfer.ftp.client.Client; import io.linuxserver.davos.transfer.ftp.client.ClientFactory; import io.linuxserver.davos.transfer.ftp.exception.FTPException; public class ConnectWorkflowStep extends WorkflowStep { private static final Logger LOGGER = LoggerFactory.getLogger(ConnectWorkflowStep.class); private ClientFactory clientFactory = new ClientFactory(); public ConnectWorkflowStep() { this.nextStep 
= new FilterFilesWorkflowStep(); } @Override public void runStep(ScheduleWorkflow schedule) { Client client = clientFactory.getClient(schedule.getConfig().getConnectionType()); client.setCredentials(schedule.getConfig().getCredentials()); client.setHost(schedule.getConfig().getHostName()); client.setPort(schedule.getConfig().getPort()); try { LOGGER.info("Connecting to host {} on port {}", schedule.getConfig().getHostName(), schedule.getConfig().getPort()); schedule.setConnection(client.connect()); schedule.setClient(client); LOGGER.info("Connection success. Moving onto next step"); nextStep.runStep(schedule); } catch (FTPException e) { LOGGER.error("Unable to create connection to {} on port {}. Falling back. Will try again next time.", schedule.getConfig().getHostName(), schedule.getConfig().getPort()); LOGGER.error("Error was: {}", e.getMessage()); LOGGER.debug("Stacktrace", e); } } } ================================================ FILE: src/main/java/io/linuxserver/davos/schedule/workflow/DisconnectWorkflowStep.java ================================================ package io.linuxserver.davos.schedule.workflow; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import io.linuxserver.davos.transfer.ftp.exception.FTPException; public class DisconnectWorkflowStep extends WorkflowStep { private static final Logger LOGGER = LoggerFactory.getLogger(DisconnectWorkflowStep.class); @Override public void runStep(ScheduleWorkflow schedule) { try { schedule.getClient().disconnect(); } catch (FTPException e) { LOGGER.error("Unable to disconnect from host. 
Error was: {}", e.getMessage()); LOGGER.debug("Stacktrace", e); } } } ================================================ FILE: src/main/java/io/linuxserver/davos/schedule/workflow/DownloadFilesWorkflowStep.java ================================================ package io.linuxserver.davos.schedule.workflow; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import io.linuxserver.davos.schedule.ScheduleConfiguration; import io.linuxserver.davos.schedule.workflow.transfer.FTPTransfer; import io.linuxserver.davos.schedule.workflow.transfer.TransferStrategy; import io.linuxserver.davos.schedule.workflow.transfer.TransferStrategyFactory; import io.linuxserver.davos.transfer.ftp.FTPFile; import io.linuxserver.davos.transfer.ftp.connection.progress.ListenerFactory; import io.linuxserver.davos.transfer.ftp.connection.progress.ProgressListener; import io.linuxserver.davos.transfer.ftp.exception.FTPException; public class DownloadFilesWorkflowStep extends WorkflowStep { private static final Logger LOGGER = LoggerFactory.getLogger(DownloadFilesWorkflowStep.class); private TransferStrategyFactory transferStrategyFactory = new TransferStrategyFactory(); public DownloadFilesWorkflowStep() { this.nextStep = new DisconnectWorkflowStep(); } @Override public void runStep(ScheduleWorkflow schedule) { ScheduleConfiguration config = schedule.getConfig(); TransferStrategy strategyToUse = transferStrategyFactory.getStrategy(config.getTransferType(), schedule.getConnection()); LOGGER.debug("Strategy chosen for downloads is {}, selected {}", config.getTransferType(), strategyToUse); strategyToUse.setPostDownloadActions(schedule.getConfig().getActions()); LOGGER.debug("PostDownloadActions: {} have been set against chosen strategy", schedule.getConfig().getActions()); try { if (schedule.getFilesToDownload().isEmpty()) LOGGER.info("There are no files to download in this run"); for (FTPTransfer transfer : schedule.getFilesToDownload()) { LOGGER.debug("Generating listener for transfer"); 
FTPFile file = transfer.getFile();

                ProgressListener listener = new ListenerFactory().createListener(config.getConnectionType());

                schedule.getConnection().setProgressListener(listener);
                transfer.setListener(listener);

                strategyToUse.transferFile(transfer, config.getLocalFilePath());

                if (config.isDeleteHostFile())
                    schedule.getConnection().deleteRemoteFile(file);
            }

            LOGGER.info("Download step complete. Moving onto next step");
            schedule.getFilesToDownload().clear();

        } catch (FTPException e) {

            LOGGER.error("Unable to complete download. Error was: {}", e.getMessage());
            LOGGER.debug("Stacktrace", e);
            LOGGER.info("Clearing current queue and will still continue to next step");
            schedule.getFilesToDownload().clear();
        }

        nextStep.runStep(schedule);
    }
}

================================================ FILE: src/main/java/io/linuxserver/davos/schedule/workflow/FilterFilesWorkflowStep.java ================================================
package io.linuxserver.davos.schedule.workflow;

import static java.util.stream.Collectors.toList;

import java.util.ArrayList;
import java.util.List;
import java.util.function.Predicate;
import java.util.stream.Collectors;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import io.linuxserver.davos.schedule.workflow.filter.ReferentialFileFilter;
import io.linuxserver.davos.schedule.workflow.transfer.FTPTransfer;
import io.linuxserver.davos.transfer.ftp.FTPFile;
import io.linuxserver.davos.transfer.ftp.exception.FTPException;
import io.linuxserver.davos.util.PatternBuilder;

/**
 * Lists the remote directory, drops files already seen in the previous scan,
 * then applies the schedule's name filters (optionally inverted) to decide
 * which files are queued for download.
 */
public class FilterFilesWorkflowStep extends WorkflowStep {

    private static final Logger LOGGER = LoggerFactory.getLogger(FilterFilesWorkflowStep.class);

    public FilterFilesWorkflowStep() {
        this.nextStep = new DownloadFilesWorkflowStep();
        this.backoutStep = new DisconnectWorkflowStep();
    }

    @Override
    public void runStep(ScheduleWorkflow schedule) {

        try {

            // Restored generics: the extraction had stripped the type parameters.
            List<String> filters = schedule.getConfig().getFilters();

            List<FTPFile> allFiles = schedule.getConnection().listFiles(schedule.getConfig().getRemoteFilePath())
                    .stream().filter(removeCurrentAndParentDirs()).collect(toList());

            // Only consider files that were NOT present in the previous scan.
            List<FTPFile> filesToFilter = new ReferentialFileFilter(schedule.getFilesFromLastScan()).filter(allFiles);
            List<FTPFile> filteredFiles = new ArrayList<>();

            LOGGER.debug("Clearing pending download list");
            schedule.getFilesToDownload().clear();

            if (noFilteringRequired(schedule, filters)) {

                LOGGER.info("Filter list was empty. Adding all found files to list");
                LOGGER.debug("All files: {}", filesToFilter.stream().map(f -> f.getName()).collect(Collectors.toList()));

                schedule.getFilesToDownload().addAll(filesToFilter.stream().map(f -> new FTPTransfer(f)).collect(toList()));

            } else {

                LOGGER.debug("Filters used {}", filters);
                LOGGER.debug("Files to filter against {}", filesToFilter.stream().map(f -> f.getName()).collect(toList()));

                boolean invertFilters = schedule.getConfig().isInvertFilters();

                for (FTPFile file : filesToFilter)
                    filterFilesByName(invertFilters, filters, filteredFiles, file);

                schedule.getFilesToDownload().addAll(filteredFiles.stream().map(f -> new FTPTransfer(f)).collect(toList()));
            }

            // Remember everything we saw so the next run only picks up new files.
            LOGGER.debug("Resetting files from scan to files in this scan");
            schedule.getFilesFromLastScan().clear();
            schedule.getFilesFromLastScan().addAll(allFiles.stream().map(f -> f.getName()).collect(toList()));

            LOGGER.debug("Files from last scan set to {}", schedule.getFilesFromLastScan());

            LOGGER.info("Filtered files. Moving onto next step");
            nextStep.runStep(schedule);

        } catch (FTPException e) {

            LOGGER.error("Unable to filter files. Error message was: {}", e.getMessage());
            LOGGER.debug("Stacktrace", e);
            LOGGER.info("Backing out of this run.");
            backoutStep.runStep(schedule);
        }
    }

    /** Filtering is skipped only when no filters exist AND filters are not mandatory. */
    private boolean noFilteringRequired(ScheduleWorkflow schedule, List<String> filters) {
        return filters.isEmpty() && !schedule.getConfig().isFiltersMandatory();
    }

    /**
     * Normal mode: add the file on the first filter match. Inverted mode: add the
     * file only if NO filter matches. Matching is case-insensitive against the
     * regex produced by PatternBuilder.
     */
    private void filterFilesByName(boolean invertFilters, List<String> filters, List<FTPFile> filteredFiles,
            FTPFile file) {

        if (invertFilters) {

            boolean filterForFileFound = false;

            for (String filter : filters) {

                String expression = PatternBuilder.buildFromFilterString(filter);

                if (file.getName().toLowerCase().matches(expression.toLowerCase()))
                    filterForFileFound = true;
            }

            if (!filterForFileFound) {
                LOGGER.debug("Inverting enabled - no matching filter found for file {}, so adding to download list.",
                        file.getName());
                filteredFiles.add(file);
            }

        } else {

            for (String filter : filters) {

                String expression = PatternBuilder.buildFromFilterString(filter);

                if (file.getName().toLowerCase().matches(expression.toLowerCase())) {

                    LOGGER.debug("Matched {} to {}. Adding to final filter list.", file.getName().toLowerCase(),
                            expression.toLowerCase());
                    filteredFiles.add(file);
                    return;
                }
            }
        }
    }

    /** Excludes the "." and ".." directory entries returned by some servers. */
    private Predicate<FTPFile> removeCurrentAndParentDirs() {
        return file -> !file.getName().equals(".") && !file.getName().equals("..");
    }
}

================================================ FILE: src/main/java/io/linuxserver/davos/schedule/workflow/ScheduleWorkflow.java ================================================
package io.linuxserver.davos.schedule.workflow;

import java.util.ArrayList;
import java.util.List;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import io.linuxserver.davos.schedule.ScheduleConfiguration;
import io.linuxserver.davos.schedule.workflow.transfer.FTPTransfer;
import io.linuxserver.davos.transfer.ftp.client.Client;
import io.linuxserver.davos.transfer.ftp.connection.Connection;

/**
 * Mutable context object threaded through the workflow steps. Holds the
 * configuration, the live client/connection, the file names seen in the last
 * scan, and the transfers queued for download.
 */
public class ScheduleWorkflow {

    private static final Logger LOGGER = LoggerFactory.getLogger(ScheduleWorkflow.class);

    private ScheduleConfiguration config;
    private Client client;
    private Connection connection;

    // Restored generics: file names from the previous scan, and pending transfers.
    private List<String> filesFromLastScan = new ArrayList<>();
    private List<FTPTransfer> filesToDownload = new ArrayList<>();

    public ScheduleWorkflow(ScheduleConfiguration config) {
        this.config = config;
    }

    protected Client getClient() {
        return client;
    }

    protected ScheduleConfiguration getConfig() {
        return config;
    }

    protected Connection getConnection() {
        return connection;
    }

    /** Entry point: kicks off the step chain starting with the connect step. */
    public void start() {

        LOGGER.info("Running schedule: {}", config.getScheduleName());
        new ConnectWorkflowStep().runStep(this);
        LOGGER.info("Finished schedule run: {}", config.getScheduleName());
    }

    protected void setConnection(Connection connection) {
        this.connection = connection;
    }

    protected void setClient(Client client) {
        this.client = client;
    }

    public List<String> getFilesFromLastScan() {
        return filesFromLastScan;
    }

    public List<FTPTransfer> getFilesToDownload() {
        return filesToDownload;
    }
}

================================================ FILE:
src/main/java/io/linuxserver/davos/schedule/workflow/WorkflowStep.java ================================================ package io.linuxserver.davos.schedule.workflow; public abstract class WorkflowStep { protected WorkflowStep nextStep; protected WorkflowStep backoutStep; abstract public void runStep(ScheduleWorkflow schedule); } ================================================ FILE: src/main/java/io/linuxserver/davos/schedule/workflow/actions/HttpAPICallAction.java ================================================ package io.linuxserver.davos.schedule.workflow.actions; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.http.HttpEntity; import org.springframework.http.HttpHeaders; import org.springframework.http.HttpMethod; import org.springframework.http.converter.HttpMessageConversionException; import org.springframework.web.client.RestClientException; import org.springframework.web.client.RestTemplate; public class HttpAPICallAction implements PostDownloadAction { private static final Logger LOGGER = LoggerFactory.getLogger(HttpAPICallAction.class); private RestTemplate restTemplate = new RestTemplate(); private String url; private HttpMethod method; private String contentType; private String body; public HttpAPICallAction(String url, String method, String contentType, String body) { this.url = url; this.method = HttpMethod.valueOf(method); this.contentType = contentType; this.body = body; } @Override public void execute(PostDownloadExecution execution) { try { HttpHeaders headers = new HttpHeaders(); headers.add("Content-Type", contentType); LOGGER.info("Sending message to generic API for {}", execution.fileName); HttpEntity httpEntity = new HttpEntity(resolveFilename(body, execution.fileName), headers); LOGGER.debug("Sending {} message {} to generic API: {}", method, httpEntity, url); restTemplate.exchange(resolveFilename(url, execution.fileName), method, httpEntity, Object.class); } catch (RestClientException | 
HttpMessageConversionException e) { LOGGER.debug("Full stacktrace", e); LOGGER.error("Unable to complete message to generic API. Given error: {}", e.getMessage()); } } @Override public String toString() { return getClass().getSimpleName(); } private String resolveFilename(String value, String filename) { return value.replace("$filename", filename); } } ================================================ FILE: src/main/java/io/linuxserver/davos/schedule/workflow/actions/MoveFileAction.java ================================================ package io.linuxserver.davos.schedule.workflow.actions; import java.io.IOException; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import io.linuxserver.davos.util.FileUtils; public class MoveFileAction implements PostDownloadAction { private static final Logger LOGGER = LoggerFactory.getLogger(MoveFileAction.class); private String currentFilePath; private String newFilePath; private FileUtils fileUtils = new FileUtils(); public MoveFileAction(String currentFilePath, String newFilePath) { this.currentFilePath = FileUtils.ensureTrailingSlash(currentFilePath); this.newFilePath = FileUtils.ensureTrailingSlash(newFilePath); } @Override public void execute(PostDownloadExecution execution) { try { LOGGER.info("Executing move action: Moving {} to {}", execution.fileName, newFilePath); fileUtils.moveFileToDirectory(currentFilePath + execution.fileName, newFilePath); LOGGER.info("File successfully moved!"); } catch (IOException e) { LOGGER.error("Unable to move {} to {}. 
Reason given: {}", execution.fileName, newFilePath, e.getMessage()); LOGGER.debug("Full stack trace on error", e); } } @Override public String toString() { return getClass().getSimpleName(); } } ================================================ FILE: src/main/java/io/linuxserver/davos/schedule/workflow/actions/PostDownloadAction.java ================================================ package io.linuxserver.davos.schedule.workflow.actions; public interface PostDownloadAction { void execute(PostDownloadExecution execution); } ================================================ FILE: src/main/java/io/linuxserver/davos/schedule/workflow/actions/PostDownloadExecution.java ================================================ package io.linuxserver.davos.schedule.workflow.actions; public class PostDownloadExecution { public String fileName; } ================================================ FILE: src/main/java/io/linuxserver/davos/schedule/workflow/actions/PushbulletNotifyAction.java ================================================ package io.linuxserver.davos.schedule.workflow.actions; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.http.HttpEntity; import org.springframework.http.HttpHeaders; import org.springframework.http.HttpMethod; import org.springframework.http.MediaType; import org.springframework.http.converter.HttpMessageConversionException; import org.springframework.web.client.RestClientException; import org.springframework.web.client.RestTemplate; public class PushbulletNotifyAction implements PostDownloadAction { private static final Logger LOGGER = LoggerFactory.getLogger(PushbulletNotifyAction.class); private RestTemplate restTemplate = new RestTemplate(); private String apiKey; public PushbulletNotifyAction(String apiKey) { this.apiKey = apiKey; } @Override public void execute(PostDownloadExecution execution) { PushbulletRequest body = new PushbulletRequest(); body.body = execution.fileName; body.title = "A new file has been 
downloaded"; body.type = "note"; HttpHeaders headers = new HttpHeaders(); headers.setContentType(MediaType.APPLICATION_JSON); headers.add("Authorization", "Bearer " + apiKey); try { LOGGER.info("Sending notification to Pushbullet for {}", execution.fileName); LOGGER.debug("API Key: {}", apiKey); HttpEntity httpEntity = new HttpEntity(body, headers); LOGGER.debug("Sending message to Pushbullet: {}", httpEntity); restTemplate.exchange("https://api.pushbullet.com/v2/pushes", HttpMethod.POST, httpEntity, Object.class); } catch (RestClientException | HttpMessageConversionException e ) { LOGGER.debug("Full stacktrace", e); LOGGER.error("Unable to complete notification to Pushbullet. Given error: {}", e.getMessage()); } } @Override public String toString() { return getClass().getSimpleName(); } class PushbulletRequest { public String type; public String title; public String body; @Override public String toString() { return "PushbulletRequest [type=" + type + ", title=" + title + ", body=" + body + "]"; } } } ================================================ FILE: src/main/java/io/linuxserver/davos/schedule/workflow/actions/SNSNotifyAction.java ================================================ package io.linuxserver.davos.schedule.workflow.actions; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.amazonaws.auth.AWSCredentials; import com.amazonaws.auth.AWSStaticCredentialsProvider; import com.amazonaws.auth.BasicAWSCredentials; import com.amazonaws.services.sns.AmazonSNS; import com.amazonaws.services.sns.AmazonSNSClient; import com.amazonaws.services.sns.AmazonSNSClientBuilder; import com.amazonaws.services.sns.model.PublishRequest; import com.amazonaws.services.sns.model.PublishResult; public class SNSNotifyAction implements PostDownloadAction { private static final Logger LOGGER = LoggerFactory.getLogger(SNSNotifyAction.class); private AmazonSNSClientBuilder snsClientBuilder = AmazonSNSClient.builder(); private String region; private String arn; private 
String accessKey; private String secretAccessKey; public SNSNotifyAction(String region, String arn, String accessKey, String secretAccessKey) { this.region = region; this.arn = arn; this.accessKey = accessKey; this.secretAccessKey = secretAccessKey; } @Override public void execute(PostDownloadExecution execution) { AWSCredentials credentials = new BasicAWSCredentials(accessKey, secretAccessKey); AmazonSNS sns = snsClientBuilder.withRegion(region) .withCredentials(new AWSStaticCredentialsProvider(credentials)).build(); LOGGER.debug("SNS: Topic Arn : {}", arn); LOGGER.debug("SNS: Topic Region : {}", region); LOGGER.debug("SNS: Topic Access Key : {}", accessKey); LOGGER.debug("SNS: Topic Secret Access Key : {}", secretAccessKey); PublishRequest request = new PublishRequest(); request.setTopicArn(arn); request.setMessageStructure("json"); request.setMessage(formatJsonMessage(execution.fileName)); request.setSubject("A new file has been downloaded"); LOGGER.info("Publishing message to SNS"); PublishResult result = sns.publish(request); LOGGER.info("Publish successful!"); LOGGER.debug("{}", result.getMessageId()); } private String formatJsonMessage(String message) { return String.format("{\"default\": \"%s\"}", message); } @Override public String toString() { return getClass().getSimpleName(); } } ================================================ FILE: src/main/java/io/linuxserver/davos/schedule/workflow/filter/FileFilter.java ================================================ package io.linuxserver.davos.schedule.workflow.filter; import java.util.List; import io.linuxserver.davos.transfer.ftp.FTPFile; public interface FileFilter { List filter(List allFiles); } ================================================ FILE: src/main/java/io/linuxserver/davos/schedule/workflow/filter/ReferentialFileFilter.java ================================================ package io.linuxserver.davos.schedule.workflow.filter; import static java.util.stream.Collectors.toList; import 
java.util.ArrayList;
import java.util.List;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import io.linuxserver.davos.transfer.ftp.FTPFile;

/**
 * Keeps only the files that did NOT appear in a previous scan, comparing by
 * file name. An empty reference list means everything passes through.
 */
public class ReferentialFileFilter implements FileFilter {

    private static final Logger LOGGER = LoggerFactory.getLogger(ReferentialFileFilter.class);

    // File names seen in the last scan (restored generics).
    private List<String> filesToCompareWith = new ArrayList<>();

    public ReferentialFileFilter(List<String> files) {
        filesToCompareWith = files;
    }

    @Override
    public List<FTPFile> filter(List<FTPFile> allFiles) {

        if (filesToCompareWith.isEmpty()) {
            LOGGER.debug("No files in last scan. Using all files in this scan for filtering");
            return allFiles;
        }

        LOGGER.debug("Files in last scan {}", filesToCompareWith);
        LOGGER.debug("Files in this scan {}", allFiles.stream().map(f -> f.getName()).collect(toList()));
        LOGGER.debug("Checking this scan for new files - comparing with files from last scan");

        List<FTPFile> collectedFiles = allFiles.stream().filter(f -> !filesToCompareWith.contains(f.getName()))
                .collect(toList());

        LOGGER.debug("New files {}", collectedFiles.stream().map(f -> f.getName()).collect(toList()));

        return collectedFiles;
    }
}

================================================ FILE: src/main/java/io/linuxserver/davos/schedule/workflow/filter/TemporalFileFilter.java ================================================
package io.linuxserver.davos.schedule.workflow.filter;

import static java.util.stream.Collectors.toList;

import java.util.List;
import java.util.function.Predicate;

import org.joda.time.DateTime;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import io.linuxserver.davos.transfer.ftp.FTPFile;

/** Keeps only the files modified after the given last-run timestamp. */
public class TemporalFileFilter implements FileFilter {

    private static final Logger LOGGER = LoggerFactory.getLogger(TemporalFileFilter.class);

    private DateTime lastRun;

    public TemporalFileFilter(DateTime lastRun) {
        this.lastRun = lastRun;
    }

    @Override
    public List<FTPFile> filter(List<FTPFile> allFiles) {
        return allFiles.stream().filter(after(lastRun)).collect(toList());
    }

    private Predicate<FTPFile> after(DateTime lastRun) {
        LOGGER.debug("Filtering initial set of files by lastRun. Last run was {}", lastRun);
        return f -> f.getLastModified().isAfter(lastRun);
    }
}

================================================ FILE: src/main/java/io/linuxserver/davos/schedule/workflow/transfer/FTPTransfer.java ================================================
package io.linuxserver.davos.schedule.workflow.transfer;

import io.linuxserver.davos.transfer.ftp.FTPFile;
import io.linuxserver.davos.transfer.ftp.connection.progress.ProgressListener;

/**
 * A single pending/active download: the remote file, its progress listener and
 * its lifecycle state.
 */
public class FTPTransfer {

    private State state = State.PENDING;
    private final FTPFile file;
    private ProgressListener listener;

    public FTPTransfer(FTPFile file) {
        this.file = file;
    }

    public FTPFile getFile() {
        return file;
    }

    public ProgressListener getListener() {
        return listener;
    }

    public void setListener(ProgressListener listener) {
        this.listener = listener;
    }

    public State getState() {
        return state;
    }

    public void setState(State state) {
        this.state = state;
    }

    /** SKIPPED is used when a strategy decides not to download (e.g. directories). */
    public enum State {
        PENDING, DOWNLOADING, SKIPPED, FINISHED
    }
}

================================================ FILE: src/main/java/io/linuxserver/davos/schedule/workflow/transfer/FilesAndFoldersTranferStrategy.java ================================================
package io.linuxserver.davos.schedule.workflow.transfer;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import io.linuxserver.davos.transfer.ftp.FTPFile;
import io.linuxserver.davos.transfer.ftp.connection.Connection;
import io.linuxserver.davos.util.FileUtils;

/**
 * Downloads both files and directories (recursive transfer type).
 * NOTE: the class name carries a historical typo ("Tranfer"); renaming it
 * would break external references, so it is left as-is.
 */
public class FilesAndFoldersTranferStrategy extends TransferStrategy {

    private static final Logger LOGGER = LoggerFactory.getLogger(FilesAndFoldersTranferStrategy.class);

    public FilesAndFoldersTranferStrategy(Connection connection) {
        super(connection);
    }

    @Override
    public void transferFile(FTPTransfer transfer, String destination) {

        FTPFile file = transfer.getFile();

        String filename = file.getName();
        String cleanFilePath = FileUtils.ensureTrailingSlash(file.getPath());
        String cleanDestination = FileUtils.ensureTrailingSlash(destination);

        LOGGER.info("Downloading {} to {}", cleanFilePath + filename, cleanDestination);

        transfer.setState(FTPTransfer.State.DOWNLOADING);
        connection.download(file, cleanDestination);
        transfer.setState(FTPTransfer.State.FINISHED);

        LOGGER.info("Successfully downloaded file.");
        LOGGER.info("Running post download actions on {}", filename);

        runPostDownloadAction(file);
    }
}

================================================ FILE: src/main/java/io/linuxserver/davos/schedule/workflow/transfer/FilesOnlyTransferStrategy.java ================================================
package io.linuxserver.davos.schedule.workflow.transfer;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import io.linuxserver.davos.transfer.ftp.FTPFile;
import io.linuxserver.davos.transfer.ftp.connection.Connection;
import io.linuxserver.davos.util.FileUtils;

/** Downloads plain files only; directories are marked SKIPPED and ignored. */
public class FilesOnlyTransferStrategy extends TransferStrategy {

    private static final Logger LOGGER = LoggerFactory.getLogger(FilesOnlyTransferStrategy.class);

    public FilesOnlyTransferStrategy(Connection connection) {
        super(connection);
    }

    @Override
    public void transferFile(FTPTransfer transfer, String destination) {

        FTPFile file = transfer.getFile();

        String filename = file.getName();
        String cleanFilePath = FileUtils.ensureTrailingSlash(file.getPath());
        String cleanDestination = FileUtils.ensureTrailingSlash(destination);

        if (!file.isDirectory()) {

            LOGGER.info("Downloading {} to {}", cleanFilePath + filename, cleanDestination);

            transfer.setState(FTPTransfer.State.DOWNLOADING);
            connection.download(file, cleanDestination);
            transfer.setState(FTPTransfer.State.FINISHED);

            LOGGER.info("Successfully downloaded file.");
            LOGGER.info("Running post download actions on {}", filename);

            runPostDownloadAction(file);

        } else {

            LOGGER.debug("Nullifying listener as it will never get used");
            transfer.setState(FTPTransfer.State.SKIPPED);
            transfer.setListener(null);
        }
    }
}
================================================ FILE: src/main/java/io/linuxserver/davos/schedule/workflow/transfer/TransferStrategy.java ================================================ package io.linuxserver.davos.schedule.workflow.transfer; import java.util.ArrayList; import java.util.List; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import io.linuxserver.davos.schedule.workflow.actions.PostDownloadAction; import io.linuxserver.davos.schedule.workflow.actions.PostDownloadExecution; import io.linuxserver.davos.transfer.ftp.FTPFile; import io.linuxserver.davos.transfer.ftp.connection.Connection; public abstract class TransferStrategy { private static final Logger LOGGER = LoggerFactory.getLogger(TransferStrategy.class); protected Connection connection; private List postDownloadActions = new ArrayList(); public TransferStrategy(Connection connection) { this.connection = connection; } public void setPostDownloadActions(List postDownloadActions) { this.postDownloadActions = postDownloadActions; } @Override public String toString() { return getClass().getSimpleName(); } public abstract void transferFile(FTPTransfer fileToTransfer, String destination); protected void runPostDownloadAction(FTPFile file) { if (null == postDownloadActions) { LOGGER.warn("Post download actions have been nulled! This should not happen. 
Will not attempt run of actions"); return; } LOGGER.debug("Running actions..."); for (PostDownloadAction action : postDownloadActions) { PostDownloadExecution execution = new PostDownloadExecution(); execution.fileName = file.getName(); action.execute(execution); } LOGGER.debug("Finished running actions..."); } } ================================================ FILE: src/main/java/io/linuxserver/davos/schedule/workflow/transfer/TransferStrategyFactory.java ================================================ package io.linuxserver.davos.schedule.workflow.transfer; import io.linuxserver.davos.transfer.ftp.FileTransferType; import io.linuxserver.davos.transfer.ftp.connection.Connection; public class TransferStrategyFactory { public TransferStrategy getStrategy(FileTransferType type, Connection connection) { if (FileTransferType.FILE.equals(type)) return new FilesOnlyTransferStrategy(connection); return new FilesAndFoldersTranferStrategy(connection); } } ================================================ FILE: src/main/java/io/linuxserver/davos/transfer/ftp/FTPFile.java ================================================ package io.linuxserver.davos.transfer.ftp; import org.apache.commons.lang3.builder.ToStringBuilder; import org.apache.commons.lang3.builder.ToStringStyle; import org.joda.time.DateTime; public class FTPFile { private String name; private long size; private String path; private DateTime lastModified; private boolean directory; public FTPFile(String name, long size, String path, long mTime, boolean directory) { this.name = name; this.size = size; this.path = path; this.lastModified = new DateTime(mTime); this.directory = directory; } public String getName() { return name; } public long getSize() { return size; } public String getPath() { return path; } public DateTime getLastModified() { return lastModified; } public boolean isDirectory() { return directory; } @Override public String toString() { return ToStringBuilder.reflectionToString(this, 
ToStringStyle.SHORT_PREFIX_STYLE);
    }
}

================================================ FILE: src/main/java/io/linuxserver/davos/transfer/ftp/FileTransferType.java ================================================
package io.linuxserver.davos.transfer.ftp;

/** How a schedule transfers content: single files only, or recursively. */
public enum FileTransferType {
    FILE, RECURSIVE;
}

================================================ FILE: src/main/java/io/linuxserver/davos/transfer/ftp/TransferProtocol.java ================================================
package io.linuxserver.davos.transfer.ftp;

/** Supported wire protocols for a host. */
public enum TransferProtocol {
    FTP, FTPS, SFTP;
}

================================================ FILE: src/main/java/io/linuxserver/davos/transfer/ftp/client/Client.java ================================================
package io.linuxserver.davos.transfer.ftp.client;

import io.linuxserver.davos.transfer.ftp.connection.Connection;

/**
 * Base class for protocol-specific clients. Holds connection details and
 * defaults to anonymous credentials until explicitly overridden.
 */
public abstract class Client {

    protected String host;
    protected int port;
    protected UserCredentials userCredentials = UserCredentials.ANONYMOUS;

    public void setCredentials(UserCredentials userCredentials) {
        this.userCredentials = userCredentials;
    }

    public void setHost(String host) {
        this.host = host;
    }

    public void setPort(int port) {
        this.port = port;
    }

    public abstract Connection connect();

    public abstract void disconnect();
}

================================================ FILE: src/main/java/io/linuxserver/davos/transfer/ftp/client/ClientFactory.java ================================================
package io.linuxserver.davos.transfer.ftp.client;

import io.linuxserver.davos.transfer.ftp.TransferProtocol;

/** Creates the concrete client matching the requested protocol. */
public class ClientFactory {

    public Client getClient(TransferProtocol protocol) {

        switch (protocol) {
            case SFTP:
                return new SFTPClient();
            case FTPS:
                return new FTPSClient();
            default:
                // Plain FTP is the fallback, matching the original behaviour.
                return new FTPClient();
        }
    }
}

================================================ FILE: src/main/java/io/linuxserver/davos/transfer/ftp/client/FTPClient.java ================================================
package io.linuxserver.davos.transfer.ftp.client;

import java.io.IOException;
import java.net.SocketException;

import org.apache.commons.net.ftp.FTPReply;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import io.linuxserver.davos.transfer.ftp.connection.Connection;
import io.linuxserver.davos.transfer.ftp.connection.ConnectionFactory;
import io.linuxserver.davos.transfer.ftp.exception.ClientConnectionException;
import io.linuxserver.davos.transfer.ftp.exception.ClientDisconnectException;
import io.linuxserver.davos.transfer.ftp.exception.FTPException;

/**
 * Plain FTP implementation of {@link Client}, backed by the Apache Commons Net
 * FTP client. {@link FTPSClient} reuses this class and only swaps the underlying
 * Commons Net client, hence the protected field.
 */
public class FTPClient extends Client {

    private static final Logger LOGGER = LoggerFactory.getLogger(FTPClient.class);

    private ConnectionFactory connectionFactory = new ConnectionFactory();

    // Protected so FTPSClient's constructor can substitute the secure variant.
    protected org.apache.commons.net.ftp.FTPClient ftpClient;

    public FTPClient() {
        LOGGER.debug("Initialising FTP Client");
        ftpClient = new org.apache.commons.net.ftp.FTPClient();
    }

    /**
     * Connects to the configured host, switches to passive binary mode and logs in.
     *
     * @return a {@link Connection} wrapping the live FTP session
     * @throws ClientConnectionException if connecting or logging in fails. Fix over
     *         the original: when the socket is already open and a later step (mode
     *         setup or login) fails, the socket is closed before rethrowing, so a
     *         failed login no longer leaks a live connection.
     */
    public Connection connect() {

        try {
            connectClientAndCheckStatus();
        } catch (IOException e) {
            throw new ClientConnectionException(String.format("Unable to connect to host %s on port %d", host, port), e);
        }

        try {
            setSpecificModesOnClient();
            login();
        } catch (IOException e) {
            disconnectQuietly();
            throw new ClientConnectionException(String.format("Unable to connect to host %s on port %d", host, port), e);
        } catch (FTPException e) {
            // e.g. login rejected: close the open socket before propagating.
            disconnectQuietly();
            throw e;
        }

        return connectionFactory.createFTPConnection(ftpClient);
    }

    /**
     * Disconnects the underlying client if it is currently connected.
     *
     * @throws ClientDisconnectException if the client is null or the disconnect fails
     */
    public void disconnect() {

        try {

            if (null == ftpClient)
                throw new ClientDisconnectException("The underlying client was null.");

            if (ftpClient.isConnected()) {
                LOGGER.debug("Disconnecting...");
                ftpClient.disconnect();
                LOGGER.debug("Disconnected");
            }

        } catch (IOException e) {
            throw new ClientDisconnectException("There was an unexpected error while trying to disconnect.", e);
        }
    }

    // Best-effort cleanup used on failed connection attempts; never throws.
    private void disconnectQuietly() {
        try {
            if (null != ftpClient && ftpClient.isConnected())
                ftpClient.disconnect();
        } catch (IOException e) {
            LOGGER.debug("Suppressed error while closing a failed connection", e);
        }
    }

    // Opens the socket and verifies the server's greeting is a positive completion.
    private void connectClientAndCheckStatus() throws SocketException, IOException, FTPException {

        LOGGER.debug("Connecting to {}:{}", host, port);
        ftpClient.connect(host, port);

        int replyCode = ftpClient.getReplyCode();

        if (!FTPReply.isPositiveCompletion(replyCode)) {

            LOGGER.debug("Connection not made.");
            LOGGER.debug("Response status: {}", replyCode);
            LOGGER.debug("Disconnecting");
            ftpClient.disconnect();
            LOGGER.debug("Disconnected");

            throw new ClientConnectionException(String.format("The host %s on port %d returned a bad status code.", host, port));
        }
    }

    // Authenticates with the configured credentials and forces binary transfers.
    private void login() throws IOException, FTPException {

        String username = userCredentials.getUsername();
        String password = userCredentials.getPassword();

        LOGGER.debug("Username: {}", username);

        boolean hasLoggedIn = ftpClient.login(username, password);

        if (!hasLoggedIn)
            throw new ClientConnectionException(String.format("Unable to login for user %s", username));

        ftpClient.setFileType(org.apache.commons.net.ftp.FTPClient.BINARY_FILE_TYPE);
    }

    // Passive mode plus a keep-alive so long transfers don't drop the control channel.
    private void setSpecificModesOnClient() throws IOException {
        ftpClient.enterLocalPassiveMode();
        ftpClient.setControlKeepAliveTimeout(300);
    }
}
import io.linuxserver.davos.transfer.ftp.exception.ClientDisconnectException; public class SFTPClient extends Client { private static final Logger LOGGER = LoggerFactory.getLogger(SFTPClient.class); private JSch jsch; private ConnectionFactory connectionFactory; private Session session; private Channel channel; public SFTPClient() { this.jsch = new JSch(); this.connectionFactory = new ConnectionFactory(); } @Override public Connection connect() { session = null; channel = null; try { configureSessionAndConnect(); openChannelFromSession(); } catch (JSchException e) { throw new ClientConnectionException(String.format("Unable to connect to host %s on port %d", host, port), e); } return connectionFactory.createSFTPConnection(channel); } @Override public void disconnect() { if (null == channel || null == session) throw new ClientDisconnectException("The underlying connection was never initially made."); LOGGER.debug("Disconnecting from channel"); channel.disconnect(); LOGGER.debug("Disconnecting from session"); session.disconnect(); } private void configureSessionAndConnect() throws JSchException { LOGGER.debug("Configuring connection credentials and options on session"); Identity identity = userCredentials.getIdentity(); if (null != identity) { String identityFile = identity.getIdentityFile(); LOGGER.debug("SSH identity found ({}). Setting against session", identityFile); jsch.addIdentity(identityFile); } String username = userCredentials.getUsername(); String password = userCredentials.getPassword(); LOGGER.debug("Username: {}", username); session = jsch.getSession(username, host, port); session.setConfig("StrictHostKeyChecking", "no"); // I'm going to have to think of a nicer way of doing this... 
/**
 * Immutable holder for the credentials used to authenticate against a remote
 * host: either a username/password pair or a username plus an SSH identity file.
 */
public class UserCredentials {

    /** Shared anonymous login used when a host needs no real account. */
    public static final UserCredentials ANONYMOUS = new UserCredentials("anonymous", "stark@linuxserver.io");

    private String username;
    private String password;
    private Identity identity;

    public UserCredentials(final String username, final String password) {
        this.username = username;
        this.password = password;
    }

    public UserCredentials(final String username, final Identity identity) {
        this.username = username;
        this.identity = identity;
    }

    public String getUsername() {
        return username;
    }

    /** Null when this instance was built with an {@link Identity} instead. */
    public String getPassword() {
        return password;
    }

    /** Null when this instance was built with a password instead. */
    public Identity getIdentity() {
        return identity;
    }

    /** Thin wrapper around the filesystem path of an SSH private-key file. */
    public static class Identity {

        private final String identityFile;

        public Identity(String identityFile) {
            this.identityFile = identityFile;
        }

        public String getIdentityFile() {
            return identityFile;
        }
    }
}
void deleteRemoteFile(FTPFile file) throws FTPException; List listFiles() throws FTPException; List listFiles(String remoteDirectory) throws FTPException; void setProgressListener(ProgressListener progressListener); } ================================================ FILE: src/main/java/io/linuxserver/davos/transfer/ftp/connection/ConnectionFactory.java ================================================ package io.linuxserver.davos.transfer.ftp.connection; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.jcraft.jsch.Channel; import com.jcraft.jsch.ChannelSftp; public class ConnectionFactory { private static final Logger LOGGER = LoggerFactory.getLogger(ConnectionFactory.class); public SFTPConnection createSFTPConnection(Channel channel) { LOGGER.debug("Creating SFTP connection for channel {}", channel); return new SFTPConnection((ChannelSftp) channel); } public FTPConnection createFTPConnection(org.apache.commons.net.ftp.FTPClient client) { LOGGER.debug("Creating FTP connection for client {}", client); return new FTPConnection(client); } } ================================================ FILE: src/main/java/io/linuxserver/davos/transfer/ftp/connection/FTPConnection.java ================================================ package io.linuxserver.davos.transfer.ftp.connection; import java.io.FileNotFoundException; import java.io.IOException; import java.io.OutputStream; import java.util.ArrayList; import java.util.List; import java.util.function.Predicate; import java.util.stream.Collectors; import org.apache.commons.io.output.CountingOutputStream; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import io.linuxserver.davos.transfer.ftp.FTPFile; import io.linuxserver.davos.transfer.ftp.connection.progress.ProgressListener; import io.linuxserver.davos.transfer.ftp.exception.DeleteFileException; import io.linuxserver.davos.transfer.ftp.exception.DownloadFailedException; import io.linuxserver.davos.transfer.ftp.exception.FTPException; import 
package io.linuxserver.davos.transfer.ftp.connection;

import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.OutputStream;
import java.util.ArrayList;
import java.util.List;
import java.util.function.Predicate;
import java.util.stream.Collectors;

import org.apache.commons.io.output.CountingOutputStream;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import io.linuxserver.davos.transfer.ftp.FTPFile;
import io.linuxserver.davos.transfer.ftp.connection.progress.ProgressListener;
import io.linuxserver.davos.transfer.ftp.exception.DeleteFileException;
import io.linuxserver.davos.transfer.ftp.exception.DownloadFailedException;
import io.linuxserver.davos.transfer.ftp.exception.FTPException;
import io.linuxserver.davos.transfer.ftp.exception.FileListingException;
import io.linuxserver.davos.util.FileStreamFactory;
import io.linuxserver.davos.util.FileUtils;

/**
 * {@link Connection} backed by an already-connected Apache Commons Net FTP
 * client. Supports listing, downloading (recursively for directories) and
 * deleting remote files and directories.
 */
public class FTPConnection implements Connection {

    private static final Logger LOGGER = LoggerFactory.getLogger(FTPConnection.class);

    private org.apache.commons.net.ftp.FTPClient client;
    private FileStreamFactory fileStreamFactory = new FileStreamFactory();
    private FileUtils fileUtils = new FileUtils();
    private ProgressListener progressListener;

    public FTPConnection(org.apache.commons.net.ftp.FTPClient client) {
        this.client = client;
    }

    @Override
    public String currentDirectory() {
        try {
            String workingDirectory = client.printWorkingDirectory();
            LOGGER.debug("{}", workingDirectory);
            return workingDirectory;
        } catch (IOException e) {
            throw new FileListingException("Unable to print the working directory", e);
        }
    }

    /**
     * Downloads a single file, or a directory and all of its contents, into the
     * given local directory.
     *
     * @throws DownloadFailedException if the local file cannot be written or the
     *         transfer fails
     */
    @Override
    public void download(FTPFile file, String localFilePath) {

        String cleanRemotePath = FileUtils.ensureTrailingSlash(file.getPath()) + file.getName();
        String cleanLocalPath = FileUtils.ensureTrailingSlash(localFilePath);

        LOGGER.debug("Remote path: {}", cleanRemotePath);
        LOGGER.debug("Local path: {}", cleanLocalPath);

        try {

            if (file.isDirectory())
                downloadDirectoryAndContents(file, cleanLocalPath, cleanRemotePath);
            else
                doDownload(file, cleanRemotePath, cleanLocalPath);

        } catch (FileNotFoundException e) {
            throw new DownloadFailedException(
                    String.format("Unable to write to local directory %s", cleanLocalPath + file.getName()), e);
        } catch (IOException e) {
            throw new DownloadFailedException(String.format("Unable to download file %s", cleanRemotePath), e);
        }
    }

    @Override
    public List<FTPFile> listFiles() {
        return listFiles(currentDirectory());
    }

    /**
     * Lists the files in the given remote directory. The "." and ".."
     * pseudo-entries are stripped from the result.
     */
    @Override
    public List<FTPFile> listFiles(String remoteDirectory) {

        List<FTPFile> files = new ArrayList<>();

        try {

            String cleanRemoteDirectory = FileUtils.ensureTrailingSlash(remoteDirectory);
            LOGGER.debug("Listing all files in {}", cleanRemoteDirectory);

            org.apache.commons.net.ftp.FTPFile[] ftpFiles = client.listFiles(cleanRemoteDirectory);

            for (org.apache.commons.net.ftp.FTPFile file : ftpFiles)
                files.add(toFtpFile(file, cleanRemoteDirectory));

            LOGGER.debug("{}", files);

        } catch (IOException e) {
            throw new FileListingException(String.format("Unable to list files in directory %s", remoteDirectory), e);
        }

        return files.stream().filter(removeCurrentAndParentDirs()).collect(Collectors.toList());
    }

    @Override
    public void setProgressListener(ProgressListener progressListener) {
        this.progressListener = progressListener;
    }

    // Wraps the raw stream so every write pushes the running byte count to the listener.
    private CountingOutputStream listenOn(OutputStream outputStream) {

        LOGGER.debug("Creating wrapping output stream for progress listener");

        return new CountingOutputStream(outputStream) {

            @Override
            protected void beforeWrite(int n) {
                super.beforeWrite(n);
                progressListener.setBytesWritten(getByteCount());
            }
        };
    }

    private void doDownload(FTPFile file, String cleanRemotePath, String cleanLocalPath)
            throws FileNotFoundException, IOException {

        LOGGER.info("Downloading {} to {}", cleanRemotePath, cleanLocalPath);
        LOGGER.debug("Creating output stream for file {}", cleanLocalPath + file.getName());

        boolean hasDownloaded;

        // try-with-resources: the original leaked the stream if retrieveFile threw.
        try (OutputStream outputStream = fileStreamFactory.createOutputStream(cleanLocalPath + file.getName())) {

            if (null != progressListener) {

                LOGGER.debug("ProgressListener has been set. Initialising...");
                LOGGER.debug("Total file size is {}", file.getSize());

                progressListener.reset();
                progressListener.setTotalBytes(file.getSize());
                progressListener.start();

                hasDownloaded = client.retrieveFile(cleanRemotePath, listenOn(outputStream));

            } else
                hasDownloaded = client.retrieveFile(cleanRemotePath, outputStream);
        }

        if (!hasDownloaded)
            throw new DownloadFailedException("Server returned failure while downloading.");
    }

    // Recursively mirrors a remote directory into the local download folder.
    private void downloadDirectoryAndContents(FTPFile file, String localDownloadFolder, String path) throws IOException {

        LOGGER.info("Item {} is a directory. Will now check sub-items", file.getName());

        // listFiles already strips "." and "..", so no extra filtering is needed here.
        List<FTPFile> subItems = listFiles(path);
        LOGGER.debug("Counted {} sub items.", subItems.size());

        String fullLocalDownloadPath = FileUtils.ensureTrailingSlash(localDownloadFolder + file.getName());

        LOGGER.debug("Creating new local directory {}", fullLocalDownloadPath);
        fileUtils.createLocalDirectory(fullLocalDownloadPath);

        for (FTPFile subItem : subItems) {

            String subItemPath = FileUtils.ensureTrailingSlash(subItem.getPath()) + subItem.getName();
            LOGGER.debug("Download. Sub item path: {}", subItemPath);

            if (subItem.isDirectory()) {
                String subLocalFilePath = FileUtils.ensureTrailingSlash(fullLocalDownloadPath);
                downloadDirectoryAndContents(subItem, subLocalFilePath, FileUtils.ensureTrailingSlash(subItemPath));
            } else
                doDownload(subItem, subItemPath, fullLocalDownloadPath);
        }
    }

    private Predicate<FTPFile> removeCurrentAndParentDirs() {
        return file -> !file.getName().equals(".") && !file.getName().equals("..");
    }

    private FTPFile toFtpFile(org.apache.commons.net.ftp.FTPFile ftpFile, String filePath) throws IOException {

        String name = ftpFile.getName();
        long fileSize = ftpFile.getSize();
        long mTime = ftpFile.getTimestamp().getTime().getTime();
        boolean isDirectory = ftpFile.isDirectory();

        return new FTPFile(name, fileSize, filePath, mTime, isDirectory);
    }

    /**
     * Deletes a remote file, or a directory and everything beneath it.
     *
     * @throws DeleteFileException if any delete is rejected or an I/O error occurs
     */
    @Override
    public void deleteRemoteFile(FTPFile file) throws FTPException {

        String cleanRemotePath = FileUtils.ensureTrailingSlash(file.getPath()) + file.getName();
        LOGGER.debug("Deleting remote file {}", cleanRemotePath);

        try {

            if (file.isDirectory())
                deleteDirectoryAndContents(file, cleanRemotePath);
            else
                doDelete(cleanRemotePath);

        } catch (IOException e) {
            LOGGER.debug("client#deleteFile() threw exception. Assuming file not deleted");
            throw new DeleteFileException("Unable to delete file on remote server", e);
        }
    }

    // Depth-first delete of a directory's contents, then the directory itself.
    private void deleteDirectoryAndContents(FTPFile file, String remoteDirectoryPath) throws IOException {

        LOGGER.info("Item {} is a directory. Will now check sub-items", file.getName());

        // listFiles already strips "." and "..".
        List<FTPFile> subItems = listFiles(remoteDirectoryPath);

        for (FTPFile subItem : subItems) {

            String subItemPath = FileUtils.ensureTrailingSlash(subItem.getPath()) + subItem.getName();
            LOGGER.debug("Delete. Sub item path: {}", subItemPath);

            if (subItem.isDirectory())
                deleteDirectoryAndContents(subItem, subItemPath);
            else
                doDelete(subItemPath);
        }

        LOGGER.debug("Removing empty directory {}", remoteDirectoryPath);
        client.removeDirectory(remoteDirectoryPath);
    }

    private void doDelete(String subItemPath) throws IOException {

        LOGGER.debug("Deleting file: {}", subItemPath);
        boolean deleted = client.deleteFile(subItemPath);
        LOGGER.debug("File deleted");

        if (!deleted)
            throw new DeleteFileException("Unable to delete file on remote server. Unknown reason");
    }
}
public String currentDirectory() { try { String pwd = channel.pwd(); LOGGER.debug("{}", pwd); return pwd; } catch (SftpException e) { throw new FileListingException("Unable to print the working directory", e); } } @Override public void download(FTPFile file, String localFilePath) { String path = FileUtils.ensureTrailingSlash(file.getPath()) + file.getName(); String cleanLocalPath = FileUtils.ensureTrailingSlash(localFilePath); LOGGER.debug("Download. Remote path: {}", path); LOGGER.debug("Download. Local path: {}", cleanLocalPath); try { if (file.isDirectory()) downloadDirectoryAndContents(file, cleanLocalPath, path); else doGet(path, cleanLocalPath); } catch (SftpException e) { throw new DownloadFailedException("Unable to download file " + path, e); } } @Override public List listFiles() { return listFiles(currentDirectory()); } @Override public List listFiles(String remoteDirectory) { try { String cleanRemoteDirectory = FileUtils.ensureTrailingSlash(remoteDirectory); List files = new ArrayList(); LOGGER.debug("Listing files in {}", cleanRemoteDirectory); @SuppressWarnings("unchecked") Vector lsEntries = channel.ls(cleanRemoteDirectory); for (LsEntry entry : lsEntries) files.add(toFtpFile(entry, cleanRemoteDirectory)); LOGGER.debug("{}", files); LOGGER.debug("Listed {} items from remote directory {}", files.size(), cleanRemoteDirectory); return files; } catch (SftpException e) { throw new FileListingException(String.format("Unable to list files in directory %s", remoteDirectory), e); } } @Override public void setProgressListener(ProgressListener progressListener) { this.progressListener = (SFTPProgressListener) progressListener; } private void doGet(String fullRemotePath, String fullLocalDownloadPath) throws SftpException { LOGGER.debug("Performing channel.get from {} to {}", fullRemotePath, fullLocalDownloadPath); if (null != progressListener) { LOGGER.debug("Progress listener has been enabled"); channel.get(fullRemotePath, fullLocalDownloadPath, 
progressListener); } else channel.get(fullRemotePath, fullLocalDownloadPath); } private void downloadDirectoryAndContents(FTPFile file, String localDownloadFolder, String path) throws SftpException { LOGGER.info("Item {} is a directory. Will now check sub-items", file.getName()); List subItems = listFiles(path).stream().filter(removeCurrentAndParentDirs()).collect(Collectors.toList()); String fullLocalDownloadPath = FileUtils.ensureTrailingSlash(localDownloadFolder + file.getName()); LOGGER.debug("Creating new local directory {}", fullLocalDownloadPath); fileUtils.createLocalDirectory(fullLocalDownloadPath); for (FTPFile subItem : subItems) { LOGGER.debug("{}", subItem); String subItemPath = FileUtils.ensureTrailingSlash(subItem.getPath()) + subItem.getName(); if (subItem.isDirectory()) { String subLocalFilePath = FileUtils.ensureTrailingSlash(fullLocalDownloadPath); downloadDirectoryAndContents(subItem, subLocalFilePath, FileUtils.ensureTrailingSlash(subItemPath)); } else { LOGGER.info("Downloading {} to {}", subItemPath, fullLocalDownloadPath); doGet(subItemPath, fullLocalDownloadPath); } } } private Predicate removeCurrentAndParentDirs() { return file -> !file.getName().equals(".") && !file.getName().equals(".."); } private FTPFile toFtpFile(LsEntry lsEntry, String filePath) throws SftpException { String name = lsEntry.getFilename(); long fileSize = lsEntry.getAttrs().getSize(); int mTime = lsEntry.getAttrs().getMTime(); boolean directory = lsEntry.getAttrs().isDir(); return new FTPFile(name, fileSize, filePath, (long) mTime * 1000, directory); } @Override public void deleteRemoteFile(FTPFile file) throws FTPException { String cleanRemotePath = FileUtils.ensureTrailingSlash(file.getPath()) + file.getName(); LOGGER.debug("Deleting remote file {}", cleanRemotePath); try { if (file.isDirectory()) { deleteDirectoryAndContents(file, cleanRemotePath); } else doDelete(cleanRemotePath); } catch (SftpException e) { LOGGER.debug("channel threw exception. 
Assuming file not deleted"); throw new DeleteFileException("Unable to delete file on remote server", e); } } private void deleteDirectoryAndContents(FTPFile file, String remoteDirectoryPath) throws SftpException { LOGGER.info("Item {} is a directory. Will now check sub-items", file.getName()); List subItems = listFiles(remoteDirectoryPath).stream().filter(removeCurrentAndParentDirs()) .collect(Collectors.toList()); for (FTPFile subItem : subItems) { LOGGER.debug("{}", subItem); String subItemPath = FileUtils.ensureTrailingSlash(subItem.getPath()) + subItem.getName(); if (subItem.isDirectory()) deleteDirectoryAndContents(subItem, subItemPath); else doDelete(subItemPath); } LOGGER.debug("Removing empty directory {}", remoteDirectoryPath); channel.rmdir(remoteDirectoryPath); } private void doDelete(String subItemPath) throws SftpException { LOGGER.debug("Deleting file: {}", subItemPath); channel.rm(subItemPath); } } ================================================ FILE: src/main/java/io/linuxserver/davos/transfer/ftp/connection/progress/ListenerFactory.java ================================================ package io.linuxserver.davos.transfer.ftp.connection.progress; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import io.linuxserver.davos.transfer.ftp.TransferProtocol; public class ListenerFactory { private static final Logger LOGGER = LoggerFactory.getLogger(ListenerFactory.class); public ProgressListener createListener(TransferProtocol protocol) { if (TransferProtocol.SFTP.equals(protocol)) { LOGGER.debug("Chosen listener is SFTPProgressListener, for {}", protocol); return new SFTPProgressListener(); } LOGGER.debug("Chosen listener is ProgressListener, for {}", protocol); return new ProgressListener(); } } ================================================ FILE: src/main/java/io/linuxserver/davos/transfer/ftp/connection/progress/ProgressListener.java ================================================ package 
/**
 * Tracks the progress and instantaneous speed of a single file transfer.
 * Byte counts may be reported either absolutely ({@link #setBytesWritten}) or
 * incrementally ({@link #updateBytesWritten}).
 */
public class ProgressListener {

    private long lastWriteTime;
    private long totalBytesWritten;
    private long bytesInWrite;
    private long totalBytes;
    private double currentTransferSpeed;

    /**
     * Percentage (0-100) transferred so far. Reports 100 when the total size is
     * unknown (zero) so indeterminate transfers render as complete.
     */
    public double getProgress() {
        if (totalBytes > 0)
            return ((double) totalBytesWritten / (double) totalBytes) * 100;
        return 100;
    }

    public double getTransferSpeed() {
        return currentTransferSpeed;
    }

    /** Clears only the expected total; written-byte counters reset in {@link #start()}. */
    public void reset() {
        totalBytes = 0;
    }

    /** Adds an increment to the running total and refreshes the speed sample. */
    public void updateBytesWritten(long bytes) {
        setBytesWritten(totalBytesWritten + bytes);
    }

    /**
     * Records an absolute byte count and recalculates the transfer speed from the
     * time elapsed since the previous write. Fix over the original: a zero elapsed
     * interval (two writes within the same millisecond) no longer divides by zero
     * and produces Infinity/NaN — the previous speed sample is kept instead.
     */
    public void setBytesWritten(long bytesWritten) {

        long currentTimeMillis = System.currentTimeMillis();
        long timeSinceLastWrite = currentTimeMillis - this.lastWriteTime;

        this.lastWriteTime = currentTimeMillis;
        this.bytesInWrite = bytesWritten - this.totalBytesWritten;
        this.totalBytesWritten = bytesWritten;

        if (timeSinceLastWrite > 0)
            this.currentTransferSpeed = (double) this.bytesInWrite / (double) timeSinceLastWrite / 1000;
        // else: no meaningful sample from a zero interval; keep the last speed.
    }

    public void setTotalBytes(long totalBytes) {
        this.totalBytes = totalBytes;
    }

    /** Marks the beginning of a transfer: zeroes the written count and timestamps it. */
    public void start() {
        lastWriteTime = System.currentTimeMillis();
        totalBytesWritten = 0;
    }
}
io.linuxserver.davos.transfer.ftp.exception; public class ClientConnectionException extends FTPException { private static final long serialVersionUID = 7733358928451506618L; public ClientConnectionException() { super(); } public ClientConnectionException(String message) { super(message); } public ClientConnectionException(String message, Exception cause) { super(message, cause); } } ================================================ FILE: src/main/java/io/linuxserver/davos/transfer/ftp/exception/ClientDisconnectException.java ================================================ package io.linuxserver.davos.transfer.ftp.exception; public class ClientDisconnectException extends FTPException { private static final long serialVersionUID = 7733358928451506618L; public ClientDisconnectException() { super(); } public ClientDisconnectException(String message) { super(message); } public ClientDisconnectException(String message, Exception cause) { super(message, cause); } } ================================================ FILE: src/main/java/io/linuxserver/davos/transfer/ftp/exception/DeleteFileException.java ================================================ package io.linuxserver.davos.transfer.ftp.exception; public class DeleteFileException extends FTPException { private static final long serialVersionUID = 6191478212036531333L; public DeleteFileException() { super(); } public DeleteFileException(String message) { super(message); } public DeleteFileException(String message, Exception cause) { super(message, cause); } } ================================================ FILE: src/main/java/io/linuxserver/davos/transfer/ftp/exception/DownloadFailedException.java ================================================ package io.linuxserver.davos.transfer.ftp.exception; public class DownloadFailedException extends FTPException { private static final long serialVersionUID = 7733358928451506618L; public DownloadFailedException() { super(); } public DownloadFailedException(String message) { 
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;

/**
 * Small indirection over file stream creation so connection classes can be
 * unit-tested without touching the real filesystem directly.
 */
public class FileStreamFactory {

    /** Opens a fresh output stream over the file at the given path. */
    public FileOutputStream createOutputStream(String filePath) throws FileNotFoundException {
        File target = new File(filePath);
        return new FileOutputStream(target);
    }

    /** Opens a fresh input stream over the file at the given path. */
    public FileInputStream createInputStream(String filePath) throws FileNotFoundException {
        File source = new File(filePath);
        return new FileInputStream(source);
    }
}
/**
 * Translates simple glob-style filter strings (using {@code *} and {@code ?})
 * into equivalent Java regular expressions.
 */
public class PatternBuilder {

    // Utility class: static method only, so prevent instantiation.
    private PatternBuilder() {
    }

    /**
     * Converts a glob filter to a regex: literal dots are escaped, {@code ?}
     * matches exactly one character and {@code *} matches any run of characters.
     * NOTE: other regex metacharacters in the filter are not escaped.
     */
    public static String buildFromFilterString(String filter) {
        // Literal replacements, so String#replace suffices (the original used
        // replaceAll with escaped regexes to the same effect). Order matters:
        // dots are escaped first, before '?'/'*' introduce intentional regex dots.
        return filter.replace(".", "\\.").replace("?", ".{1}").replace("*", ".*");
    }
}
method) { this.method = method; } public String getContentType() { return contentType; } public void setContentType(String contentType) { this.contentType = contentType; } public String getBody() { return body; } public void setBody(String body) { this.body = body; } @Override public String toString() { return ToStringBuilder.reflectionToString(this, ToStringStyle.SHORT_PREFIX_STYLE); } } ================================================ FILE: src/main/java/io/linuxserver/davos/web/Filter.java ================================================ package io.linuxserver.davos.web; import org.apache.commons.lang3.builder.ToStringBuilder; import org.apache.commons.lang3.builder.ToStringStyle; public class Filter { private Long id; private String value; public String getValue() { return value; } public void setValue(String value) { this.value = value; } public Long getId() { return id; } public void setId(Long id) { this.id = id; } @Override public String toString() { return ToStringBuilder.reflectionToString(this, ToStringStyle.SHORT_PREFIX_STYLE); } } ================================================ FILE: src/main/java/io/linuxserver/davos/web/Host.java ================================================ package io.linuxserver.davos.web; import org.apache.commons.lang3.builder.ToStringBuilder; import org.apache.commons.lang3.builder.ToStringStyle; import io.linuxserver.davos.web.selectors.ProtocolSelector; public class Host { private Long id; private String name; private String address; private int port; private ProtocolSelector protocol = ProtocolSelector.SFTP; private String username; private String password; private String identityFile; private boolean identityFileEnabled; public Long getId() { return id; } public void setId(Long id) { this.id = id; } public String getName() { return name; } public void setName(String name) { this.name = name; } public String getAddress() { return address; } public void setAddress(String address) { this.address = address; } public int 
/**
 * Groups the notification configurations attached to a schedule: Pushbullet
 * entries and AWS SNS targets.
 *
 * <p>NOTE(review): the raw {@code List} declarations look like extraction
 * artefacts (generic parameters stripped); in the real file they are
 * presumably {@code List&lt;Pushbullet&gt;} / {@code List&lt;SNS&gt;} — confirm
 * before editing upstream.
 */
public class Notifications {

    private List pushbullet = new ArrayList();
    private List sns = new ArrayList();

    public List getPushbullet() {
        return pushbullet;
    }

    public void setPushbullet(List pushbullet) {
        this.pushbullet = pushbullet;
    }

    public List getSns() {
        return sns;
    }

    public void setSns(List sns) {
        this.sns = sns;
    }
}
/**
 * Web-layer bean holding the configuration for an AWS SNS notification
 * target: the topic ARN, its region, and the IAM credentials used to publish.
 */
public class SNS {

    private Long id;
    private String topicArn;
    private String region;
    private String accessKey;
    private String secretAccessKey;

    public Long getId() {
        return id;
    }

    public void setId(Long id) {
        this.id = id;
    }

    public String getTopicArn() {
        return topicArn;
    }

    public void setTopicArn(String topicArn) {
        this.topicArn = topicArn;
    }

    public String getRegion() {
        return region;
    }

    public void setRegion(String region) {
        this.region = region;
    }

    public String getAccessKey() {
        return accessKey;
    }

    public void setAccessKey(String accessKey) {
        this.accessKey = accessKey;
    }

    public String getSecretAccessKey() {
        return secretAccessKey;
    }

    public void setSecretAccessKey(String secretAccessKey) {
        this.secretAccessKey = secretAccessKey;
    }
}
Notifications(); private List lastScannedFiles = new ArrayList<>(); private List filters = new ArrayList<>(); private List transfers = new ArrayList<>(); private List apis = new ArrayList<>(); public Long getId() { return id; } public void setId(Long id) { this.id = id; } public String getName() { return name; } public void setName(String name) { this.name = name; } public int getInterval() { return interval; } public void setInterval(int interval) { this.interval = interval; } public Long getHost() { return host; } public void setHost(Long host) { this.host = host; } public String getHostDirectory() { return hostDirectory; } public void setHostDirectory(String hostDirectory) { this.hostDirectory = hostDirectory; } public String getLocalDirectory() { return localDirectory; } public void setLocalDirectory(String localDirectory) { this.localDirectory = localDirectory; } public TransferSelector getTransferType() { return transferType; } public void setTransferType(TransferSelector transferType) { this.transferType = transferType; } public boolean isAutomatic() { return automatic; } public void setAutomatic(boolean automatic) { this.automatic = automatic; } public List getFilters() { return filters; } public String getMoveFileTo() { return moveFileTo; } public void setMoveFileTo(String moveFileTo) { this.moveFileTo = moveFileTo; } public List getApis() { return apis; } public boolean isRunning() { return running; } public void setRunning(boolean running) { this.running = running; } @Override public String toString() { return ToStringBuilder.reflectionToString(this, ToStringStyle.SHORT_PREFIX_STYLE); } public List getLastScannedFiles() { return lastScannedFiles; } public List getTransfers() { return transfers; } public boolean isFiltersMandatory() { return filtersMandatory; } public void setFiltersMandatory(boolean filtersMandatory) { this.filtersMandatory = filtersMandatory; } public boolean isInvertFilters() { return invertFilters; } public void 
/**
 * Read-model describing one in-flight or completed file transfer as shown in
 * the UI: the file's name and size, whether it is a directory, a free-form
 * status string, and a progress snapshot.
 */
public class Transfer {

    private String fileName;
    private long fileSize;
    private boolean directory;
    private Progress progress;
    private String status;

    public String getFileName() {
        return fileName;
    }

    public void setFileName(String fileName) {
        this.fileName = fileName;
    }

    public long getFileSize() {
        return fileSize;
    }

    public void setFileSize(long fileSize) {
        this.fileSize = fileSize;
    }

    public boolean isDirectory() {
        return directory;
    }

    public void setDirectory(boolean directory) {
        this.directory = directory;
    }

    public Progress getProgress() {
        return progress;
    }

    public void setProgress(Progress progress) {
        this.progress = progress;
    }

    public String getStatus() {
        return status;
    }

    public void setStatus(String status) {
        this.status = status;
    }

    /**
     * Snapshot of transfer progress: percentage complete and current speed.
     * Units of {@code transferSpeed} are not visible from this class — set by
     * the transfer layer; confirm there.
     */
    public static class Progress {

        private double percentageComplete;
        private double transferSpeed;

        public double getPercentageComplete() {
            return percentageComplete;
        }

        public void setPercentageComplete(double percentageComplete) {
            this.percentageComplete = percentageComplete;
        }

        public double getTransferSpeed() {
            return transferSpeed;
        }

        public void setTransferSpeed(double transferSpeed) {
            this.transferSpeed = transferSpeed;
        }
    }
}
"" : "not "); newVersion = remoteVersion.toString(); } public boolean isNewVersionAvailable() { return newVersionAvailable; } public String getNewVersion() { return newVersion; } } ================================================ FILE: src/main/java/io/linuxserver/davos/web/controller/APIController.java ================================================ package io.linuxserver.davos.web.controller; import javax.annotation.Resource; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.http.HttpStatus; import org.springframework.http.ResponseEntity; import org.springframework.web.bind.annotation.ExceptionHandler; import org.springframework.web.bind.annotation.PathVariable; import org.springframework.web.bind.annotation.RequestBody; import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RequestMethod; import org.springframework.web.bind.annotation.RequestParam; import org.springframework.web.bind.annotation.RestController; import io.linuxserver.davos.delegation.services.HostService; import io.linuxserver.davos.delegation.services.ScheduleService; import io.linuxserver.davos.delegation.services.SettingsService; import io.linuxserver.davos.exception.HostInUseException; import io.linuxserver.davos.transfer.ftp.exception.FTPException; import io.linuxserver.davos.web.Host; import io.linuxserver.davos.web.Schedule; import io.linuxserver.davos.web.ScheduleCommand; import io.linuxserver.davos.web.controller.response.APIResponse; import io.linuxserver.davos.web.controller.response.APIResponseBuilder; import io.linuxserver.davos.web.selectors.LogLevelSelector; @RestController @RequestMapping("/api/v2") public class APIController { private static final Logger LOGGER = LoggerFactory.getLogger(APIController.class); @Resource private ScheduleService scheduleService; @Resource private HostService hostService; @Resource private SettingsService settingsService; @RequestMapping(value = "/schedule", 
method = RequestMethod.POST) public ResponseEntity createSchedule(@RequestBody Schedule schedule) { LOGGER.info("Creating new schedule"); LOGGER.debug("Schedule values are {}", schedule); if (!isSchedulePostPayloadValid(schedule)) { LOGGER.error("Unable to create schedule: An id was supplied in the payload"); return ResponseEntity.status(HttpStatus.BAD_REQUEST) .body(APIResponseBuilder.create().withBody("Payload contains ids")); } try { Schedule createdSchedule = scheduleService.createSchedule(schedule); LOGGER.info("New schedule has been created"); return ResponseEntity.status(HttpStatus.CREATED) .body(APIResponseBuilder.create().withStatus("Failure").withBody(createdSchedule)); } catch (IllegalArgumentException e) { LOGGER.error("Unable to create schedule: {}", e.getMessage()); return ResponseEntity.status(HttpStatus.BAD_REQUEST).body(APIResponseBuilder.create().withBody(e.getMessage())); } } private boolean isSchedulePostPayloadValid(Schedule schedule) { boolean hasPushbulletIds = schedule.getNotifications().getPushbullet().stream().anyMatch(pb -> pb.getId() != null); boolean hasSnsIds = schedule.getNotifications().getSns().stream().anyMatch(pb -> pb.getId() != null); boolean hasFilterIds = schedule.getFilters().stream().anyMatch(f -> f.getId() != null); boolean hasApiIds = schedule.getApis().stream().anyMatch(a -> a.getId() != null); if (null != schedule.getId() || hasPushbulletIds || hasSnsIds || hasFilterIds || hasApiIds) return false; return true; } @RequestMapping(value = "/schedule/{id}", method = RequestMethod.GET) public ResponseEntity fetchSchedule(@PathVariable("id") Long id) { Schedule schedule = scheduleService.fetchSchedule(id); LOGGER.debug("Fetched schedule: {}", schedule); return ResponseEntity.status(HttpStatus.OK).body(APIResponseBuilder.create().withBody(schedule)); } @RequestMapping(value = "/schedule/{id}", method = RequestMethod.PUT) public ResponseEntity updateSchedule(@PathVariable("id") Long id, @RequestBody Schedule schedule) { 
LOGGER.info("Updating schedule with id {} and name {}", id, schedule.getName()); LOGGER.debug("Schedule values are {}", schedule); LOGGER.debug("Imposing id from URL into body"); schedule.setId(id); Schedule updatedSchedule = scheduleService.updateSchedule(schedule); LOGGER.debug("Schedule has been updated"); return ResponseEntity.status(HttpStatus.OK).body(APIResponseBuilder.create().withBody(updatedSchedule)); } @RequestMapping(value = "/schedule/{id}", method = RequestMethod.DELETE) public ResponseEntity deleteSchedule(@PathVariable("id") Long id) { LOGGER.info("Deleting schedule with id {}", id); scheduleService.deleteSchedule(id); return ResponseEntity.status(HttpStatus.OK).body(APIResponseBuilder.create()); } @RequestMapping(value = "/schedule/{id}/scannedFiles", method = RequestMethod.DELETE) public ResponseEntity deleteScheduleScannedFiles(@PathVariable("id") Long id) { LOGGER.info("Clearing last scanned file list for schedule {}", id); scheduleService.clearScannedFilesFromSchedule(id); return ResponseEntity.status(HttpStatus.OK).body(APIResponseBuilder.create()); } @RequestMapping(value = "/schedule/{id}/execute", method = RequestMethod.POST) public ResponseEntity executeSchedule(@PathVariable("id") Long id, @RequestBody ScheduleCommand command) { if (command.command == ScheduleCommand.Command.START) scheduleService.startSchedule(id); if (command.command == ScheduleCommand.Command.STOP) scheduleService.stopSchedule(id); return ResponseEntity.status(HttpStatus.OK).body(APIResponseBuilder.create()); } @RequestMapping(value = "/host/{id}", method = RequestMethod.GET) public ResponseEntity getHost(@PathVariable("id") Long id) { LOGGER.info("Getting host with id: {}", id); Host host = hostService.fetchHost(id); return ResponseEntity.status(HttpStatus.OK).body(APIResponseBuilder.create().withBody(host)); } @RequestMapping(value = "/host", method = RequestMethod.POST) public ResponseEntity createHost(@RequestBody Host host) { LOGGER.info("Saving new host"); 
LOGGER.debug("Host values are {}", host); Host createdHost = hostService.saveHost(host); LOGGER.info("Host has been created"); return ResponseEntity.status(HttpStatus.CREATED).body(APIResponseBuilder.create().withBody(createdHost)); } @RequestMapping(value = "/host/{id}", method = RequestMethod.PUT) public ResponseEntity updateHost(@PathVariable("id") Long id, @RequestBody Host host) { LOGGER.info("Updating host with id {} and name {}", id, host.getName()); LOGGER.debug("Host values are {}", host); LOGGER.debug("Imposing id from URL into body"); host.setId(id); Host updatedHost = hostService.saveHost(host); LOGGER.debug("Host has been updated"); return ResponseEntity.status(HttpStatus.OK).body(APIResponseBuilder.create().withBody(updatedHost)); } @RequestMapping(value = "/host/{id}", method = RequestMethod.DELETE) public ResponseEntity deleteHost(@PathVariable("id") Long id) { LOGGER.info("Deleting host with id {}", id); try { hostService.deleteHost(id); } catch (HostInUseException e) { return ResponseEntity.status(HttpStatus.BAD_REQUEST) .body(APIResponseBuilder.create().withStatus("Failed").withBody(e.getMessage())); } return ResponseEntity.status(HttpStatus.OK).body(APIResponseBuilder.create()); } @RequestMapping(value = "/testConnection", method = RequestMethod.POST) public ResponseEntity testConnection(@RequestBody Host host) { APIResponse response = APIResponseBuilder.create(); HttpStatus status = HttpStatus.OK; try { hostService.testConnection(host); } catch (FTPException e) { LOGGER.error("Failed to connect to host"); LOGGER.debug("Exception: ", e); response.withBody(e.getCause().getMessage()).withStatus("Failed"); status = HttpStatus.BAD_REQUEST; } return ResponseEntity.status(status).body(response); } @RequestMapping(value = "/settings/log", method = RequestMethod.POST) public ResponseEntity setLogLevel(@RequestParam("level") LogLevelSelector level) { settingsService.setLoggingLevel(level); return 
ResponseEntity.status(HttpStatus.OK).body(APIResponseBuilder.create()); } @ExceptionHandler(Exception.class) public ResponseEntity handleException(Exception e) { return ResponseEntity.status(HttpStatus.INTERNAL_SERVER_ERROR) .body(APIResponseBuilder.create().withBody(e.getMessage()).withStatus("Failed")); } } ================================================ FILE: src/main/java/io/linuxserver/davos/web/controller/FragmentController.java ================================================ package io.linuxserver.davos.web.controller; import java.util.Arrays; import java.util.List; import javax.annotation.Resource; import org.springframework.stereotype.Controller; import org.springframework.ui.Model; import org.springframework.web.bind.annotation.ModelAttribute; import org.springframework.web.bind.annotation.PathVariable; import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RequestParam; import io.linuxserver.davos.delegation.services.ScheduleService; import io.linuxserver.davos.web.selectors.MethodSelector; @Controller @RequestMapping("/fragments") public class FragmentController { @Resource private ScheduleService scheduleService; @ModelAttribute("allMethods") public List populateMethods() { return Arrays.asList(MethodSelector.ALL); } @RequestMapping("/filter") public String filter(@RequestParam("value") String value, Model model) { model.addAttribute("value", value); return "fragments/filter"; } @RequestMapping("/notification/pushbullet") public String notificationPushbullet() { return "fragments/pushbullet"; } @RequestMapping("/notification/sns") public String notificationSns() { return "fragments/sns"; } @RequestMapping("/api") public String api() { return "fragments/api"; } @RequestMapping("/schedule/{id}/transfers") public String transfers(@PathVariable Long id, Model model) { model.addAttribute("schedule", scheduleService.fetchSchedule(id)); return "fragments/transfers"; } } 
package io.linuxserver.davos.web.controller;

import java.util.Arrays;
import java.util.List;

import javax.annotation.Resource;

import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Controller;
import org.springframework.ui.Model;
import org.springframework.web.bind.annotation.ModelAttribute;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RequestMapping;

import io.linuxserver.davos.Version;
import io.linuxserver.davos.delegation.services.HostService;
import io.linuxserver.davos.delegation.services.ScheduleService;
import io.linuxserver.davos.delegation.services.SettingsService;
import io.linuxserver.davos.web.Host;
import io.linuxserver.davos.web.Schedule;
import io.linuxserver.davos.web.Settings;
import io.linuxserver.davos.web.VersionChecker;
import io.linuxserver.davos.web.selectors.IntervalSelector;
import io.linuxserver.davos.web.selectors.LogLevelSelector;
import io.linuxserver.davos.web.selectors.MethodSelector;
import io.linuxserver.davos.web.selectors.ProtocolSelector;
import io.linuxserver.davos.web.selectors.TransferSelector;

/**
 * MVC controller serving the HTML views. The {@code @ModelAttribute} methods
 * populate every model rendered by this controller with the selector lists
 * and version information the templates need.
 *
 * <p>NOTE(review): the raw {@code List} return types below look like
 * extraction artefacts (generic parameters stripped); confirm against the
 * real file before editing upstream.
 */
@Controller
public class ViewController {

    @Resource
    private ScheduleService scheduleService;

    @Resource
    private HostService hostService;

    @Resource
    private SettingsService settingsService;

    // Injected from application properties: davos.version
    @Value("${davos.version}")
    private String currentVersion;

    @ModelAttribute("currentVersion")
    public String currentVersion() {
        return currentVersion;
    }

    /** Compares the running version against the remote one for the upgrade banner. */
    @ModelAttribute("versionChecker")
    public VersionChecker versionChecker() {
        return new VersionChecker(new Version(currentVersion()), settingsService.retrieveRemoteVersion());
    }

    @ModelAttribute("allIntervals")
    public List populateIntervals() {
        return Arrays.asList(IntervalSelector.ALL);
    }

    @ModelAttribute("allProtocols")
    public List populateProtocols() {
        return Arrays.asList(ProtocolSelector.ALL);
    }

    @ModelAttribute("allTransferTypes")
    public List populateTypes() {
        return Arrays.asList(TransferSelector.ALL);
    }

    @ModelAttribute("allMethods")
    public List populateMethods() {
        return Arrays.asList(MethodSelector.ALL);
    }

    @ModelAttribute("allHosts")
    public List allHosts() {
        return hostService.fetchAllHosts();
    }

    @ModelAttribute("allLogLevels")
    public List allLogLevels() {
        return Arrays.asList(LogLevelSelector.ALL);
    }

    @RequestMapping("/")
    public String index() {
        return "redirect:/schedules";
    }

    @RequestMapping("/settings")
    public String settings(Model model) {
        Settings settings = new Settings();
        settings.setLogLevel(settingsService.getCurrentLoggingLevel());
        model.addAttribute("settings", settings);
        return "v2/settings";
    }

    @RequestMapping("/schedules")
    public String schedules(Model model) {
        model.addAttribute("schedules", scheduleService.fetchAllSchedules());
        return "v2/schedules";
    }

    @RequestMapping("/schedules/new")
    public String newSchedule(Model model) {
        model.addAttribute("schedule", new Schedule());
        return "v2/edit-schedule";
    }

    @RequestMapping("/schedules/{id}")
    public String schedules(@PathVariable Long id, Model model) {
        model.addAttribute("schedule", scheduleService.fetchSchedule(id));
        return "v2/edit-schedule";
    }

    @RequestMapping("/hosts")
    public String hosts() {
        return "v2/hosts";
    }

    @RequestMapping("/hosts/new")
    public String newHost(Model model) {
        model.addAttribute("host", new Host());
        return "v2/edit-host";
    }

    @RequestMapping("/hosts/{id}")
    public String hosts(@PathVariable Long id, Model model) {
        model.addAttribute("host", hostService.fetchHost(id));
        // The edit view also lists which schedules currently use this host.
        model.addAttribute("usedBy", hostService.fetchSchedulesUsingHost(id));
        return "v2/edit-host";
    }
}
/**
 * Fixed set of polling intervals offered in the schedule editor. Each entry
 * pairs an interval length in minutes with its display label.
 */
public enum IntervalSelector {

    MINS_1(1, "Every minute"),
    MINS_5(5, "Every 5 minutes"),
    MINS_15(15, "Every 15 minutes"),
    MINS_30(30, "Every 30 minutes"),
    EVERY_HOUR(60, "Every hour"),
    EVERY_2_HOURS(120, "Every two hours"),
    TWICE_A_DAY(720, "Twice a day"),
    EVERY_DAY(1440, "Once a day");

    /** All selectable intervals, in declaration (menu) order. */
    public static final IntervalSelector[] ALL = values();

    private final int minutes;
    private final String text;

    IntervalSelector(int minutes, String text) {
        this.minutes = minutes;
        this.text = text;
    }

    public int getMinutes() {
        return minutes;
    }

    public String getText() {
        return text;
    }
}
/**
 * The file-transfer protocols a host can be configured with.
 */
public enum ProtocolSelector {

    FTP, FTPS, SFTP;

    /** Every protocol option, in declaration order, for view dropdowns. */
    public static final ProtocolSelector[] ALL = values();
}
/*global $, jQuery */
// Settings-page module: toast helper, form validation and the log-level control.
var settings = (function($) {
    'use strict';

    // Shows a bootstrap-notify toast in the top-right corner.
    function makeNotify(notificationType, messageText, icon) {
        $.notify({
            icon: 'glyphicon ' + icon,
            message: messageText
        }, {
            // settings
            type: notificationType,
            placement: {
                from: "top",
                align: "right"
            },
            delay: 3000
        });
    }

    // Marks empty required text/number inputs with has-error; returns false
    // when any required field is blank.
    function validate() {
        var allFieldsPopulated = true;
        $('input[type="text"].validate, input[type="number"].validate').each(function() {
            var formGroup = $(this).parents('.form-group');
            if ($.trim($(this).val()).length === 0) {
                formGroup.addClass('has-error');
                allFieldsPopulated = false;
            } else {
                formGroup.removeClass('has-error');
            }
        });
        return allFieldsPopulated;
    }

    // Wires the log-level dropdown to the settings API.
    function initialise() {
        $('#logLevel').on('change', function() {
            var logLevel = $(this).find('option:selected').val();
            makeNotify('info', 'Changing logging level to ' + logLevel, 'glyphicon-info-sign');
            $.ajax({
                method: 'POST',
                url: '/api/v2/settings/log?level=' + logLevel
            }).done(function(msg) {
                makeNotify('success', 'Settings saved!', 'glyphicon-ok-sign');
            }).fail(function(msg) {
                makeNotify('danger', 'There was an error: ' + msg.responseJSON.status, 'glyphicon-warning-sign');
            });
        });
    }

    return {
        init: initialise,
        notify: makeNotify,
        validate: validate
    };
}(jQuery));
").load("/fragments/api")); }); $('#newPushbullet').on('click', function() { $('#notifications').append($("
").load("/fragments/notification/pushbullet")); }); $('#newSns').on('click', function() { $('#notifications').append($("
").load("/fragments/notification/sns")); }); $('#addFilter').on('click', function() { if ($.trim($('#newFilter').val()).length > 0) { $('#filters').append($("").load("/fragments/filter?value=" + $('#newFilter').val())); $('#newFilter').val(''); } }); }; keypresses = function() { $('#newFilter').on('keypress', function(e) { if (e.keyCode == 13) { if ($.trim($('#newFilter').val()).length > 0) { $('#filters').append($("").load("/fragments/filter?value=" + $('#newFilter').val())); $('#newFilter').val(''); } } }); }; removes = function() { $('#notifications').on('click', '.remove-notification', function() { $(this).parents('.notification').remove(); }); $('#apis').on('click', '.remove-api', function() { $(this).parents('.api').remove(); }); $('#filters').on('click', '.filter-close', function() { $(this).parents('.filter-label').remove(); }); } return { init: initialise }; }(jQuery)); var schedule = (function($, settings) { 'use strict'; var initialise, cleanId, success, error; initialise = function() { $('#schedule-form').on('submit', function(e) { e.preventDefault(); e.stopPropagation(); }); $('#saveSchedule').on('click', function() { settings.notify('info', 'Saving...', 'glyphicon-info-sign'); if (settings.validate()) { var postData = { id: cleanId($('#id').val()), name: $('#name').val(), interval: parseInt($('#interval option:checked').attr('value'), 10), host: parseInt($('#host option:checked').attr('value'), 10), hostDirectory: $('#hostDirectory').val(), localDirectory: $('#localDirectory').val(), transferType: $('input[name="transferType"]:checked').val(), automatic: $('input[name="automatic"]').prop('checked'), filtersMandatory: $('input[name="filtersMandatory"]').prop('checked'), invertFilters: $('input[name="invertFilters"]').prop('checked'), deleteHostFile: $('input[name="deleteHostFile"]').prop('checked'), moveFileTo: $('#moveFileTo').val(), filters: [], notifications: { pushbullet: [], sns: [] }, apis: [] }; $('.filter-label').each(function() { 
postData.filters.push({ "id": cleanId($(this).attr('data-filter-id')), "value": $(this).attr('data-filter-value') }); }); $('#notifications .notification.pushbullet').each(function() { postData.notifications.pushbullet.push({ "id": cleanId($(this).attr('data-notification-id')), "apiKey": $(this).find('.apiKey').val() }); }); $('#notifications .notification.sns').each(function() { postData.notifications.sns.push({ "id": cleanId($(this).attr('data-notification-id')), "topicArn": $(this).find('.topicArn').val(), "region": $(this).find('.region').val(), "accessKey": $(this).find('.accessKey').val(), "secretAccessKey": $(this).find('.secretAccessKey').val() }); }); $('#apis .api').each(function() { postData.apis.push({ "id": cleanId($(this).attr('data-api-id')), "url": $(this).find('.url').val(), "method": $(this).find('.method option:checked').attr('value'), "contentType": $(this).find('.contentType').val(), "body": $(this).find('.body').val() }); }); var url = "/api/v2/schedule"; var method = "POST"; if (null !== cleanId($('#id').val())) { url += "/" + cleanId($('#id').val()); method = "PUT"; } $.ajax({ method: method, url: url, dataType: "json", contentType: 'application/json', data: JSON.stringify(postData) }).done(success).fail(error); } else { settings.notify('danger', 'Required fields are missing.', 'glyphicon-warning-sign'); } }); $('#deleteSchedule').on('click', function() { $.ajax({ method: 'DELETE', url: '/api/v2/schedule/' + $('#id').val() }).done(function(msg) { window.location.replace('/schedules'); }).fail(error); }); $('.start-schedule').on('click', function() { var id = $(this).attr('data-schedule-id'), name = $(this).attr('data-schedule-name'); settings.notify('info', 'Starting schedule "' + name + '"', 'glyphicon-info-sign'); $.ajax({ method: 'POST', url: '/api/v2/schedule/' + id + '/execute', dataType: "json", contentType: 'application/json', data: JSON.stringify({ command: 'START' }) }).done(function(msg) { settings.notify('success', 'Schedule 
Started', 'glyphicon-ok-sign'); $('span[data-schedule-id="' + id + '"].start-schedule').toggleClass('hide'); $('span[data-schedule-id="' + id + '"].stop-schedule').parents('span').toggleClass('hide'); }).fail(error); }); $('.stop-schedule').on('click', function() { var id = $(this).attr('data-schedule-id'), name = $(this).attr('data-schedule-name'); settings.notify('info', 'Stopping schedule "' + name + '"', 'glyphicon-info-sign'); $.ajax({ method: 'POST', url: '/api/v2/schedule/' + id + '/execute', dataType: "json", contentType: 'application/json', data: JSON.stringify({ command: 'STOP' }) }).done(function(msg) { settings.notify('success', 'Schedule Stopped', 'glyphicon-ok-sign'); $('span[data-schedule-id="' + id + '"].start-schedule').toggleClass('hide'); $('span[data-schedule-id="' + id + '"].stop-schedule').parents('span').toggleClass('hide'); }).fail(error); }); $('.clearLastScanned').on('click', function() { var id = $(this).attr('data-schedule-id'); $.ajax({ method: 'DELETE', url: '/api/v2/schedule/' + id + '/scannedFiles', dataType: "json" }).done(function(msg) { $('#lastScanned' + id + ' table tbody').empty(); }).fail(error); }); }; cleanId = function(id) { if (id && $.trim(id).length > 0) { return parseInt(id, 10); } return null; }; success = function(msg) { settings.notify('success', 'Schedule Saved', 'glyphicon-ok-sign'); if (window.location.pathname === '/schedules/new') { window.location.replace('/schedules/' + msg.body.id); } }; error = function(msg) { settings.notify('danger', 'There was an error: ' + msg.responseJSON.body, 'glyphicon-warning-sign'); }; return { init: initialise } }(jQuery, settings)); var host = (function($, settings) { 'use strict'; var initialise, cleanId, makeRequest, success, error, validate; initialise = function() { if ($('input[name="identityFileEnabled"]').prop('checked')) { $('#password-group').hide(); $('#identityFile-group').show(); $('#identityFile').addClass('validate'); } 
$('input[name="identityFileEnabled"]').on('change', function() { $('#password-group').toggle(); $('#identityFile-group').toggle(); $('#identityFile').toggleClass('validate'); }); $('input[name="protocol"]').on('change', function() { if ($('input[name="protocol"]:checked').val() !== 'SFTP') { $('input[name="identityFileEnabled"]').prop('checked', false); $('#identityFile-group').hide(); $('#toggleIdentity-group').hide(); $('#password-group').show(); } else { $('#toggleIdentity-group').show(); } }); $('#testConnection').on('click', function() { if (settings.validate()) { settings.notify('info', 'Testing connection...', 'glyphicon-info-sign'); var postData = { address: $('#address').val(), port: parseInt($('#port').val(), 10), protocol: $('input[name="protocol"]:checked').val(), username: $('#username').val(), password: $('#password').val(), identityFileEnabled: $('input[name="identityFileEnabled"]').prop('checked'), identityFile: $('#identityFile').val() }; var url = "/api/v2/testConnection"; var method = "POST"; makeRequest(url, method, postData, function(msg) { settings.notify('success', 'Connection successful!', 'glyphicon-ok-sign'); }, error); } }); $('#saveHost').on('click', function() { settings.notify('info', 'Saving...', 'glyphicon-info-sign'); if (settings.validate()) { var postData = { id: cleanId($('#id').val()), name: $('#name').val(), address: $('#address').val(), port: parseInt($('#port').val(), 10), protocol: $('input[name="protocol"]:checked').val(), username: $('#username').val(), password: $('#password').val(), identityFileEnabled: $('input[name="identityFileEnabled"]').prop('checked'), identityFile: $('#identityFile').val() }; var url = "/api/v2/host"; var method = "POST"; if (null !== cleanId($('#id').val())) { url += "/" + cleanId($('#id').val()); method = "PUT"; } makeRequest(url, method, postData, success, error); } else { settings.notify('danger', 'Required fields are missing', 'glyphicon-warning-sign'); } }); $('#deleteHost').on('click', 
function() { $.ajax({ method: 'DELETE', url: '/api/v2/host/' + $('#id').val() }).done(function(msg) { window.location.replace('/hosts'); }).fail(error); }); }; makeRequest = function(url, method, postData, successCallback, errorCallback) { $.ajax({ method: method, url: url, dataType: "json", contentType: 'application/json', data: JSON.stringify(postData) }).done(successCallback).fail(errorCallback); }; success = function(msg) { settings.notify('success', 'Host Saved!', 'glyphicon-ok-sign'); if (window.location.pathname === '/hosts/new') { window.location.replace('/hosts/' + msg.body.id); } }; error = function(msg) { settings.notify('danger', 'There was an error: ' + msg.responseJSON.body, 'glyphicon-warning-sign'); }; cleanId = function(id) { if (id && $.trim(id).length > 0) { return parseInt(id, 10); } return null; }; return { init: initialise } }(jQuery, settings)) var interval = (function($) { var init; init = function() { setInterval(function() { $(".downloads").each(function() { var $this = $(this); var scheduleId = $this.attr('data-schedule-id'); $this.load('/fragments/schedule/' + scheduleId + '/transfers') }); }, 2000); }; return { init: init }; }(jQuery)); jQuery(document).ready(host.init); jQuery(document).ready(schedule.init); jQuery(document).ready(fragments.init); jQuery(document).ready(settings.init); jQuery(document).ready(interval.init); ================================================ FILE: src/main/resources/static/manifest.json ================================================ { "name": "", "icons": [ { "src": "/android-chrome-192x192.png", "sizes": "192x192", "type": "image/png" }, { "src": "/android-chrome-512x512.png", "sizes": "512x512", "type": "image/png" } ], "theme_color": "#ffffff", "background_color": "#ffffff", "display": "standalone" } ================================================ FILE: src/main/resources/templates/fragments/api.html ================================================

API Call

================================================ FILE: src/main/resources/templates/fragments/filter.html ================================================ [[${value}]] × ================================================ FILE: src/main/resources/templates/fragments/header.html ================================================
  New version available! Version of davos has been released. You can get it in the usual way.
================================================ FILE: src/main/resources/templates/fragments/pushbullet.html ================================================

Pushbullet

================================================ FILE: src/main/resources/templates/fragments/sns.html ================================================

Amazon SNS

================================================ FILE: src/main/resources/templates/fragments/transfers.html ================================================
File Size Status Progress Speed
================================================ FILE: src/main/resources/templates/v2/edit-host.html ================================================ Edit Host
================================================ FILE: src/main/resources/templates/v2/edit-schedule.html ================================================ Edit Schedule
[[${filter.value}]] ×
These provide a way for davos to inform other applications, such as file managers or 3rd party notifiers, that the schedule has completed downloading a file. This is useful when part of a wider workflow. Each downstream action will be triggered after each file has been downloaded, allowing for a more granular workflow.

Pushbullet

Amazon SNS

API Call

================================================ FILE: src/main/resources/templates/v2/hosts.html ================================================ Hosts
Name Protocol Address Username
================================================ FILE: src/main/resources/templates/v2/schedules.html ================================================ Schedules
No schedules!
[[${schedule.name}]]    Running        
File Size Status Progress Speed
================================================ FILE: src/main/resources/templates/v2/settings.html ================================================ App Settings
================================================ FILE: src/test/java/io/linuxserver/davos/VersionTest.java ================================================
package io.linuxserver.davos;

import static org.assertj.core.api.Assertions.assertThat;

import org.junit.Test;

/**
 * Unit tests for {@link Version}: parsing of "major.minor.patch" strings,
 * the three-int constructor, and relative ordering via isNewerThan().
 *
 * FIX: removed the redundant import of io.linuxserver.davos.Version —
 * this test lives in the same package, so the import was a no-op.
 */
public class VersionTest {

    @Test
    public void shouldSetVersionBitsFromString() {

        // A dotted version string is split into its three numeric components.
        Version version = new Version("2.1.3");

        assertThat(version.getMajor()).isEqualTo(2);
        assertThat(version.getMinor()).isEqualTo(1);
        assertThat(version.getPatch()).isEqualTo(3);
    }

    @Test
    public void shouldSetVersionBits() {

        Version version = new Version(2, 1, 3);

        assertThat(version.getMajor()).isEqualTo(2);
        assertThat(version.getMinor()).isEqualTo(1);
        assertThat(version.getPatch()).isEqualTo(3);
    }

    @Test
    public void shouldCompareToOthers() {

        // Comparison is most-significant-component first: major, then minor, then patch.
        assertThat(new Version("0.0.2").isNewerThan(new Version("0.0.1"))).isTrue();
        assertThat(new Version("0.1.0").isNewerThan(new Version("0.0.2"))).isTrue();
        assertThat(new Version("1.0.0").isNewerThan(new Version("0.2.0"))).isTrue();
        assertThat(new Version("1.1.0").isNewerThan(new Version("1.0.0"))).isTrue();
        assertThat(new Version("1.1.1").isNewerThan(new Version("1.0.1"))).isTrue();

        assertThat(new Version("1.1.1").isNewerThan(new Version("1.2.1"))).isFalse();
        assertThat(new Version("0.1.1").isNewerThan(new Version("0.2.1"))).isFalse();
        assertThat(new Version("0.0.0").isNewerThan(new Version("0.0.1"))).isFalse();
        assertThat(new Version("2.1.2").isNewerThan(new Version("2.2.0"))).isFalse();
        assertThat(new Version("2.2.0").isNewerThan(new Version("2.2.1"))).isFalse();
    }
}
================================================ FILE: src/test/java/io/linuxserver/davos/delegation/services/ScheduleServiceImplTest.java ================================================
package io.linuxserver.davos.delegation.services;

import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.Matchers.any;
import static org.mockito.Mockito.never;
import static
org.mockito.Mockito.verify; import static org.mockito.Mockito.when; import static org.mockito.MockitoAnnotations.initMocks; import java.util.ArrayList; import java.util.List; import org.junit.Before; import org.junit.Test; import org.mockito.ArgumentCaptor; import org.mockito.Captor; import org.mockito.InjectMocks; import org.mockito.Mock; import org.mockito.Spy; import io.linuxserver.davos.converters.ScheduleConverter; import io.linuxserver.davos.persistence.dao.HostDAO; import io.linuxserver.davos.persistence.dao.ScheduleDAO; import io.linuxserver.davos.persistence.model.HostModel; import io.linuxserver.davos.persistence.model.ScannedFileModel; import io.linuxserver.davos.persistence.model.ScheduleModel; import io.linuxserver.davos.schedule.ScheduleExecutor; import io.linuxserver.davos.web.Schedule; public class ScheduleServiceImplTest { @InjectMocks private ScheduleService scheduleService = new ScheduleServiceImpl(); @Mock private ScheduleDAO mockScheduleDAO; @Spy private ScheduleConverter scheduleConverter; @Mock private ScheduleExecutor mockExecutor; @Mock private HostDAO mockHostDAO; @Captor public ArgumentCaptor scheduleCaptor; @Before public void before() { initMocks(this); } @Test public void shouldStartScheduleFromExecutor() { scheduleService.startSchedule(1L); verify(mockExecutor).startSchedule(1L); } @Test public void shouldStopScheduleFromExecutor() { scheduleService.stopSchedule(1L); verify(mockExecutor).stopSchedule(1L); } @Test public void shouldDeleteScheduleWhenNotRunning() { scheduleService.deleteSchedule(1L); verify(mockExecutor, never()).stopSchedule(1L); verify(mockScheduleDAO).deleteSchedule(1L); } @Test public void shouldCheckIfScheduleIsRunningAndStopIfSoBeforeDeleting() { when(mockExecutor.isScheduleRunning(1L)).thenReturn(true); scheduleService.deleteSchedule(1L); verify(mockExecutor).stopSchedule(1L); verify(mockScheduleDAO).deleteSchedule(1L); } @Test public void shouldGetAllSchedulesAndConvert() { List models = new ArrayList(); 
ScheduleModel model1 = new ScheduleModel(); model1.id = 1L; model1.name = "Test 1"; model1.host = new HostModel(); ScheduleModel model2 = new ScheduleModel(); model2.id = 2L; model2.name = "Test 2"; model2.host = new HostModel(); models.add(model1); models.add(model2); when(mockScheduleDAO.getAll()).thenReturn(models); List schedules = scheduleService.fetchAllSchedules(); assertThat(schedules).hasSize(2); assertThat(schedules.get(0).getId()).isEqualTo(1L); assertThat(schedules.get(0).getName()).isEqualTo("Test 1"); assertThat(schedules.get(1).getId()).isEqualTo(2L); assertThat(schedules.get(1).getName()).isEqualTo("Test 2"); } @Test public void shouldReturnOneSchedule() { ScheduleModel model1 = new ScheduleModel(); model1.id = 1L; model1.name = "Test 1"; model1.host = new HostModel(); when(mockScheduleDAO.fetchSchedule(1L)).thenReturn(model1); Schedule schedule = scheduleService.fetchSchedule(1L); assertThat(schedule.getId()).isEqualTo(1L); assertThat(schedule.getName()).isEqualTo("Test 1"); } @Test(expected = IllegalArgumentException.class) public void shouldGetHostFromDatabaseToCheckItExistsWhenCreating() { Schedule schedule = new Schedule(); schedule.setHost(null); scheduleService.createSchedule(schedule); } @Test public void shouldOverlayHostFromDatabaseInScheduleWhenCreating() { ScheduleModel model1 = new ScheduleModel(); model1.id = 1L; model1.name = "Test 1"; model1.host = new HostModel(); when(mockScheduleDAO.updateConfig(any(ScheduleModel.class))).thenReturn(model1); HostModel hostModell = new HostModel(); when(mockHostDAO.fetchHost(2L)).thenReturn(hostModell); Schedule schedule = new Schedule(); schedule.setHost(2L); scheduleService.createSchedule(schedule); verify(mockScheduleDAO).updateConfig(scheduleCaptor.capture()); assertThat(scheduleCaptor.getValue().host).isEqualTo(hostModell); } @Test public void shouldReturnConvertedScheduleOnceCreated() { ScheduleModel model1 = new ScheduleModel(); model1.id = 1L; model1.name = "Test 1"; model1.host = new 
HostModel(); when(mockScheduleDAO.updateConfig(any(ScheduleModel.class))).thenReturn(model1); HostModel hostModell = new HostModel(); when(mockHostDAO.fetchHost(2L)).thenReturn(hostModell); Schedule schedule = new Schedule(); schedule.setHost(2L); Schedule createdSchedule = scheduleService.createSchedule(schedule); assertThat(createdSchedule.getId()).isEqualTo(1L); assertThat(createdSchedule.getName()).isEqualTo("Test 1"); } @Test public void shouldOverlayHostFromDatabaseInScheduleWhenUpdating() { setUpScheduleMocks(); HostModel hostModel1 = new HostModel(); when(mockHostDAO.fetchHost(2L)).thenReturn(hostModel1); Schedule schedule = new Schedule(); schedule.setId(1L); schedule.setHost(2L); scheduleService.updateSchedule(schedule); verify(mockScheduleDAO).updateConfig(scheduleCaptor.capture()); assertThat(scheduleCaptor.getValue().host).isEqualTo(hostModel1); } @Test public void shouldReturnConvertedScheduleOnceUpdated() { setUpScheduleMocks(); Schedule schedule = new Schedule(); schedule.setId(1L); schedule.setHost(2L); Schedule createdSchedule = scheduleService.updateSchedule(schedule); assertThat(createdSchedule.getId()).isEqualTo(1L); assertThat(createdSchedule.getName()).isEqualTo("Test 1"); } @Test public void shouldOverlayLastRunTimeOfExistingScheduleToNewOne() { setUpScheduleMocks(); Schedule schedule = new Schedule(); schedule.setId(1L); schedule.setHost(2L); scheduleService.updateSchedule(schedule); verify(mockScheduleDAO).updateConfig(scheduleCaptor.capture()); assertThat(scheduleCaptor.getValue().getLastRunTime()).isEqualTo(12345L); } @Test(expected = IllegalArgumentException.class) public void shouldThrowExceptionIfScheduleHasNoIdWhenUpdating() { Schedule schedule = new Schedule(); schedule.setHost(2L); HostModel hostModell = new HostModel(); when(mockHostDAO.fetchHost(2L)).thenReturn(hostModell); scheduleService.updateSchedule(schedule); } @Test public void shouldClearScannedFiles() { ScheduleModel model = new ScheduleModel(); model.scannedFiles = new 
ArrayList(); model.scannedFiles.add(new ScannedFileModel()); when(mockScheduleDAO.fetchSchedule(1L)).thenReturn(model); assertThat(model.scannedFiles).hasSize(1); scheduleService.clearScannedFilesFromSchedule(1L); assertThat(model.scannedFiles).isEmpty(); verify(mockScheduleDAO).updateConfig(model); } private void setUpScheduleMocks() { ScheduleModel model1 = new ScheduleModel(); model1.id = 1L; model1.name = "Test 1"; model1.setLastRunTime(12345L); model1.host = new HostModel(); when(mockScheduleDAO.updateConfig(any(ScheduleModel.class))).thenReturn(model1); when(mockScheduleDAO.fetchSchedule(1L)).thenReturn(model1); when(mockHostDAO.fetchHost(2L)).thenReturn(new HostModel()); } } ================================================ FILE: src/test/java/io/linuxserver/davos/delegation/services/SettingsServiceImplTest.java ================================================ package io.linuxserver.davos.delegation.services; import static org.assertj.core.api.Assertions.assertThat; import static org.mockito.Matchers.any; import static org.mockito.Matchers.eq; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; import static org.mockito.MockitoAnnotations.initMocks; import org.junit.Before; import org.junit.Test; import org.mockito.ArgumentCaptor; import org.mockito.Captor; import org.mockito.InjectMocks; import org.mockito.Mock; import org.springframework.http.HttpEntity; import org.springframework.http.HttpMethod; import org.springframework.http.HttpStatus; import org.springframework.http.ResponseEntity; import org.springframework.web.client.RestClientException; import org.springframework.web.client.RestTemplate; import io.linuxserver.davos.Version; public class SettingsServiceImplTest { @Mock private RestTemplate mockRestTemplate; @Captor private ArgumentCaptor> entityCaptor; @InjectMocks private SettingsService settingsService = new SettingsServiceImpl(); @Before public void before() { initMocks(this); 
when(mockRestTemplate.exchange(eq("https://raw.githubusercontent.com/linuxserver/davos/LatestRelease/version.txt"), eq(HttpMethod.GET), any(HttpEntity.class), eq(String.class))) .thenReturn(new ResponseEntity("2.2.2", HttpStatus.OK)); } @Test public void checkVersionShouldCallGitHub() { settingsService.retrieveRemoteVersion(); verify(mockRestTemplate).exchange(eq("https://raw.githubusercontent.com/linuxserver/davos/LatestRelease/version.txt"), eq(HttpMethod.GET), entityCaptor.capture(), eq(String.class)); } @Test public void checkVersionShouldReturnVersionFromGithub() { Version version = settingsService.retrieveRemoteVersion(); assertThat(version.toString()).isEqualTo("2.2.2"); } @Test public void ifRestTemplateFailsThenReturnEmptyVersion() { when(mockRestTemplate.exchange(eq("https://raw.githubusercontent.com/linuxserver/davos/LatestRelease/version.txt"), eq(HttpMethod.GET), any(HttpEntity.class), eq(String.class))).thenThrow(new RestClientException("")); Version version = settingsService.retrieveRemoteVersion(); assertThat(version.toString()).isEqualTo("0.0.0"); } } ================================================ FILE: src/test/java/io/linuxserver/davos/persistence/dao/DefaultScheduleDAOTest.java ================================================ package io.linuxserver.davos.persistence.dao; import static org.assertj.core.api.Assertions.assertThat; import static org.mockito.Mockito.when; import static org.mockito.MockitoAnnotations.initMocks; import java.util.Arrays; import java.util.List; import org.junit.Before; import org.junit.Test; import org.mockito.InjectMocks; import org.mockito.Mock; import io.linuxserver.davos.persistence.model.ScannedFileModel; import io.linuxserver.davos.persistence.model.ScheduleModel; import io.linuxserver.davos.persistence.repository.ScheduleRepository; public class DefaultScheduleDAOTest { @Mock private ScheduleRepository mockRepository; @InjectMocks private DefaultScheduleDAO configDAO = new DefaultScheduleDAO(); @Before public 
void setUp() { initMocks(this); } @Test public void updatingScannedFilesShouldWork() { ScheduleModel model = new ScheduleModel(); model.id = 1L; model.scannedFiles.add(toScannedFileModel("oldFile", model)); model.scannedFiles.add(toScannedFileModel("another", model)); model.scannedFiles.add(toScannedFileModel("blah", model)); List files = Arrays.asList("file1", "file2"); when(mockRepository.findOne(1L)).thenReturn(model); configDAO.updateScannedFilesOnSchedule(1L, files); assertThat(model.scannedFiles).hasSize(2); assertThat(model.scannedFiles.get(0).file).isEqualTo("file1"); assertThat(model.scannedFiles.get(0).schedule).isEqualTo(model); assertThat(model.scannedFiles.get(1).file).isEqualTo("file2"); assertThat(model.scannedFiles.get(1).schedule).isEqualTo(model); } private ScannedFileModel toScannedFileModel(String fileName, ScheduleModel model) { ScannedFileModel scannedFileModel = new ScannedFileModel(); scannedFileModel.file = fileName; scannedFileModel.schedule = model; return scannedFileModel; } } ================================================ FILE: src/test/java/io/linuxserver/davos/schedule/ScheduleConfigurationFactoryTest.java ================================================ package io.linuxserver.davos.schedule; import static org.assertj.core.api.Assertions.assertThat; import java.util.ArrayList; import org.junit.Test; import io.linuxserver.davos.persistence.model.ActionModel; import io.linuxserver.davos.persistence.model.FilterModel; import io.linuxserver.davos.persistence.model.HostModel; import io.linuxserver.davos.persistence.model.ScheduleModel; import io.linuxserver.davos.schedule.workflow.actions.HttpAPICallAction; import io.linuxserver.davos.schedule.workflow.actions.MoveFileAction; import io.linuxserver.davos.schedule.workflow.actions.PushbulletNotifyAction; import io.linuxserver.davos.schedule.workflow.actions.SNSNotifyAction; import io.linuxserver.davos.transfer.ftp.FileTransferType; import io.linuxserver.davos.transfer.ftp.TransferProtocol; 
public class ScheduleConfigurationFactoryTest { @Test public void shouldConvertAllMainFields() { ScheduleModel model = new ScheduleModel(); model.host = new HostModel(); model.host.protocol = TransferProtocol.FTP; model.host.address = "hostname"; model.host.password = "password"; model.host.port = 8; model.host.username = "username"; model.setFiltersMandatory(true); model.localFilePath = "local/"; model.name = "schedulename"; model.remoteFilePath = "thing/"; model.setStartAutomatically(true); model.transferType = FileTransferType.FILE; ScheduleConfiguration config = ScheduleConfigurationFactory.createConfig(model); assertThat(config.getConnectionType()).isEqualTo(model.host.protocol); assertThat(config.getHostName()).isEqualTo(model.host.address); assertThat(config.getLocalFilePath()).isEqualTo(model.localFilePath); assertThat(config.getScheduleName()).isEqualTo(model.name); assertThat(config.getCredentials().getPassword()).isEqualTo(model.host.password); assertThat(config.getCredentials().getIdentity()).isNull(); assertThat(config.getPort()).isEqualTo(model.host.port); assertThat(config.getRemoteFilePath()).isEqualTo(model.remoteFilePath); assertThat(config.getTransferType()).isEqualTo(model.transferType); assertThat(config.getCredentials().getUsername()).isEqualTo(model.host.username); assertThat(config.isFiltersMandatory()).isTrue(); } @Test public void shouldUseCorrectCredentialsIfIdentityPresent() { ScheduleModel model = new ScheduleModel(); model.host = new HostModel(); model.host.protocol = TransferProtocol.FTP; model.host.address = "hostname"; model.host.password = "password"; model.host.port = 8; model.host.username = "username"; model.host.setIdentityFileEnabled(true); model.host.identityFile = "blah"; model.setFiltersMandatory(true); model.localFilePath = "local/"; model.name = "schedulename"; model.remoteFilePath = "thing/"; model.setStartAutomatically(true); model.transferType = FileTransferType.FILE; ScheduleConfiguration config = 
ScheduleConfigurationFactory.createConfig(model); assertThat(config.getConnectionType()).isEqualTo(model.host.protocol); assertThat(config.getHostName()).isEqualTo(model.host.address); assertThat(config.getLocalFilePath()).isEqualTo(model.localFilePath); assertThat(config.getScheduleName()).isEqualTo(model.name); assertThat(config.getCredentials().getPassword()).isNull(); assertThat(config.getCredentials().getIdentity().getIdentityFile()).isEqualTo("blah"); assertThat(config.getPort()).isEqualTo(model.host.port); assertThat(config.getRemoteFilePath()).isEqualTo(model.remoteFilePath); assertThat(config.getTransferType()).isEqualTo(model.transferType); assertThat(config.getCredentials().getUsername()).isEqualTo(model.host.username); assertThat(config.isFiltersMandatory()).isTrue(); } @Test public void shouldAddAllFiltersIfAny() { ScheduleModel model = new ScheduleModel(); model.host = new HostModel(); model.host.protocol = TransferProtocol.FTP; model.host.address = "hostname"; model.host.password = "password"; model.host.port = 8; model.host.username = "username"; model.filters = new ArrayList(); FilterModel filterModel = new FilterModel(); filterModel.value = "filter1"; FilterModel filterModel2 = new FilterModel(); filterModel2.value = "filter2"; model.filters.add(filterModel); model.filters.add(filterModel2); ScheduleConfiguration config = ScheduleConfigurationFactory.createConfig(model); assertThat(config.getFilters()).contains("filter1", "filter2"); assertThat(config.getFilters()).hasSize(2); } @Test public void shouldAddAllActionsIfAny() { ScheduleModel model = new ScheduleModel(); model.host = new HostModel(); model.host.protocol = TransferProtocol.FTP; model.host.address = "hostname"; model.host.password = "password"; model.host.port = 8; model.host.username = "username"; model.localFilePath = "a/local/path/"; model.moveFileTo = "/local/path"; model.actions = new ArrayList(); ActionModel action2 = new ActionModel(); action2.actionType = "pushbullet"; 
action2.f1 = "apiKey"; ActionModel action3 = new ActionModel(); action3.actionType = "api"; action3.f1 = "url"; action3.f2 = "POST"; action3.f3 = "application/json"; action3.f4 = "some body"; ActionModel action4 = new ActionModel(); action4.actionType = "sns"; action4.f1 = "topic"; action4.f2 = "region"; action4.f3 = "Access"; action4.f4 = "secret"; model.actions.add(action2); model.actions.add(action3); model.actions.add(action4); ScheduleConfiguration config = ScheduleConfigurationFactory.createConfig(model); assertThat(config.getActions().get(0)).isInstanceOf(MoveFileAction.class); assertThat(config.getActions().get(1)).isInstanceOf(PushbulletNotifyAction.class); assertThat(config.getActions().get(2)).isInstanceOf(HttpAPICallAction.class); assertThat(config.getActions().get(3)).isInstanceOf(SNSNotifyAction.class); } } ================================================ FILE: src/test/java/io/linuxserver/davos/schedule/ScheduleExecutorTest.java ================================================ package io.linuxserver.davos.schedule; import static org.assertj.core.api.Assertions.assertThat; import static org.mockito.Matchers.any; import static org.mockito.Matchers.eq; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.never; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; import static org.mockito.MockitoAnnotations.initMocks; import java.util.ArrayList; import java.util.List; import java.util.concurrent.ScheduledExecutorService; import java.util.concurrent.ScheduledFuture; import java.util.concurrent.TimeUnit; import org.junit.Before; import org.junit.Test; import org.mockito.InjectMocks; import org.mockito.Mock; import io.linuxserver.davos.exception.ScheduleAlreadyRunningException; import io.linuxserver.davos.exception.ScheduleNotRunningException; import io.linuxserver.davos.persistence.dao.ScheduleDAO; import io.linuxserver.davos.persistence.model.ScheduleModel; public class ScheduleExecutorTest { @InjectMocks 
private ScheduleExecutor scheduleExecutor = new ScheduleExecutor();

@Mock
private ScheduleDAO mockConfigurationDAO;

@Mock
private ScheduledExecutorService mockExecutorService;

@Before
public void setUp() {
    initMocks(this);
}

// Only schedules flagged as "start automatically" should be submitted to the
// executor service; the schedule's configured interval (minutes) drives the rate.
@Test
public void shouldScheduleBasedOnIntervalAndAutoStartup() {

    List<ScheduleModel> models = new ArrayList<>(); // was a raw List; generified for type safety

    ScheduleModel nonAutoModel = new ScheduleModel();
    nonAutoModel.setStartAutomatically(false);

    ScheduleModel autoModel = new ScheduleModel();
    autoModel.setStartAutomatically(true);
    autoModel.interval = 50;

    models.add(nonAutoModel);
    models.add(autoModel);

    when(mockConfigurationDAO.getAll()).thenReturn(models);

    scheduleExecutor.runAutomaticStartupSchedules();

    // Uppercase long suffix (0L/50L) used instead of the easily-misread 0l/50l.
    verify(mockExecutorService).scheduleAtFixedRate(any(RunnableSchedule.class), eq(0L), eq(50L), eq(TimeUnit.MINUTES));
}

@Test
public void startScheduleShouldRunThatSchedule() {

    ScheduleModel config = new ScheduleModel();
    config.interval = 86;

    when(mockConfigurationDAO.fetchSchedule(1337L)).thenReturn(config);

    scheduleExecutor.startSchedule(1337L);

    verify(mockExecutorService).scheduleAtFixedRate(any(RunnableSchedule.class), eq(0L), eq(86L), eq(TimeUnit.MINUTES));
}

// Starting a schedule twice must fail fast rather than double-register it.
@Test(expected = ScheduleAlreadyRunningException.class)
public void startScheduleShouldNotRunScheduleIfAlreadyRunning() {

    ScheduleModel config = new ScheduleModel();
    config.interval = 86;
    config.id = 1337L;

    when(mockConfigurationDAO.fetchSchedule(1337L)).thenReturn(config);

    scheduleExecutor.startSchedule(1337L);
    scheduleExecutor.startSchedule(1337L);
}

@Test
@SuppressWarnings("unchecked")
public void stopScheduleShouldStopRunningSchedule() {

    ScheduleModel config = new ScheduleModel();
    config.interval = 86;
    config.id = 1337L;

    @SuppressWarnings("rawtypes")
    ScheduledFuture mockFuture = mock(ScheduledFuture.class);

    when(mockConfigurationDAO.fetchSchedule(1337L)).thenReturn(config);
    when(mockExecutorService.scheduleAtFixedRate(any(Runnable.class), eq(0L), eq(86L), eq(TimeUnit.MINUTES)))
            .thenReturn(mockFuture);

    scheduleExecutor.startSchedule(1337L);
    scheduleExecutor.stopSchedule(1337L);

    verify(mockFuture).cancel(true);
}

@Test
@SuppressWarnings("unchecked")
public void shouldBeAbleToInformWhetherScheduleIsRunningOrNot() {

    ScheduleModel config = new ScheduleModel();
    config.interval = 86;
    config.id = 1337L;

    @SuppressWarnings("rawtypes")
    ScheduledFuture mockFuture = mock(ScheduledFuture.class);

    when(mockConfigurationDAO.fetchSchedule(1337L)).thenReturn(config);
    when(mockExecutorService.scheduleAtFixedRate(any(Runnable.class), eq(0L), eq(86L), eq(TimeUnit.MINUTES)))
            .thenReturn(mockFuture);

    scheduleExecutor.startSchedule(1337L);
    assertThat(scheduleExecutor.isScheduleRunning(1337L)).isTrue();

    scheduleExecutor.stopSchedule(1337L);
    assertThat(scheduleExecutor.isScheduleRunning(1337L)).isFalse();

    verify(mockFuture).cancel(true);
}

// A future that already reports itself cancelled must not be cancelled again.
@Test
@SuppressWarnings("unchecked")
public void stopScheduleShouldNotStopRunningScheduleIfItHasAlreadyBeenCancelled() {

    ScheduleModel config = new ScheduleModel();
    config.interval = 86;
    config.id = 1337L;

    @SuppressWarnings("rawtypes")
    ScheduledFuture mockFuture = mock(ScheduledFuture.class);
    when(mockFuture.isCancelled()).thenReturn(true);

    when(mockConfigurationDAO.fetchSchedule(1337L)).thenReturn(config);
    when(mockExecutorService.scheduleAtFixedRate(any(Runnable.class), eq(0L), eq(86L), eq(TimeUnit.MINUTES)))
            .thenReturn(mockFuture);

    scheduleExecutor.startSchedule(1337L);
    scheduleExecutor.stopSchedule(1337L);

    verify(mockFuture, never()).cancel(true);
}

@Test(expected = ScheduleNotRunningException.class)
public void stopScheduleShouldNotAttemptToStopNonRunningSchedule() {

    ScheduleModel config = new ScheduleModel();
    config.interval = 86;
    config.id = 1337L;

    when(mockConfigurationDAO.fetchSchedule(1337L)).thenReturn(config);

    scheduleExecutor.stopSchedule(1337L);
}
}

================================================ FILE: src/test/java/io/linuxserver/davos/schedule/workflow/ConnectWorkflowStepTest.java ================================================

package
io.linuxserver.davos.schedule.workflow; import static org.assertj.core.api.Assertions.assertThat; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.never; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; import static org.mockito.MockitoAnnotations.initMocks; import org.junit.Before; import org.junit.Test; import org.mockito.InOrder; import org.mockito.InjectMocks; import org.mockito.Mock; import org.mockito.Mockito; import io.linuxserver.davos.schedule.ScheduleConfiguration; import io.linuxserver.davos.transfer.ftp.FileTransferType; import io.linuxserver.davos.transfer.ftp.TransferProtocol; import io.linuxserver.davos.transfer.ftp.client.Client; import io.linuxserver.davos.transfer.ftp.client.ClientFactory; import io.linuxserver.davos.transfer.ftp.client.UserCredentials; import io.linuxserver.davos.transfer.ftp.connection.Connection; import io.linuxserver.davos.transfer.ftp.exception.ClientConnectionException; public class ConnectWorkflowStepTest { @InjectMocks private ConnectWorkflowStep workflowStep = new ConnectWorkflowStep(); @Mock private ClientFactory mockClientFactory; @Mock private Client mockClient; @Mock(name = "nextStep") private WorkflowStep mockNextStep; @Mock(name = "backoutStep") private WorkflowStep mockBackoutStep; @Before public void setUp() { initMocks(this); when(mockClientFactory.getClient(TransferProtocol.SFTP)).thenReturn(mockClient); } @Test public void runStepShouldCreateNewClient() { ScheduleConfiguration config = new ScheduleConfiguration("", TransferProtocol.SFTP, "", 0, new UserCredentials("", ""), "", "", FileTransferType.FILE, false, false, false); workflowStep.runStep(new ScheduleWorkflow(config)); verify(mockClientFactory).getClient(TransferProtocol.SFTP); } @Test public void runStepShouldSetClientIntoWorkflow() { ScheduleConfiguration config = new ScheduleConfiguration("", TransferProtocol.SFTP, "", 0, new UserCredentials("", ""), "", "", FileTransferType.FILE, false, false, 
false); ScheduleWorkflow schedule = new ScheduleWorkflow(config); workflowStep.runStep(schedule); assertThat(schedule.getClient()).isEqualTo(mockClient); } @Test public void runStepShouldConnectToNewlyCreatedClient() { ScheduleConfiguration config = new ScheduleConfiguration("", TransferProtocol.SFTP, "", 0, new UserCredentials("", ""), "", "", FileTransferType.FILE, false, false, false); workflowStep.runStep(new ScheduleWorkflow(config)); verify(mockClient).connect(); } @Test public void runStepShouldConnectToTheClientUsingTheConfigsHostAndCredentialInformation() { String hostIP = "123.456.789.0"; int port = 1337; UserCredentials credentials = new UserCredentials(hostIP, hostIP); ScheduleConfiguration config = new ScheduleConfiguration("scheduleName", TransferProtocol.SFTP, hostIP, port, credentials, "remotePath", "localPath", FileTransferType.FILE, false, false, false); workflowStep.runStep(new ScheduleWorkflow(config)); InOrder inOrder = Mockito.inOrder(mockClient); inOrder.verify(mockClient).setCredentials(credentials); inOrder.verify(mockClient).setHost(hostIP); inOrder.verify(mockClient).setPort(port); inOrder.verify(mockClient).connect(); } @Test public void runStepShouldPlaceConnectedClientConnectionIntoSchedule() { Connection mockConnection = mock(Connection.class); when(mockClient.connect()).thenReturn(mockConnection); ScheduleConfiguration config = new ScheduleConfiguration("", TransferProtocol.SFTP, "", 0, new UserCredentials("", ""), "", "", FileTransferType.FILE, false, false, false); ScheduleWorkflow schedule = new ScheduleWorkflow(config); workflowStep.runStep(schedule); assertThat(schedule.getConnection()).isEqualTo(mockConnection); } @Test public void runStepShouldCallOnNextStepWhenComplete() { ScheduleConfiguration config = new ScheduleConfiguration("", TransferProtocol.SFTP, "", 0, new UserCredentials("", ""), "", "", FileTransferType.FILE, false, false, false); ScheduleWorkflow schedule = new ScheduleWorkflow(config); 
workflowStep.runStep(schedule); InOrder inOrder = Mockito.inOrder(mockClient, mockNextStep); inOrder.verify(mockClient).connect(); inOrder.verify(mockNextStep).runStep(schedule); } @Test public void ifClientCannotConnectThenDoNotCallNextStep() { ScheduleConfiguration config = new ScheduleConfiguration("", TransferProtocol.SFTP, "", 0, new UserCredentials("", ""), "", "", FileTransferType.FILE, false, false, false); when(mockClient.connect()).thenThrow(new ClientConnectionException()); ScheduleWorkflow schedule = new ScheduleWorkflow(config); workflowStep.runStep(schedule); InOrder inOrder = Mockito.inOrder(mockClient, mockNextStep); inOrder.verify(mockClient).connect(); inOrder.verify(mockNextStep, never()).runStep(schedule); } } ================================================ FILE: src/test/java/io/linuxserver/davos/schedule/workflow/DisconnectWorkflowStepTest.java ================================================ package io.linuxserver.davos.schedule.workflow; import static org.mockito.Mockito.doThrow; import static org.mockito.Mockito.verify; import static org.mockito.MockitoAnnotations.initMocks; import org.junit.Before; import org.junit.Test; import org.mockito.Mock; import io.linuxserver.davos.schedule.ScheduleConfiguration; import io.linuxserver.davos.transfer.ftp.FileTransferType; import io.linuxserver.davos.transfer.ftp.TransferProtocol; import io.linuxserver.davos.transfer.ftp.client.Client; import io.linuxserver.davos.transfer.ftp.client.UserCredentials; import io.linuxserver.davos.transfer.ftp.connection.Connection; import io.linuxserver.davos.transfer.ftp.exception.ClientDisconnectException; public class DisconnectWorkflowStepTest { private DisconnectWorkflowStep workflowStep = new DisconnectWorkflowStep(); @Mock private Client mockClient; @Mock private Connection mockConnection; @Before public void setUp() { initMocks(this); } @Test public void runStepShouldCloseTheConnection() { ScheduleConfiguration config = new ScheduleConfiguration("", 
TransferProtocol.SFTP, "", 0, new UserCredentials("", ""), "", "", FileTransferType.FILE, false, false, false); ScheduleWorkflow schedule = new ScheduleWorkflow(config); schedule.setConnection(mockConnection); schedule.setClient(mockClient); workflowStep.runStep(schedule); verify(mockClient).disconnect(); } @Test public void ifDisconnectingFailsThenDoNothing() { ScheduleConfiguration config = new ScheduleConfiguration("", TransferProtocol.SFTP, "", 0, new UserCredentials("", ""), "", "", FileTransferType.FILE, false, false, false); ScheduleWorkflow schedule = new ScheduleWorkflow(config); schedule.setConnection(mockConnection); schedule.setClient(mockClient); doThrow(new ClientDisconnectException()).when(mockClient).disconnect(); workflowStep.runStep(schedule); } } ================================================ FILE: src/test/java/io/linuxserver/davos/schedule/workflow/DownloadFilesWorkflowStepTest.java ================================================ package io.linuxserver.davos.schedule.workflow; import static java.util.stream.Collectors.toList; import static org.assertj.core.api.Assertions.assertThat; import static org.mockito.Matchers.any; import static org.mockito.Matchers.eq; import static org.mockito.Mockito.doThrow; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; import static org.mockito.MockitoAnnotations.initMocks; import java.util.ArrayList; import org.junit.Before; import org.junit.Test; import org.mockito.ArgumentCaptor; import org.mockito.Captor; import org.mockito.InOrder; import org.mockito.InjectMocks; import org.mockito.Mock; import org.mockito.Mockito; import io.linuxserver.davos.schedule.ScheduleConfiguration; import io.linuxserver.davos.schedule.workflow.actions.MoveFileAction; import io.linuxserver.davos.schedule.workflow.actions.PostDownloadAction; import io.linuxserver.davos.schedule.workflow.transfer.FTPTransfer; import 
io.linuxserver.davos.schedule.workflow.transfer.TransferStrategy; import io.linuxserver.davos.schedule.workflow.transfer.TransferStrategyFactory; import io.linuxserver.davos.transfer.ftp.FTPFile; import io.linuxserver.davos.transfer.ftp.FileTransferType; import io.linuxserver.davos.transfer.ftp.TransferProtocol; import io.linuxserver.davos.transfer.ftp.client.UserCredentials; import io.linuxserver.davos.transfer.ftp.connection.Connection; import io.linuxserver.davos.transfer.ftp.exception.DownloadFailedException; import io.linuxserver.davos.util.FileUtils; public class DownloadFilesWorkflowStepTest { @InjectMocks private DownloadFilesWorkflowStep workflowStep = new DownloadFilesWorkflowStep(); @Mock(name = "nextStep") private WorkflowStep mockNextStep; @Mock(name = "backoutStep") private WorkflowStep mockBackoutStep; @Mock private Connection mockConnection; @Mock private TransferStrategyFactory mockTransferStrategyFactory; @Mock private FileUtils mockFileUtils; @Captor private ArgumentCaptor transferCaptor; private TransferStrategy mockTransferStrategy; @Before public void setUp() { initMocks(this); mockTransferStrategy = mock(TransferStrategy.class); when(mockTransferStrategyFactory.getStrategy(any(FileTransferType.class), eq(mockConnection))) .thenReturn(mockTransferStrategy); } @Test public void shouldCallStrategyFactoryToGetCorrectStrategyAndPassFileThrough() { ArrayList filesToDownload = new ArrayList(); FTPFile file = new FTPFile("", 0, "", 0, false); FTPFile file2 = new FTPFile("", 0, "", 0, false); filesToDownload.add(file); filesToDownload.add(file2); ScheduleConfiguration config = new ScheduleConfiguration("", TransferProtocol.SFTP, "", 0, new UserCredentials("", ""), "", "local/", FileTransferType.FILE, false, false, false); ArrayList actions = new ArrayList(); actions.add(new MoveFileAction("", "")); config.setActions(actions); ScheduleWorkflow schedule = new ScheduleWorkflow(config); schedule.setConnection(mockConnection); 
schedule.getFilesToDownload().addAll(filesToDownload.stream().map(f -> new FTPTransfer(f)).collect(toList())); workflowStep.runStep(schedule); verify(mockTransferStrategyFactory).getStrategy(FileTransferType.FILE, mockConnection); verify(mockTransferStrategy).setPostDownloadActions(actions); verify(mockTransferStrategy, times(2)).transferFile(transferCaptor.capture(), eq("local/")); assertThat(transferCaptor.getAllValues().get(0).getFile()).isEqualTo(file); assertThat(transferCaptor.getAllValues().get(1).getFile()).isEqualTo(file2); } @Test public void shouldCallOnNextStepWhenFinished() { ArrayList filesToDownload = new ArrayList(); FTPFile file = new FTPFile("", 0, "", 0, false); filesToDownload.add(file); ScheduleConfiguration config = new ScheduleConfiguration("", TransferProtocol.SFTP, "", 0, new UserCredentials("", ""), "", "local/", FileTransferType.FILE, false, false, false); ScheduleWorkflow schedule = new ScheduleWorkflow(config); schedule.getFilesToDownload().addAll(filesToDownload.stream().map(f -> new FTPTransfer(f)).collect(toList())); schedule.setConnection(mockConnection); workflowStep.runStep(schedule); InOrder inOrder = Mockito.inOrder(mockTransferStrategy, mockNextStep); inOrder.verify(mockTransferStrategy).transferFile(any(FTPTransfer.class), eq("local/")); inOrder.verify(mockNextStep).runStep(schedule); } @Test public void ifStrategyTranferFailsThenShouldStillCallNextStep() { ArrayList filesToDownload = new ArrayList(); FTPFile file = new FTPFile("", 0, "", 0, false); filesToDownload.add(file); ScheduleConfiguration config = new ScheduleConfiguration("", TransferProtocol.SFTP, "", 0, new UserCredentials("", ""), "", "local/", FileTransferType.FILE, false, false, false); ScheduleWorkflow schedule = new ScheduleWorkflow(config); schedule.getFilesToDownload().addAll(filesToDownload.stream().map(f -> new FTPTransfer(f)).collect(toList())); schedule.setConnection(mockConnection); doThrow(new 
DownloadFailedException()).when(mockTransferStrategy).transferFile(any(FTPTransfer.class), eq("local/")); workflowStep.runStep(schedule); verify(mockNextStep).runStep(schedule); } @Test public void shouldDeleteHostFileIfOptionSet() { ArrayList filesToDownload = new ArrayList(); FTPFile file = new FTPFile("", 0, "", 0, false); filesToDownload.add(file); ScheduleConfiguration config = new ScheduleConfiguration("", TransferProtocol.SFTP, "", 0, new UserCredentials("", ""), "", "local/", FileTransferType.FILE, false, false, true); ScheduleWorkflow schedule = new ScheduleWorkflow(config); schedule.getFilesToDownload().addAll(filesToDownload.stream().map(f -> new FTPTransfer(f)).collect(toList())); schedule.setConnection(mockConnection); workflowStep.runStep(schedule); verify(mockConnection).deleteRemoteFile(file); } } ================================================ FILE: src/test/java/io/linuxserver/davos/schedule/workflow/FilterFilesWorkflowStepTest.java ================================================ package io.linuxserver.davos.schedule.workflow; import static java.util.stream.Collectors.toList; import static org.assertj.core.api.Assertions.assertThat; import static org.mockito.Mockito.never; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; import static org.mockito.MockitoAnnotations.initMocks; import java.util.ArrayList; import java.util.Arrays; import org.joda.time.DateTime; import org.junit.Before; import org.junit.Test; import org.mockito.InOrder; import org.mockito.InjectMocks; import org.mockito.Mock; import org.mockito.Mockito; import io.linuxserver.davos.schedule.ScheduleConfiguration; import io.linuxserver.davos.transfer.ftp.FTPFile; import io.linuxserver.davos.transfer.ftp.FileTransferType; import io.linuxserver.davos.transfer.ftp.connection.Connection; import io.linuxserver.davos.transfer.ftp.exception.FileListingException; public class FilterFilesWorkflowStepTest { @InjectMocks private FilterFilesWorkflowStep 
workflowStep = new FilterFilesWorkflowStep(); @Mock(name = "nextStep") private DownloadFilesWorkflowStep mockNextStep; @Mock(name = "backoutStep") private DisconnectWorkflowStep mockBackupStep; @Mock private Connection mockConnection; @Before public void setUp() { initMocks(this); } @Test public void workflowStepShouldListFilesInTheRemoteDirectory() { ScheduleConfiguration config = new ScheduleConfiguration(null, null, null, 0, null, "remote/", "local/", FileTransferType.FILE, false, false, false); ScheduleWorkflow schedule = new ScheduleWorkflow(config); schedule.setConnection(mockConnection); workflowStep.runStep(schedule); verify(mockConnection).listFiles("remote/"); } @Test public void workflowStepShouldSetTheFilesFromLastScanToThisScanAtEnd() { ScheduleConfiguration config = new ScheduleConfiguration(null, null, null, 0, null, "remote/", "local/", FileTransferType.FILE, false, false, false); config.setFilters(Arrays.asList("file1", "file2", "file4")); ArrayList files = new ArrayList(); FTPFile file1 = new FTPFile("file1", 0, "remote/", DateTime.now().getMillis(), false); FTPFile file2 = new FTPFile("file2", 0, "remote/", DateTime.now().getMillis(), false); FTPFile file3 = new FTPFile("file3", 0, "remote/", DateTime.now().getMillis(), false); FTPFile file4 = new FTPFile("file4", 0, "remote/", DateTime.now().getMillis(), false); FTPFile file5 = new FTPFile("file5", 0, "remote/", DateTime.now().getMillis(), false); files.add(file1); files.add(file2); files.add(file3); files.add(file4); files.add(file5); when(mockConnection.listFiles("remote/")).thenReturn(files); ScheduleWorkflow schedule = new ScheduleWorkflow(config); schedule.getFilesFromLastScan().addAll(Arrays.asList("some", "old", "files")); schedule.setConnection(mockConnection); workflowStep.runStep(schedule); assertThat(schedule.getFilesFromLastScan()).isEqualTo(Arrays.asList("file1", "file2", "file3", "file4", "file5")); } @Test public void 
workflowStepShouldFilterOutAnyFilesThatAreNotInTheGivenConfigList() { ScheduleConfiguration config = new ScheduleConfiguration(null, null, null, 0, null, "remote/", "local/", FileTransferType.FILE, false, false, false); config.setFilters(Arrays.asList("file1", "file2", "file4")); ArrayList files = new ArrayList(); FTPFile file1 = new FTPFile("file1", 0, "remote/", DateTime.now().getMillis(), false); FTPFile file2 = new FTPFile("file2", 0, "remote/", DateTime.now().getMillis(), false); FTPFile file3 = new FTPFile("file3", 0, "remote/", DateTime.now().getMillis(), false); FTPFile file4 = new FTPFile("file4", 0, "remote/", DateTime.now().getMillis(), false); FTPFile file5 = new FTPFile("file5", 0, "remote/", DateTime.now().getMillis(), false); files.add(file1); files.add(file2); files.add(file3); files.add(file4); files.add(file5); when(mockConnection.listFiles("remote/")).thenReturn(files); ScheduleWorkflow schedule = new ScheduleWorkflow(config); schedule.setConnection(mockConnection); workflowStep.runStep(schedule); assertThat(schedule.getFilesToDownload().stream().map(t -> t.getFile()).collect(toList())) .isEqualTo(Arrays.asList(file1, file2, file4)); } @Test public void workflowStepShouldFilterOutAnyFilesThatAreNotInTheGivenConfigListAndWereNotScannedInLastRun() { ScheduleConfiguration config = new ScheduleConfiguration(null, null, null, 0, null, "remote/", "local/", FileTransferType.FILE, false, false, false); config.setFilters(Arrays.asList("file1", "file2", "file4")); ArrayList files = new ArrayList(); FTPFile file1 = new FTPFile("file1", 0, "remote/", DateTime.now().minusDays(2).getMillis(), false); FTPFile file2 = new FTPFile("file2", 0, "remote/", DateTime.now().getMillis(), false); FTPFile file3 = new FTPFile("file3", 0, "remote/", DateTime.now().minusDays(2).getMillis(), false); FTPFile file4 = new FTPFile("file4", 0, "remote/", DateTime.now().minusDays(2).getMillis(), false); FTPFile file5 = new FTPFile("file5", 0, "remote/", DateTime.now().getMillis(), 
false); files.add(file1); files.add(file2); files.add(file3); files.add(file4); files.add(file5); when(mockConnection.listFiles("remote/")).thenReturn(files); ScheduleWorkflow schedule = new ScheduleWorkflow(config); schedule.getFilesFromLastScan().addAll(Arrays.asList("file1", "file3", "file4", "file5")); schedule.setConnection(mockConnection); workflowStep.runStep(schedule); assertThat(schedule.getFilesToDownload().stream().map(t -> t.getFile()).collect(toList())) .isEqualTo(Arrays.asList(file2)); } @Test public void shouldOnlyAddOneInstanceOfAFileEvenIfTwoFiltersMatch() { ScheduleConfiguration config = new ScheduleConfiguration(null, null, null, 0, null, "remote/", "local/", FileTransferType.FILE, false, false, false); config.setFilters(Arrays.asList("file1", "file2", "file2")); ArrayList files = new ArrayList(); FTPFile file1 = new FTPFile("file1", 0, "remote/", DateTime.now().minusDays(2).getMillis(), false); FTPFile file2 = new FTPFile("file2", 0, "remote/", DateTime.now().getMillis(), false); FTPFile file3 = new FTPFile("file3", 0, "remote/", DateTime.now().minusDays(2).getMillis(), false); FTPFile file4 = new FTPFile("file4", 0, "remote/", DateTime.now().minusDays(2).getMillis(), false); FTPFile file5 = new FTPFile("file5", 0, "remote/", DateTime.now().getMillis(), false); files.add(file1); files.add(file2); files.add(file3); files.add(file4); files.add(file5); when(mockConnection.listFiles("remote/")).thenReturn(files); ScheduleWorkflow schedule = new ScheduleWorkflow(config); schedule.setConnection(mockConnection); workflowStep.runStep(schedule); assertThat(schedule.getFilesToDownload().stream().map(t -> t.getFile()).collect(toList())) .isEqualTo(Arrays.asList(file1, file2)); } @Test public void workflowStepShouldFilterOutAnyFilesThatDoNotMatchTheWildcards() { ScheduleConfiguration config = new ScheduleConfiguration(null, null, null, 0, null, "remote/", "local/", FileTransferType.FILE, false, false, false); 
config.setFilters(Arrays.asList("file1?and?Stuff", "file2*something", "file4*", "file5")); ArrayList files = new ArrayList(); FTPFile file1 = new FTPFile("file1.and-stuff", 0, "remote/", DateTime.now().minusDays(2).getMillis(), false); FTPFile file2 = new FTPFile("file2.andMoreTextsomething", 0, "remote/", DateTime.now().getMillis(), false); FTPFile file3 = new FTPFile("file3", 0, "remote/", DateTime.now().minusDays(2).getMillis(), false); FTPFile file4 = new FTPFile("file4.txt", 0, "remote/", DateTime.now().getMillis(), false); FTPFile file5 = new FTPFile("file5.txt", 0, "remote/", DateTime.now().getMillis(), false); files.add(file1); files.add(file2); files.add(file3); files.add(file4); files.add(file5); when(mockConnection.listFiles("remote/")).thenReturn(files); ScheduleWorkflow schedule = new ScheduleWorkflow(config); schedule.setConnection(mockConnection); workflowStep.runStep(schedule); assertThat(schedule.getFilesToDownload().stream().map(t -> t.getFile()).collect(toList())) .isEqualTo(Arrays.asList(file1, file2, file4)); } @Test public void workflowStepShouldCallNextStepRunMethodOnceSettingFilters() { ScheduleConfiguration config = new ScheduleConfiguration(null, null, null, 0, null, "remote/", "local/", FileTransferType.FILE, false, false, false); config.setFilters(Arrays.asList("file1", "file2", "file4")); ArrayList files = new ArrayList(); FTPFile file1 = new FTPFile("file1", 0, "remote/", DateTime.now().getMillis(), false); files.add(file1); when(mockConnection.listFiles("remote/")).thenReturn(files); ScheduleWorkflow schedule = new ScheduleWorkflow(config); schedule.setConnection(mockConnection); workflowStep.runStep(schedule); InOrder inOrder = Mockito.inOrder(mockNextStep); assertThat(schedule.getFilesToDownload().stream().map(t -> t.getFile()).collect(toList())) .isEqualTo(Arrays.asList(file1)); inOrder.verify(mockNextStep).runStep(schedule); } @Test public void ifFilterListIsInitiallyEmptyThenAssumeThatAllFilesShouldBeDownloaded() { 
ScheduleConfiguration config = new ScheduleConfiguration(null, null, null, 0, null, "remote/", "local/", FileTransferType.FILE, false, false, false); ArrayList files = new ArrayList(); FTPFile file1 = new FTPFile("file1", 0, "remote/", DateTime.now().minusDays(2).getMillis(), false); FTPFile file2 = new FTPFile("file2", 0, "remote/", DateTime.now().getMillis(), false); FTPFile file3 = new FTPFile("file3", 0, "remote/", DateTime.now().minusDays(2).getMillis(), false); FTPFile file4 = new FTPFile("file4", 0, "remote/", DateTime.now().minusDays(2).getMillis(), false); FTPFile file5 = new FTPFile("file5", 0, "remote/", DateTime.now().getMillis(), false); files.add(file1); files.add(file2); files.add(file3); files.add(file4); files.add(file5); when(mockConnection.listFiles("remote/")).thenReturn(files); ScheduleWorkflow schedule = new ScheduleWorkflow(config); schedule.setConnection(mockConnection); workflowStep.runStep(schedule); assertThat(schedule.getFilesToDownload().stream().map(t -> t.getFile()).collect(toList())) .isEqualTo(Arrays.asList(file1, file2, file3, file4, file5)); } @Test public void ifFilterListIsInitiallyEmptyButFiltersAreMandatoryThenNoFilesShouldBeDownloaded() { ScheduleConfiguration config = new ScheduleConfiguration(null, null, null, 0, null, "remote/", "local/", FileTransferType.FILE, true, false, false); ArrayList files = new ArrayList(); FTPFile file1 = new FTPFile("file1", 0, "remote/", DateTime.now().minusDays(2).getMillis(), false); FTPFile file2 = new FTPFile("file2", 0, "remote/", DateTime.now().getMillis(), false); FTPFile file3 = new FTPFile("file3", 0, "remote/", DateTime.now().minusDays(2).getMillis(), false); FTPFile file4 = new FTPFile("file4", 0, "remote/", DateTime.now().minusDays(2).getMillis(), false); FTPFile file5 = new FTPFile("file5", 0, "remote/", DateTime.now().getMillis(), false); files.add(file1); files.add(file2); files.add(file3); files.add(file4); files.add(file5); 
when(mockConnection.listFiles("remote/")).thenReturn(files); ScheduleWorkflow schedule = new ScheduleWorkflow(config); schedule.setConnection(mockConnection); workflowStep.runStep(schedule); assertThat(schedule.getFilesToDownload().stream().map(t -> t.getFile()).collect(toList())).isEmpty(); } @Test public void shouldFilterFilesThatDoNotMatchSetFiltersIfInvertingSet() { ScheduleConfiguration config = new ScheduleConfiguration(null, null, null, 0, null, "remote/", "local/", FileTransferType.FILE, false, true, false); config.setFilters(Arrays.asList("file1?and?Stuff", "file2*something", "file4*", "file5")); ArrayList files = new ArrayList(); FTPFile file1 = new FTPFile("file1.and-stuff", 0, "remote/", DateTime.now().minusDays(2).getMillis(), false); FTPFile file2 = new FTPFile("file2.andMoreTextsomething", 0, "remote/", DateTime.now().getMillis(), false); FTPFile file3 = new FTPFile("file3", 0, "remote/", DateTime.now().minusDays(2).getMillis(), false); FTPFile file4 = new FTPFile("file4.txt", 0, "remote/", DateTime.now().getMillis(), false); FTPFile file5 = new FTPFile("file5.txt", 0, "remote/", DateTime.now().getMillis(), false); files.add(file1); files.add(file2); files.add(file3); files.add(file4); files.add(file5); when(mockConnection.listFiles("remote/")).thenReturn(files); ScheduleWorkflow schedule = new ScheduleWorkflow(config); schedule.setConnection(mockConnection); workflowStep.runStep(schedule); assertThat(schedule.getFilesToDownload().stream().map(t -> t.getFile()).collect(toList())) .isEqualTo(Arrays.asList(file3, file5)); } @Test public void ifListingFilesIsUnsuccessfulThenDoNotCallNextStepAndCallBackupStepInstead() { ScheduleConfiguration config = new ScheduleConfiguration(null, null, null, 0, null, "remote/", "local/", FileTransferType.FILE, false, false, false); when(mockConnection.listFiles("remote/")).thenThrow(new FileListingException()); ScheduleWorkflow schedule = new ScheduleWorkflow(config); schedule.setConnection(mockConnection); 
workflowStep.runStep(schedule);

        verify(mockNextStep, never()).runStep(schedule);
        verify(mockBackupStep).runStep(schedule);
    }
}

================================================
FILE: src/test/java/io/linuxserver/davos/schedule/workflow/actions/HttpAPICallActionTest.java
================================================
package io.linuxserver.davos.schedule.workflow.actions;

import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.eq;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import static org.mockito.MockitoAnnotations.initMocks;

import org.junit.Before;
import org.junit.Test;
import org.mockito.ArgumentCaptor;
import org.mockito.Captor;
import org.mockito.InjectMocks;
import org.mockito.Mock;
import org.springframework.http.HttpEntity;
import org.springframework.http.HttpHeaders;
import org.springframework.http.HttpMethod;
import org.springframework.http.MediaType;
import org.springframework.http.converter.HttpMessageConversionException;
import org.springframework.web.client.RestClientException;
import org.springframework.web.client.RestTemplate;

/**
 * Unit tests for {@link HttpAPICallAction}: verifies the $filename placeholder
 * is resolved in both the request body and the URL, that the configured
 * content type is sent, and that REST client failures are swallowed rather
 * than propagated into the workflow.
 */
public class HttpAPICallActionTest {

    @InjectMocks
    private HttpAPICallAction httpAPICallAction;

    @Mock
    private RestTemplate mockRestTemplate;

    // Fix: captor was declared raw (generic parameter lost in extraction).
    // The action posts an HttpEntity<String>, so capture exactly that type.
    @Captor
    private ArgumentCaptor<HttpEntity<String>> entityCaptor;

    @Before
    public void setUp() {
        httpAPICallAction = new HttpAPICallAction("http://url", "POST", "application/json", "{\"hello\":\"$filename\"}");
        initMocks(this);
    }

    @Test
    public void shouldCallRestTemplateWithCorrectParams() {

        PostDownloadExecution execution = new PostDownloadExecution();
        execution.fileName = "file.txt";

        httpAPICallAction.execute(execution);

        verify(mockRestTemplate).exchange(eq("http://url"), eq(HttpMethod.POST), entityCaptor.capture(), eq(Object.class));

        // $filename in the body template must be replaced by the downloaded file's name.
        String body = entityCaptor.getValue().getBody();
        assertThat(body).isEqualTo("{\"hello\":\"file.txt\"}");
    }

    @Test
    public void shouldResolveFilenameInUrlAsWell() {

        PostDownloadExecution execution = new PostDownloadExecution();
        execution.fileName = "file.txt";

        // Re-create the action with a $filename placeholder in the URL itself.
        httpAPICallAction = new HttpAPICallAction("http://url?file=$filename", "POST", "application/json", "{\"hello\":\"$filename\"}");
        initMocks(this);

        httpAPICallAction.execute(execution);

        verify(mockRestTemplate).exchange(eq("http://url?file=file.txt"), eq(HttpMethod.POST), entityCaptor.capture(), eq(Object.class));

        String body = entityCaptor.getValue().getBody();
        assertThat(body).isEqualTo("{\"hello\":\"file.txt\"}");
    }

    @Test
    public void postDataShouldHaveCorrectHeaderValue() {

        PostDownloadExecution execution = new PostDownloadExecution();
        execution.fileName = "filename";

        httpAPICallAction.execute(execution);

        verify(mockRestTemplate).exchange(eq("http://url"), eq(HttpMethod.POST), entityCaptor.capture(), eq(Object.class));

        HttpHeaders headers = entityCaptor.getValue().getHeaders();
        assertThat(headers.getContentType()).isEqualTo(MediaType.APPLICATION_JSON);
    }

    @Test
    public void ifRestTemplateFailsThenDoNothing() {

        PostDownloadExecution execution = new PostDownloadExecution();
        execution.fileName = "filename";

        when(mockRestTemplate.exchange(eq("http://url"), eq(HttpMethod.POST), any(HttpEntity.class), eq(Object.class)))
                .thenThrow(new RestClientException(""));

        // Must not throw: a failed callback should never break the schedule run.
        httpAPICallAction.execute(execution);
    }

    @Test
    public void ifRestTemplateFailsBecauseMessageIsUnreadbleThenDoNothing() {

        PostDownloadExecution execution = new PostDownloadExecution();
        execution.fileName = "filename";

        when(mockRestTemplate.exchange(eq("http://url"), eq(HttpMethod.POST), any(HttpEntity.class), eq(Object.class)))
                .thenThrow(new HttpMessageConversionException(""));

        // Must not throw.
        httpAPICallAction.execute(execution);
    }
}

================================================
FILE: src/test/java/io/linuxserver/davos/schedule/workflow/actions/MoveFileActionTest.java
================================================
package io.linuxserver.davos.schedule.workflow.actions;

import static org.mockito.Matchers.anyString;
import static org.mockito.Mockito.doThrow;
import static org.mockito.Mockito.verify;
import static org.mockito.MockitoAnnotations.initMocks;

import java.io.IOException;

import org.junit.Before;
import org.junit.Test;
import org.mockito.InjectMocks;
import org.mockito.Mock;

import io.linuxserver.davos.util.FileUtils;

/**
 * Unit tests for {@link MoveFileAction}: the action moves a downloaded file
 * from its old path into a new directory, and must swallow I/O failures.
 */
public class MoveFileActionTest {

    @InjectMocks
    private MoveFileAction moveFileAction = new MoveFileAction("oldPath", "newPath");

    @Mock
    private FileUtils mockFileUtils;

    @Before
    public void setUp() {
        initMocks(this);
    }

    @Test
    public void executeShouldMoveTheFile() throws IOException {

        PostDownloadExecution execution = new PostDownloadExecution();
        execution.fileName = "filename";

        moveFileAction.execute(execution);

        verify(mockFileUtils).moveFileToDirectory("oldPath/filename", "newPath/");
    }

    @Test
    public void ifMovingOfFileFailsThenDoNotPerpetuateError() throws IOException {

        doThrow(new IOException()).when(mockFileUtils).moveFileToDirectory(anyString(), anyString());

        PostDownloadExecution execution = new PostDownloadExecution();
        execution.fileName = "filename";

        // Must not throw: move failures are best-effort.
        moveFileAction.execute(execution);
    }
}

================================================
FILE: src/test/java/io/linuxserver/davos/schedule/workflow/actions/PushbulletNotifyActionTest.java
================================================
package io.linuxserver.davos.schedule.workflow.actions;

import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.eq;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import static org.mockito.MockitoAnnotations.initMocks;

import org.junit.Before;
import org.junit.Test;
import org.mockito.ArgumentCaptor;
import org.mockito.Captor;
import org.mockito.InjectMocks;
import org.mockito.Mock;
import org.springframework.http.HttpEntity;
import org.springframework.http.HttpHeaders;
import org.springframework.http.HttpMethod;
import org.springframework.http.MediaType;
import
org.springframework.http.converter.HttpMessageConversionException;
import org.springframework.web.client.RestClientException;
import org.springframework.web.client.RestTemplate;

import io.linuxserver.davos.schedule.workflow.actions.PushbulletNotifyAction.PushbulletRequest;

/**
 * Unit tests for {@link PushbulletNotifyAction}: verifies the "note" payload
 * sent to the Pushbullet API, the Authorization/Content-Type headers, and
 * that REST failures are swallowed rather than propagated.
 */
public class PushbulletNotifyActionTest {

    @InjectMocks
    private PushbulletNotifyAction pushbulletNotifyAction;

    @Mock
    private RestTemplate mockRestTemplate;

    // Fix: captor was declared raw (generic parameter lost in extraction).
    // The action posts an HttpEntity<PushbulletRequest>.
    @Captor
    private ArgumentCaptor<HttpEntity<PushbulletRequest>> entityCaptor;

    @Before
    public void setUp() {
        pushbulletNotifyAction = new PushbulletNotifyAction("apiKey");
        initMocks(this);
    }

    @Test
    public void executeShouldSendCorrectData() {

        PostDownloadExecution execution = new PostDownloadExecution();
        execution.fileName = "filename";

        pushbulletNotifyAction.execute(execution);

        verify(mockRestTemplate).exchange(eq("https://api.pushbullet.com/v2/pushes"), eq(HttpMethod.POST),
                entityCaptor.capture(), eq(Object.class));

        PushbulletRequest request = entityCaptor.getValue().getBody();

        assertThat(request.type).isEqualTo("note");
        assertThat(request.title).isEqualTo("A new file has been downloaded");
        assertThat(request.body).isEqualTo("filename");
    }

    @Test
    public void postDataShouldHaveCorrectHeaderValue() {

        PostDownloadExecution execution = new PostDownloadExecution();
        execution.fileName = "filename";

        pushbulletNotifyAction.execute(execution);

        verify(mockRestTemplate).exchange(eq("https://api.pushbullet.com/v2/pushes"), eq(HttpMethod.POST),
                entityCaptor.capture(), eq(Object.class));

        HttpHeaders headers = entityCaptor.getValue().getHeaders();

        assertThat(headers.getContentType()).isEqualTo(MediaType.APPLICATION_JSON);
        assertThat(headers.get("Authorization").get(0)).isEqualTo("Bearer apiKey");
    }

    @Test
    public void ifRestTemplateFailsThenDoNothing() {

        when(mockRestTemplate.exchange(eq("https://api.pushbullet.com/v2/pushes"), eq(HttpMethod.POST),
                any(HttpEntity.class), eq(Object.class))).thenThrow(new RestClientException(""));

        // Must not throw.
        pushbulletNotifyAction.execute(new PostDownloadExecution());
    }

    @Test
    public void ifRestTemplateFailsBecauseMessageIsUnreadbleThenDoNothing() {

        when(mockRestTemplate.exchange(eq("https://api.pushbullet.com/v2/pushes"), eq(HttpMethod.POST),
                any(HttpEntity.class), eq(Object.class))).thenThrow(new HttpMessageConversionException(""));

        // Must not throw.
        pushbulletNotifyAction.execute(new PostDownloadExecution());
    }
}

================================================
FILE: src/test/java/io/linuxserver/davos/schedule/workflow/filter/ReferentialFileFilterTest.java
================================================
package io.linuxserver.davos.schedule.workflow.filter;

import static org.assertj.core.api.Assertions.assertThat;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

import org.junit.Test;

import io.linuxserver.davos.transfer.ftp.FTPFile;

/**
 * Unit tests for {@link ReferentialFileFilter}: the filter drops files whose
 * names already appeared in the previous scan.
 */
public class ReferentialFileFilterTest {

    @Test
    public void shouldReturnAllFTPFilesIfLastScanIsEmpty() {

        // Fix: generic type parameters restored on the List declarations
        // (they were stripped during extraction).
        List<FTPFile> newFiles = new ArrayList<>();
        newFiles.add(new FTPFile("file1", 0, "", 0, false));
        newFiles.add(new FTPFile("file2", 0, "", 0, false));
        newFiles.add(new FTPFile("file3", 0, "", 0, false));

        List<String> oldFiles = new ArrayList<>();

        List<FTPFile> filteredFiles = new ReferentialFileFilter(oldFiles).filter(newFiles);

        assertThat(filteredFiles).hasSize(3);
        assertThat(filteredFiles.get(0).getName()).isEqualTo("file1");
        assertThat(filteredFiles.get(1).getName()).isEqualTo("file2");
        assertThat(filteredFiles.get(2).getName()).isEqualTo("file3");
    }

    @Test
    public void shouldReturnFilteredFTPFilesIfLastScanIsMissingFiles() {

        List<FTPFile> newFiles = new ArrayList<>();
        newFiles.add(new FTPFile("file1", 0, "", 0, false));
        newFiles.add(new FTPFile("file2", 0, "", 0, false));
        newFiles.add(new FTPFile("file3", 0, "", 0, false));

        List<String> oldFiles = Arrays.asList("file1", "file3");

        List<FTPFile> filteredFiles = new ReferentialFileFilter(oldFiles).filter(newFiles);

        // Only file2 is new; file1 and file3 were seen in the last scan.
        assertThat(filteredFiles).hasSize(1);
        assertThat(filteredFiles.get(0).getName()).isEqualTo("file2");
    }

    @Test
    public void shouldReturnEmptyListIfNewFilesAreEmpty() {

        List<FTPFile> newFiles = new ArrayList<>();
        List<String> oldFiles = Arrays.asList("file1", "file3");

        List<FTPFile> filteredFiles = new ReferentialFileFilter(oldFiles).filter(newFiles);

        assertThat(filteredFiles).isEmpty();
    }
}

================================================
FILE: src/test/java/io/linuxserver/davos/schedule/workflow/transfer/FilesAndFoldersTranferStrategyTest.java
================================================
package io.linuxserver.davos.schedule.workflow.transfer;

import static org.mockito.Mockito.verify;
import static org.mockito.MockitoAnnotations.initMocks;

import org.junit.Before;
import org.junit.Test;
import org.mockito.Mock;

import io.linuxserver.davos.transfer.ftp.FTPFile;
import io.linuxserver.davos.transfer.ftp.connection.Connection;

/**
 * Unit tests for {@link FilesAndFoldersTranferStrategy}: this strategy
 * downloads both plain files and directories.
 */
public class FilesAndFoldersTranferStrategyTest {

    private FilesAndFoldersTranferStrategy strategy;

    @Mock
    private Connection mockConnection;

    @Before
    public void setUp() {
        initMocks(this);
        strategy = new FilesAndFoldersTranferStrategy(mockConnection);
    }

    @Test
    public void strategyShouldCallDownloadMethodForFiles() {

        FTPFile file = new FTPFile("file1", 0, "remotePath/", 0, false);

        strategy.transferFile(new FTPTransfer(file), "destination");

        verify(mockConnection).download(file, "destination/");
    }

    @Test
    public void strategyShouldCallDownloadMethodForDirectories() {

        FTPFile file = new FTPFile("file1", 0, "remotePath/", 0, true);

        strategy.transferFile(new FTPTransfer(file), "destination");

        verify(mockConnection).download(file, "destination/");
    }
}

================================================
FILE: src/test/java/io/linuxserver/davos/schedule/workflow/transfer/FilesOnlyTransferStrategyTest.java
================================================
package io.linuxserver.davos.schedule.workflow.transfer;

import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.anyString;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.verify;
import static
org.mockito.MockitoAnnotations.initMocks;

import org.junit.Before;
import org.junit.Test;
import org.mockito.Mock;

import io.linuxserver.davos.transfer.ftp.FTPFile;
import io.linuxserver.davos.transfer.ftp.connection.Connection;
import io.linuxserver.davos.transfer.ftp.connection.progress.ProgressListener;

/**
 * Unit tests for {@link FilesOnlyTransferStrategy}: this strategy downloads
 * plain files only, skipping directories and nulling any progress listener
 * attached to a directory transfer.
 */
public class FilesOnlyTransferStrategyTest {

    private FilesOnlyTransferStrategy strategy;

    @Mock
    private Connection mockConnection;

    @Before
    public void setUp() {
        initMocks(this);
        strategy = new FilesOnlyTransferStrategy(mockConnection);
    }

    @Test
    public void strategyShouldCallDownloadMethodForFiles() {

        FTPFile file = new FTPFile("file1", 0, "remotePath/", 0, false);

        strategy.transferFile(new FTPTransfer(file), "destination");

        verify(mockConnection).download(file, "destination/");
    }

    @Test
    public void strategyShouldNotCallDownloadMethodForDirectories() {

        FTPFile file = new FTPFile("file1", 0, "remotePath/", 0, true);

        strategy.transferFile(new FTPTransfer(file), "destination");

        verify(mockConnection, never()).download(any(FTPFile.class), anyString());
    }

    @Test
    public void shouldNullifyListenerIfTransferIsForAFolder() {

        FTPFile file = new FTPFile("file1", 0, "remotePath/", 0, true);

        FTPTransfer transfer = new FTPTransfer(file);
        transfer.setListener(new ProgressListener());

        assertThat(transfer.getListener()).isNotNull();

        strategy.transferFile(transfer, "destination");

        // Listener is detached because nothing will be downloaded for a folder.
        assertThat(transfer.getListener()).isNull();
    }
}

================================================
FILE: src/test/java/io/linuxserver/davos/schedule/workflow/transfer/TransferStrategyFactoryTest.java
================================================
package io.linuxserver.davos.schedule.workflow.transfer;

import static org.assertj.core.api.Assertions.assertThat;

import org.junit.Test;

import io.linuxserver.davos.transfer.ftp.FileTransferType;

/**
 * Unit tests for {@link TransferStrategyFactory}: maps each
 * {@link FileTransferType} to its strategy implementation.
 */
public class TransferStrategyFactoryTest {

    @Test
    public void shouldReturnCorrectStrategies() {
        assertThat(new TransferStrategyFactory().getStrategy(FileTransferType.FILE, null))
                .isInstanceOf(FilesOnlyTransferStrategy.class);
        assertThat(new TransferStrategyFactory().getStrategy(FileTransferType.RECURSIVE, null))
                .isInstanceOf(FilesAndFoldersTranferStrategy.class);
    }
}

================================================
FILE: src/test/java/io/linuxserver/davos/schedule/workflow/transfer/TransferStrategyTest.java
================================================
package io.linuxserver.davos.schedule.workflow.transfer;

import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;

import java.util.Arrays;

import org.junit.Test;
import org.mockito.ArgumentCaptor;

import io.linuxserver.davos.schedule.workflow.actions.PostDownloadAction;
import io.linuxserver.davos.schedule.workflow.actions.PostDownloadExecution;
import io.linuxserver.davos.transfer.ftp.FTPFile;
import io.linuxserver.davos.transfer.ftp.connection.Connection;

/**
 * Unit tests for the abstract {@link TransferStrategy} base class, using
 * small local subclasses to exercise toString() and the post-download
 * action hook.
 */
public class TransferStrategyTest {

    @Test
    public void toStringShouldPrintClassName() {
        assertThat(new TestTransferStrategy(null).toString()).isEqualTo("TestTransferStrategy");
        assertThat(new AnotherTestTransferStrategy(null).toString()).isEqualTo("AnotherTestTransferStrategy");
    }

    @Test
    public void runPostDownloadActionShouldCallAllGivenActionsWithTheFile() {

        // Fix: captor was declared raw (generic parameter lost in extraction).
        ArgumentCaptor<PostDownloadExecution> captor = ArgumentCaptor.forClass(PostDownloadExecution.class);

        DownloadActionImplTestTransferStrategy strategy = new DownloadActionImplTestTransferStrategy(null);

        PostDownloadAction mockAction1 = mock(PostDownloadAction.class);
        PostDownloadAction mockAction2 = mock(PostDownloadAction.class);

        strategy.setPostDownloadActions(Arrays.asList(mockAction1, mockAction2));
        strategy.transferFile(new FTPTransfer(new FTPFile("file1", 0, null, 0, false)), "destination/");

        // Every configured action runs, each receiving the downloaded file's name.
        verify(mockAction1).execute(captor.capture());
        verify(mockAction2).execute(captor.capture());

        assertThat(captor.getAllValues().get(0).fileName).isEqualTo("file1");
        assertThat(captor.getAllValues().get(1).fileName).isEqualTo("file1");
    }

    @Test
    public void ensureNulLActionsAreCheckedBeforeAttemptingToRun() {

        DownloadActionImplTestTransferStrategy strategy = new DownloadActionImplTestTransferStrategy(null);
        strategy.setPostDownloadActions(null);

        // Must not NPE when no actions are configured.
        strategy.transferFile(new FTPTransfer(new FTPFile("file1", 0, null, 0, false)), "destination/");
    }

    // No-op subclass used only to exercise toString().
    class TestTransferStrategy extends TransferStrategy {

        public TestTransferStrategy(Connection connection) {
            super(connection);
        }

        @Override
        public void transferFile(FTPTransfer fileToTransfer, String destination) {
        }
    }

    // Second no-op subclass to show toString() reflects the concrete class name.
    class AnotherTestTransferStrategy extends TransferStrategy {

        public AnotherTestTransferStrategy(Connection connection) {
            super(connection);
        }

        @Override
        public void transferFile(FTPTransfer fileToTransfer, String destination) {
        }
    }

    // Subclass that delegates straight to the post-download action hook.
    class DownloadActionImplTestTransferStrategy extends TransferStrategy {

        public DownloadActionImplTestTransferStrategy(Connection connection) {
            super(connection);
        }

        @Override
        public void transferFile(FTPTransfer fileToTransfer, String destination) {
            runPostDownloadAction(fileToTransfer.getFile());
        }
    }
}

================================================
FILE: src/test/java/io/linuxserver/davos/transfer/ftp/client/ClientFactoryTest.java
================================================
package io.linuxserver.davos.transfer.ftp.client;

import static org.assertj.core.api.Assertions.assertThat;

import org.junit.Test;

import io.linuxserver.davos.transfer.ftp.TransferProtocol;

/**
 * Unit tests for {@link ClientFactory}: maps each {@link TransferProtocol}
 * to its client implementation.
 */
public class ClientFactoryTest {

    @Test
    public void shouldReturnSFTPClientWhenProtocolIsSFTP() {
        assertThat(new ClientFactory().getClient(TransferProtocol.SFTP)).isInstanceOf(SFTPClient.class);
    }

    @Test
    public void shouldReturnFTPClientForAnythingElse() {
        assertThat(new ClientFactory().getClient(TransferProtocol.FTP)).isInstanceOf(FTPClient.class);
        assertThat(new ClientFactory().getClient(TransferProtocol.FTPS)).isInstanceOf(FTPSClient.class);
    }
}
================================================
FILE: src/test/java/io/linuxserver/davos/transfer/ftp/client/FTPClientTest.java
================================================
package io.linuxserver.davos.transfer.ftp.client;

import static org.assertj.core.api.Assertions.assertThat;
import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.CoreMatchers.is;
import static org.mockito.Mockito.doThrow;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import static org.mockito.MockitoAnnotations.initMocks;
import java.io.IOException;
import java.net.SocketException;
import java.net.UnknownHostException;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
import org.mockito.InOrder;
import org.mockito.InjectMocks;
import org.mockito.Mock;
import org.mockito.Mockito;
import io.linuxserver.davos.transfer.ftp.connection.Connection;
import io.linuxserver.davos.transfer.ftp.connection.ConnectionFactory;
import io.linuxserver.davos.transfer.ftp.connection.FTPConnection;
import io.linuxserver.davos.transfer.ftp.exception.ClientConnectionException;
import io.linuxserver.davos.transfer.ftp.exception.ClientDisconnectException;

/*
 * Unit tests for FTPClient. The Apache Commons Net client is mocked; these
 * tests pin down the connect sequence (connect -> passive mode -> login ->
 * binary file type), reply-code/login failure handling, and the disconnect
 * guard logic.
 */
public class FTPClientTest {

    @InjectMocks
    public FTPClient ftpClient = new FTPClient();

    // Fully-qualified to avoid clashing with the project's own FTPClient class under test.
    @Mock
    private org.apache.commons.net.ftp.FTPClient mockFtpClient;

    @Mock
    private ConnectionFactory mockConnectionFactory;

    @Rule
    public ExpectedException expectedException = ExpectedException.none();

    private String hostname;
    private int port;
    private UserCredentials userCredentials;

    @Before
    public void setUp() throws IOException {

        initMocks(this);

        hostname = "this is a hostname";
        port = 80;
        userCredentials = new UserCredentials("thisisausername", "thisisapassword");

        ftpClient.setHost(hostname);
        ftpClient.setPort(port);
        ftpClient.setCredentials(userCredentials);

        // Happy path by default: good reply code, successful login, connected.
        when(mockFtpClient.getReplyCode()).thenReturn(200);
        when(mockFtpClient.login(userCredentials.getUsername(), userCredentials.getPassword())).thenReturn(true);
        when(mockFtpClient.isConnected()).thenReturn(true);
        when(mockConnectionFactory.createFTPConnection(mockFtpClient)).thenReturn(new FTPConnection(mockFtpClient));
    }

    @Test
    public void newFtpClientShouldCreateFTPClientInstance() {
        assertThat(ftpClient.ftpClient).isInstanceOf(org.apache.commons.net.ftp.FTPClient.class);
    }

    @Test
    public void connectMethodShouldCallonUnderlyingFtpClientConnectMethodWithHostname() throws SocketException, IOException {

        ftpClient.connect();

        verify(mockFtpClient).connect(hostname, port);
    }

    @Test
    public void connectMethodShouldEnterPassiveModeLoginToUnderlyingFtpClient() throws IOException {

        ftpClient.connect();

        // Passive mode must be entered BEFORE logging in.
        InOrder inOrder = Mockito.inOrder(mockFtpClient);
        inOrder.verify(mockFtpClient).enterLocalPassiveMode();
        inOrder.verify(mockFtpClient).login(userCredentials.getUsername(), userCredentials.getPassword());
    }

    @Test
    public void connectMethodShouldSetKeepAliveCommandToEveryFiveMinutes() {

        ftpClient.connect();

        // 300 seconds = 5 minutes.
        verify(mockFtpClient).setControlKeepAliveTimeout(300);
    }

    @Test
    public void onceLoggedInTheClientShouldHaveFileTypeSetToBinary() throws IOException {

        ftpClient.connect();

        // Binary mode must be set AFTER a successful login.
        InOrder inOrder = Mockito.inOrder(mockFtpClient);
        inOrder.verify(mockFtpClient).login(userCredentials.getUsername(), userCredentials.getPassword());
        inOrder.verify(mockFtpClient).setFileType(org.apache.commons.net.ftp.FTPClient.BINARY_FILE_TYPE);
    }

    @Test
    public void connectMethodShouldReturnNewFtpConnectionTakingInUnderlyingFtpClient() {

        Connection connection = ftpClient.connect();

        verify(mockConnectionFactory).createFTPConnection(mockFtpClient);
        assertThat(connection).isInstanceOf(FTPConnection.class);
    }

    @Test
    public void disconnectMethodShouldCallOnUnderlyingFtpClientDisconnectMethod() throws IOException {

        ftpClient.disconnect();

        verify(mockFtpClient).disconnect();
    }

    @Test
    public void ifConnectionFailsThenCatchThrownExceptionAndThrowFtpException() throws SocketException, IOException {

        expectedException.expect(ClientConnectionException.class);
        expectedException.expectMessage(is(equalTo("Unable to connect to host " + hostname + " on port " + port)));

        doThrow(new IOException()).when(mockFtpClient).connect(hostname, port);

        ftpClient.connect();
    }

    @Test
    public void ifConnectionFailsDueToUnknownHostThenCatchThrownExceptionAndThrowFtpException()
            throws SocketException, IOException {

        expectedException.expect(ClientConnectionException.class);
        expectedException.expectMessage(is(equalTo("Unable to connect to host " + hostname + " on port " + port)));

        doThrow(new UnknownHostException()).when(mockFtpClient).connect(hostname, port);

        ftpClient.connect();
    }

    @Test
    public void ifUnderlyingClientReturnsBadConnectionCodeThenThrowConnectionException() {

        expectedException.expect(ClientConnectionException.class);
        expectedException
                .expectMessage(is(equalTo("The host " + hostname + " on port " + port + " returned a bad status code.")));

        // 500-series reply indicates the server rejected the connection.
        when(mockFtpClient.getReplyCode()).thenReturn(500);

        ftpClient.connect();
    }

    @Test
    public void ifUnableToLoginToFtpClientThenThrowFtpException() throws IOException {

        expectedException.expect(ClientConnectionException.class);
        expectedException.expectMessage(is(equalTo("Unable to login for user " + userCredentials.getUsername())));

        when(mockFtpClient.login(userCredentials.getUsername(), userCredentials.getPassword())).thenReturn(false);

        ftpClient.connect();
    }

    @Test
    public void whenDisconnectingThenClientShouldCheckToSeeIfAlreadyDisconnected() {

        ftpClient.disconnect();

        verify(mockFtpClient).isConnected();
    }

    @Test
    public void whenAlreadyDisconnectedThenClientShoudlNotCallOnUnderlyingClientDisconnectMethod() throws IOException {

        when(mockFtpClient.isConnected()).thenReturn(false);

        ftpClient.disconnect();

        // Disconnecting twice must be a no-op.
        verify(mockFtpClient, times(0)).disconnect();
    }

    @Test
    public void whenClientIsStillConnectedThenShouldCallOnUnderlyingClientDisconnectMethod() throws IOException {

        ftpClient.disconnect();

        verify(mockFtpClient).disconnect();
    }

    @Test
    public void ifUnderlyingClientThrowsExceptionWhenDisconnectingThenClientShouldCatchAndRethrow() throws IOException {

        expectedException.expect(ClientDisconnectException.class);
        expectedException.expectMessage(is(equalTo("There was an unexpected error while trying to disconnect.")));

        doThrow(new IOException()).when(mockFtpClient).disconnect();

        ftpClient.disconnect();
    }

    @Test
    public void ifUnderlyingClientIsNullifiedBeforeDisconnectionThenDisconnectShouldThrow() {

        expectedException.expect(ClientDisconnectException.class);
        expectedException.expectMessage(is(equalTo("The underlying client was null.")));

        ftpClient.ftpClient = null;

        ftpClient.disconnect();
    }
}

================================================
FILE: src/test/java/io/linuxserver/davos/transfer/ftp/client/FTPSClientTest.java
================================================
package io.linuxserver.davos.transfer.ftp.client;

import static org.assertj.core.api.Assertions.assertThat;

import org.junit.Test;

/*
 * FTPSClient extends FTPClient; the only FTPS-specific behaviour to pin
 * down is that the underlying Commons Net client is the FTPS variant.
 */
public class FTPSClientTest {

    private FTPClient client = new FTPSClient();

    @Test
    public void newFtpsClientShouldCreateFTPSClientInstance() {
        assertThat(client.ftpClient).isInstanceOf(org.apache.commons.net.ftp.FTPSClient.class);
    }
}

================================================
FILE: src/test/java/io/linuxserver/davos/transfer/ftp/client/SFTPClientTest.java
================================================
package io.linuxserver.davos.transfer.ftp.client;

import static org.assertj.core.api.Assertions.assertThat;
import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.CoreMatchers.is;
import static org.mockito.Matchers.any;
import static org.mockito.Mockito.doThrow;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import static org.mockito.MockitoAnnotations.initMocks;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import
org.junit.rules.ExpectedException;
import org.mockito.Answers;
import org.mockito.InOrder;
import org.mockito.InjectMocks;
import org.mockito.Mock;
import org.mockito.Mockito;
import com.jcraft.jsch.Channel;
import com.jcraft.jsch.ChannelSftp;
import com.jcraft.jsch.JSch;
import com.jcraft.jsch.JSchException;
import com.jcraft.jsch.Session;
import io.linuxserver.davos.transfer.ftp.client.UserCredentials.Identity;
import io.linuxserver.davos.transfer.ftp.connection.Connection;
import io.linuxserver.davos.transfer.ftp.connection.ConnectionFactory;
import io.linuxserver.davos.transfer.ftp.connection.SFTPConnection;
import io.linuxserver.davos.transfer.ftp.exception.ClientDisconnectException;

/*
 * Unit tests for SFTPClient. JSch is mocked with deep stubs so that
 * mockJsch.getSession(...) and session.openChannel(...) return the same mock
 * instances the production code will receive, letting the tests verify the
 * session/channel lifecycle without a real SSH connection.
 */
public class SFTPClientTest {

    private static final String SFTP = "sftp";
    private static final String CONNECTION_ERROR_MESSAGE = "Unable to connect to host %s on port %d";

    // NOTE: field deliberately shadows the class name; code below refers to this instance.
    @InjectMocks
    private SFTPClient SFTPClient = new SFTPClient();

    // Deep stubs so chained calls (getSession().openChannel()) yield stable mocks.
    @Mock(answer = Answers.RETURNS_DEEP_STUBS)
    private JSch mockJsch;

    @Mock
    private ConnectionFactory mockConnectionFactory;

    @Rule
    public ExpectedException expectedException = ExpectedException.none();

    private UserCredentials userCredentials;

    @Before
    public void setUp() throws JSchException {

        initMocks(this);

        userCredentials = new UserCredentials("user", "password");

        SFTPClient.setHost("host");
        SFTPClient.setPort(999);
        SFTPClient.setCredentials(userCredentials);

        when(mockConnectionFactory.createSFTPConnection(any(Channel.class))).thenReturn(new SFTPConnection(new ChannelSftp()));
    }

    @Test
    public void connectMethodShouldCreateSessionUsingHostPortAndUsername() throws JSchException {

        SFTPClient.connect();

        verify(mockJsch).getSession("user", "host", 999);
    }

    @Test
    public void sessionFromInitialConnectionNeedsConfigAndIdentitySettingBeforeConnecting() throws JSchException {

        // Deep-stubbed: returns the same mock session the client will use.
        Session mockSession = mockJsch.getSession("user", "host", 999);
        InOrder inOrder = Mockito.inOrder(mockJsch, mockSession);

        // Key-based credentials: identity must be added, and no password set.
        userCredentials = new UserCredentials("user", new Identity(".ssh/id_rsa"));
        SFTPClient.setCredentials(userCredentials);

        SFTPClient.connect();

        inOrder.verify(mockJsch).addIdentity(".ssh/id_rsa");
        inOrder.verify(mockSession).setConfig("StrictHostKeyChecking", "no");
        inOrder.verify(mockSession, never()).setPassword("password");
        inOrder.verify(mockSession).connect();
    }

    @Test
    public void sessionFromInitialConnectionNeedsConfigAndPasswordSettingBeforeConnecting() throws JSchException {

        Session mockSession = mockJsch.getSession("user", "host", 999);
        InOrder inOrder = Mockito.inOrder(mockSession);

        SFTPClient.connect();

        // Password-based credentials: config then password then connect.
        inOrder.verify(mockSession).setConfig("StrictHostKeyChecking", "no");
        inOrder.verify(mockSession).setPassword("password");
        inOrder.verify(mockSession).connect();
    }

    @Test
    public void returnedSessionObjectShouldSetChannelToSftpAndOpen() throws JSchException {

        Session mockSession = mockJsch.getSession("user", "host", 999);

        SFTPClient.connect();

        verify(mockSession).openChannel(SFTP);
    }

    @Test
    public void ifForAnyReasonTheUnderlyingSessionCantConnectThenCatchTheExceptionAndRethrow() throws JSchException {

        expectedException.expect(RuntimeException.class);
        expectedException.expectMessage(is(equalTo(String.format(CONNECTION_ERROR_MESSAGE, "host", 999))));

        Session mockSession = mockJsch.getSession("user", "host", 999);
        doThrow(new JSchException()).when(mockSession).connect();

        SFTPClient.connect();
    }

    @Test
    public void sessionChannelShouldBeConnectedTo() throws JSchException {

        Session mockSession = mockJsch.getSession("user", "host", 999);
        Channel mockChannel = mockSession.openChannel(SFTP);

        SFTPClient.connect();

        verify(mockChannel).connect();
    }

    @Test
    public void connectMethodShouldReturnLiveInstanceOfSftpChannelWrappedInStfpConnection() {

        Connection connection = SFTPClient.connect();

        assertThat(connection).isInstanceOf(SFTPConnection.class);
    }

    @Test
    public void disconnectMethodShouldDisconnectUnderlyingChannelAndSession() throws JSchException {

        Session mockSession = mockJsch.getSession("user", "host", 999);
        Channel mockChannel = mockSession.openChannel(SFTP);

        SFTPClient.connect();
        SFTPClient.disconnect();

        // Both the channel and its parent session must be torn down.
        verify(mockSession).disconnect();
        verify(mockChannel).disconnect();
    }

    @Test
    public void disconnectMethodShouldThrowExceptionWhenNotInitiallyConnected() {

        expectedException.expect(ClientDisconnectException.class);
        expectedException.expectMessage(is(equalTo("The underlying connection was never initially made.")));

        SFTPClient.disconnect();
    }
}

================================================
FILE: src/test/java/io/linuxserver/davos/transfer/ftp/connection/FTPConnectionTest.java
================================================
package io.linuxserver.davos.transfer.ftp.connection;

import static org.assertj.core.api.Assertions.assertThat;
import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.CoreMatchers.is;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.anyString;
import static org.mockito.Matchers.eq;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import static org.mockito.MockitoAnnotations.initMocks;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.util.Calendar;
import java.util.Date;
import java.util.List;
import org.apache.commons.io.output.CountingOutputStream;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
import org.mockito.InOrder;
import org.mockito.InjectMocks;
import org.mockito.Mock;
import org.mockito.Mockito;
import io.linuxserver.davos.transfer.ftp.FTPFile;
import io.linuxserver.davos.transfer.ftp.connection.progress.ProgressListener;
import io.linuxserver.davos.transfer.ftp.exception.DownloadFailedException;
import io.linuxserver.davos.transfer.ftp.exception.FTPException;
import io.linuxserver.davos.transfer.ftp.exception.FileListingException;
import
io.linuxserver.davos.util.FileStreamFactory; import io.linuxserver.davos.util.FileUtils; public class FTPConnectionTest { private static final String LOCAL_DIRECTORY = "."; private static final String DIRECTORY_PATH = "this/is/a/directory"; @InjectMocks private FTPConnection ftpConnection; @Mock private FileStreamFactory mockFileStreamFactory; @Mock private FileUtils mockFileUtils; @Mock private FileOutputStream mockFileOutputStream; private org.apache.commons.net.ftp.FTPClient mockFtpClient; @Rule public ExpectedException expectedException = ExpectedException.none(); @Before public void setUp() throws IOException { mockFtpClient = mock(org.apache.commons.net.ftp.FTPClient.class); when(mockFtpClient.changeWorkingDirectory(anyString())).thenReturn(true); when(mockFtpClient.printWorkingDirectory()).thenReturn(DIRECTORY_PATH); when(mockFtpClient.retrieveFile(anyString(), any(OutputStream.class))).thenReturn(true); when(mockFtpClient.deleteFile(any(String.class))).thenReturn(true); org.apache.commons.net.ftp.FTPFile[] files = createRemoteFTPFiles(); ftpConnection = new FTPConnection(mockFtpClient); initMocks(this); when(mockFtpClient.listFiles(anyString())).thenReturn(files); when(mockFileStreamFactory.createOutputStream("./remote.file")).thenReturn(mockFileOutputStream); } @Test public void whenListingFilesThenFtpClientListFilesMethodShouldBeCalledForCurrentWorkingDirectory() throws IOException { ftpConnection.listFiles(); verify(mockFtpClient).listFiles("this/is/a/directory/"); } @Test public void ifWhenListingFilesFtpClientThrowsExceptionThenCatchAndRethrowFileListingExcepton() throws IOException { expectedException.expect(FileListingException.class); expectedException.expectMessage(is(equalTo("Unable to list files in directory " + DIRECTORY_PATH))); when(mockFtpClient.listFiles("this/is/a/directory/")).thenThrow(new IOException()); ftpConnection.listFiles(); } @Test public void 
whenListingFilesThenFileArrayThatListFilesReturnsShouldBeConvertedToListOfFtpFilesAndReturned() throws IOException {

    // NOTE(review): generic type parameters (e.g. List<FTPFile>) appear to have been
    // stripped by the text extraction; raw types are kept verbatim below.
    // The three entries mirror the mocked files built in createRemoteFTPFiles().
    List returnedFiles = ftpConnection.listFiles();

    assertThat(returnedFiles.get(0).getName()).isEqualTo("File 1");
    assertThat(returnedFiles.get(0).getSize()).isEqualTo(1000l);
    assertThat(returnedFiles.get(0).getPath()).isEqualTo("this/is/a/directory/");
    assertThat(returnedFiles.get(0).isDirectory()).isFalse();

    assertThat(returnedFiles.get(1).getName()).isEqualTo("File 2");
    assertThat(returnedFiles.get(1).getSize()).isEqualTo(2000l);
    assertThat(returnedFiles.get(1).getPath()).isEqualTo("this/is/a/directory/");
    assertThat(returnedFiles.get(1).isDirectory()).isTrue();

    assertThat(returnedFiles.get(2).getName()).isEqualTo("File 3");
    assertThat(returnedFiles.get(2).getSize()).isEqualTo(3000l);
    assertThat(returnedFiles.get(2).getPath()).isEqualTo("this/is/a/directory/");
    assertThat(returnedFiles.get(2).isDirectory()).isFalse();
}

// Timestamps come from the fixed calendar set in createRemoteFTPFiles() (19 Mar 2014 21:40:00).
@Test
public void returnedFtpFilesShouldHaveCorrectModifiedDateTimesAgainstThem() {

    List files = ftpConnection.listFiles();

    assertThat(files.get(0).getLastModified().toString("dd/MM/yyyy HH:mm:ss")).isEqualTo("19/03/2014 21:40:00");
    assertThat(files.get(1).getLastModified().toString("dd/MM/yyyy HH:mm:ss")).isEqualTo("19/03/2014 21:40:00");
    assertThat(files.get(2).getLastModified().toString("dd/MM/yyyy HH:mm:ss")).isEqualTo("19/03/2014 21:40:00");
}

// A relative path should be passed through to the client with a trailing slash appended.
@Test
public void whenListingFilesAndGivingRelativePathThenThatPathShouldBeUsedAlongsideCurrentWorkingDir() throws IOException {

    ftpConnection.listFiles("relativePath");
    verify(mockFtpClient).listFiles("relativePath/");
}

@Test
public void downloadMethodShouldCreateLocalFileStreamFromCorrectPathBasedOnRemoteFileName() throws FileNotFoundException {

    FTPFile file = new FTPFile("remote.file", 0l, "path/to", 0, false);

    ftpConnection.download(file, LOCAL_DIRECTORY);

    // Local target path is LOCAL_DIRECTORY + "/" + remote file name.
    verify(mockFileStreamFactory).createOutputStream(LOCAL_DIRECTORY + "/remote.file");
}

// When a progress listener is attached, the output stream is wrapped in a CountingOutputStream.
@Test
public void downloadMethodShouldCreateLocalFileStreamContainingProgressListener() throws IOException {

    FTPFile file = new FTPFile("remote.file", 0l, "path/to", 0, false);

    ftpConnection.setProgressListener(new ProgressListener());
    ftpConnection.download(file, LOCAL_DIRECTORY);

    verify(mockFileStreamFactory).createOutputStream(LOCAL_DIRECTORY + "/remote.file");
    verify(mockFtpClient).retrieveFile(eq("path/to/remote.file"), any(CountingOutputStream.class));
}

@Test
public void downloadMethodShouldCallOnFtpClientRetrieveFilesMethodWithRemoteFilename() throws IOException {

    FTPFile file = new FTPFile("remote.file", 0l, "path/to", 0, false);

    ftpConnection.download(file, LOCAL_DIRECTORY);

    verify(mockFtpClient).retrieveFile("path/to/remote.file", mockFileOutputStream);
}

// A FileNotFoundException from the local stream is mapped to DownloadFailedException.
@Test
public void downloadMethodShouldThrowExceptionIfUnableToOpenStreamToLocalFile() throws IOException {

    expectedException.expect(DownloadFailedException.class);
    expectedException.expectMessage(is(equalTo("Unable to write to local directory " + LOCAL_DIRECTORY + "/remote.file")));

    when(mockFtpClient.retrieveFile("path/to/remote.file", mockFileOutputStream)).thenThrow(new FileNotFoundException());

    FTPFile file = new FTPFile("remote.file", 0l, "path/to", 0, false);

    ftpConnection.download(file, LOCAL_DIRECTORY);
}

// Any mid-transfer IOException is also mapped to DownloadFailedException, with a different message.
@Test
public void shouldDownloadFailForAnyReasonWhileInProgressThenCatchIOExceptionAndThrowNewDownloadFailedException() throws IOException {

    expectedException.expect(DownloadFailedException.class);
    expectedException.expectMessage(is(equalTo("Unable to download file path/to/remote.file")));

    when(mockFtpClient.retrieveFile("path/to/remote.file", mockFileOutputStream)).thenThrow(new IOException());

    FTPFile file = new FTPFile("remote.file", 0l, "path/to", 0, false);

    ftpConnection.download(file, LOCAL_DIRECTORY);
}

// retrieveFile returning false (server-side failure, no exception) must still fail the download.
@Test
public void ifRetrieveFileMethodInClientReturnsFalseThenThrowDownloadFailedException() throws IOException {

    expectedException.expect(DownloadFailedException.class);
    expectedException.expectMessage(is(equalTo("Server returned failure while downloading.")));

    when(mockFtpClient.retrieveFile("path/to/remote.file", mockFileOutputStream)).thenReturn(false);

    FTPFile file = new FTPFile("remote.file", 0l, "path/to", 0, false);

    ftpConnection.download(file, LOCAL_DIRECTORY);
}

@Test
public void printingWorkingDirectoryShouldCallOnUnderlyingClientMethodToGetCurrentDirectory() throws IOException {

    ftpConnection.currentDirectory();
    verify(mockFtpClient).printWorkingDirectory();
}

@Test
public void printingWorkingDirectoryShouldReturnExactlyWhatTheUnderlyingClientReturns() {
    assertThat(ftpConnection.currentDirectory()).isEqualTo(DIRECTORY_PATH);
}

@Test
public void ifClientThrowsExceptionWhenTryingToGetWorkingDirectoryThenCatchExceptionAndRethrow() throws IOException {

    expectedException.expect(FileListingException.class);
    expectedException.expectMessage(is(equalTo("Unable to print the working directory")));

    when(mockFtpClient.printWorkingDirectory()).thenThrow(new IOException());

    ftpConnection.currentDirectory();
}

@Test
public void shouldDeleteRemoteFile() throws IOException {

    FTPFile file = new FTPFile("file.name", 0, "/some/directory", 0, false);

    ftpConnection.deleteRemoteFile(file);

    verify(mockFtpClient).deleteFile("/some/directory/file.name");
}

// deleteFile returning false (no exception) should surface as an FTPException with "Unknown reason".
@Test
public void ifDeleteFailsThenExceptionShouldBeThrown() throws IOException {

    expectedException.expect(FTPException.class);
    expectedException.expectMessage(equalTo("Unable to delete file on remote server. Unknown reason"));

    FTPFile file = new FTPFile("file.name", 0, "/some/directory", 0, false);

    when(mockFtpClient.deleteFile(anyString())).thenReturn(false);

    ftpConnection.deleteRemoteFile(file);
}

@Test
public void ifDeleteThrowsExceptionItShouldBeCaughtAndRethrown() throws IOException {

    expectedException.expect(FTPException.class);
    expectedException.expectMessage(equalTo("Unable to delete file on remote server"));

    FTPFile file = new FTPFile("file.name", 0, "/some/directory", 0, false);

    when(mockFtpClient.deleteFile(anyString())).thenThrow(new IOException());

    ftpConnection.deleteRemoteFile(file);
}

// Depth-first delete: files in a directory are removed before recursing, and each directory
// is removed only after its contents are gone. Listings come from initRecursiveListings().
@Test
public void shouldRecursivelyDeleteRemoteFileIfItIsADirectoryWithContents() throws IOException {

    initRecursiveListings();

    ftpConnection.deleteRemoteFile(new FTPFile("folder", 0, "path/to", 0, true));

    InOrder inOrder = Mockito.inOrder(mockFtpClient);

    inOrder.verify(mockFtpClient).listFiles("path/to/folder/");
    inOrder.verify(mockFtpClient).deleteFile("path/to/folder/file1.txt");
    inOrder.verify(mockFtpClient).deleteFile("path/to/folder/file2.txt");
    inOrder.verify(mockFtpClient).listFiles("path/to/folder/directory1/");
    inOrder.verify(mockFtpClient).deleteFile("path/to/folder/directory1/file3.txt");
    inOrder.verify(mockFtpClient).listFiles("path/to/folder/directory1/directory2/");
    inOrder.verify(mockFtpClient).deleteFile("path/to/folder/directory1/directory2/file5.txt");
    inOrder.verify(mockFtpClient).deleteFile("path/to/folder/directory1/directory2/file6.txt");
    inOrder.verify(mockFtpClient).removeDirectory("path/to/folder/directory1/directory2");
    inOrder.verify(mockFtpClient).deleteFile("path/to/folder/directory1/file4.txt");
    inOrder.verify(mockFtpClient).removeDirectory("path/to/folder/directory1");
    inOrder.verify(mockFtpClient).removeDirectory("path/to/folder");
}

// Recursive download: each remote directory is mirrored locally (createLocalDirectory),
// then its files are retrieved; every stream must be closed right after its transfer.
@Test
public void downloadShouldRecursivelyCheckFileIfFolderThenLsThatAndGetOnlyFiles() throws IOException {

    initRecursiveListings();

    FileOutputStream stream1 = mock(FileOutputStream.class);
    FileOutputStream stream2 = mock(FileOutputStream.class);
    FileOutputStream stream3 = mock(FileOutputStream.class);
    FileOutputStream stream4 = mock(FileOutputStream.class);
    FileOutputStream stream5 = mock(FileOutputStream.class);
    FileOutputStream stream6 = mock(FileOutputStream.class);

    when(mockFileStreamFactory.createOutputStream("some/directory/folder/file1.txt")).thenReturn(stream1);
    when(mockFileStreamFactory.createOutputStream("some/directory/folder/file2.txt")).thenReturn(stream2);
    when(mockFileStreamFactory.createOutputStream("some/directory/folder/directory1/file3.txt")).thenReturn(stream3);
    when(mockFileStreamFactory.createOutputStream("some/directory/folder/directory1/directory2/file5.txt"))
            .thenReturn(stream4);
    when(mockFileStreamFactory.createOutputStream("some/directory/folder/directory1/directory2/file6.txt"))
            .thenReturn(stream5);
    when(mockFileStreamFactory.createOutputStream("some/directory/folder/directory1/file4.txt")).thenReturn(stream6);

    FTPFile directory = new FTPFile("folder", 0, "path/to", 0, true);

    ftpConnection.download(directory, "some/directory");

    verify(mockFileUtils).createLocalDirectory("some/directory/folder/");
    verify(mockFtpClient).listFiles("path/to/folder/");
    verify(mockFileUtils).createLocalDirectory("some/directory/folder/directory1/");
    verify(mockFtpClient).listFiles("path/to/folder/directory1/");
    verify(mockFileUtils).createLocalDirectory("some/directory/folder/directory1/directory2/");
    verify(mockFtpClient).listFiles("path/to/folder/directory1/directory2/");

    InOrder inOrder = Mockito.inOrder(mockFtpClient, stream1, stream2, stream3, stream4, stream5, stream6);

    inOrder.verify(mockFtpClient).retrieveFile("path/to/folder/file1.txt", stream1);
    inOrder.verify(stream1).close();
    inOrder.verify(mockFtpClient).retrieveFile("path/to/folder/file2.txt", stream2);
    inOrder.verify(stream2).close();
    inOrder.verify(mockFtpClient).retrieveFile("path/to/folder/directory1/file3.txt", stream3);
    inOrder.verify(stream3).close();
    inOrder.verify(mockFtpClient).retrieveFile("path/to/folder/directory1/directory2/file5.txt", stream4);
    inOrder.verify(stream4).close();
    inOrder.verify(mockFtpClient).retrieveFile("path/to/folder/directory1/directory2/file6.txt", stream5);
    inOrder.verify(stream5).close();
    inOrder.verify(mockFtpClient).retrieveFile("path/to/folder/directory1/file4.txt", stream6);
    inOrder.verify(stream6).close();
}

// Builds a three-level fake remote tree:
//   folder/{file1.txt, file2.txt, directory1/{file3.txt, directory2/{file5.txt, file6.txt}, file4.txt}}
// "." and ".." entries are included so the code under test must skip them.
private void initRecursiveListings() throws IOException {

    org.apache.commons.net.ftp.FTPFile[] entries = new org.apache.commons.net.ftp.FTPFile[5];
    entries[0] = (createSingleEntry(".", 123l, 1394525265, true));
    entries[1] = (createSingleEntry("..", 123l, 1394525265, true));
    entries[2] = (createSingleEntry("file1.txt", 123l, 1394525265, false));
    entries[3] = (createSingleEntry("file2.txt", 456l, 1394652161, false));
    entries[4] = (createSingleEntry("directory1", 789l, 1391879364, true));

    when(mockFtpClient.listFiles("path/to/folder/")).thenReturn(entries);

    org.apache.commons.net.ftp.FTPFile[] subEntries = new org.apache.commons.net.ftp.FTPFile[5];
    subEntries[0] = (createSingleEntry(".", 123l, 1394525265, true));
    subEntries[1] = (createSingleEntry("..", 123l, 1394525265, true));
    subEntries[2] = (createSingleEntry("file3.txt", 789l, 1394525265, false));
    subEntries[3] = (createSingleEntry("directory2", 789l, 1394525265, true));
    subEntries[4] = (createSingleEntry("file4.txt", 789l, 1394525265, false));

    when(mockFtpClient.listFiles("path/to/folder/directory1/")).thenReturn(subEntries);

    org.apache.commons.net.ftp.FTPFile[] subSubEntries = new org.apache.commons.net.ftp.FTPFile[4];
    subSubEntries[0] = (createSingleEntry(".", 123l, 1394525265, true));
    subSubEntries[1] = (createSingleEntry("..", 123l, 1394525265, true));
    subSubEntries[2] = (createSingleEntry("file5.txt", 789l, 1394525265, false));
    subSubEntries[3] = (createSingleEntry("file6.txt", 789l, 1394525265, false));

    when(mockFtpClient.listFiles("path/to/folder/directory1/directory2/")).thenReturn(subSubEntries);
}

// Mocks a single Apache Commons Net FTPFile.
// NOTE(review): mTime looks like an epoch-seconds value but is passed to Date(long millis);
// harmless here since the recursive tests never assert timestamps — confirm if reused.
private
org.apache.commons.net.ftp.FTPFile createSingleEntry(String fileName, long size, int mTime, boolean directory) {

    org.apache.commons.net.ftp.FTPFile file = mock(org.apache.commons.net.ftp.FTPFile.class);

    Calendar calendar = Calendar.getInstance();
    calendar.setTime(new Date(mTime));

    when(file.getName()).thenReturn(fileName);
    when(file.getTimestamp()).thenReturn(calendar);
    when(file.getSize()).thenReturn(size);
    when(file.isDirectory()).thenReturn(directory);

    return file;
}

// Builds the default listing used by the listFiles tests: "." and ".." plus
// "File 1".."File 3" with sizes 1000/2000/3000 and alternating directory flags,
// all stamped 19 Mar 2014 21:40:00 (month is zero-based, hence 2 = March).
private org.apache.commons.net.ftp.FTPFile[] createRemoteFTPFiles() {

    Calendar calendar = Calendar.getInstance();
    calendar.set(2014, 2, 19, 21, 40, 00);

    org.apache.commons.net.ftp.FTPFile[] files = new org.apache.commons.net.ftp.FTPFile[5];

    org.apache.commons.net.ftp.FTPFile currentDir = mock(org.apache.commons.net.ftp.FTPFile.class);
    when(currentDir.getName()).thenReturn(".");
    when(currentDir.getTimestamp()).thenReturn(calendar);

    org.apache.commons.net.ftp.FTPFile parentDir = mock(org.apache.commons.net.ftp.FTPFile.class);
    when(parentDir.getName()).thenReturn("..");
    when(parentDir.getTimestamp()).thenReturn(calendar);

    files[0] = currentDir;
    files[1] = parentDir;

    for (int i = 2; i < 5; i++) {

        org.apache.commons.net.ftp.FTPFile file = mock(org.apache.commons.net.ftp.FTPFile.class);

        when(file.getName()).thenReturn("File " + (i - 1));
        when(file.getSize()).thenReturn((long) (i - 1) * 1000);
        when(file.getTimestamp()).thenReturn(calendar);
        when(file.isDirectory()).thenReturn(setTrueIfNumberIsEven(i));

        files[i] = file;
    }

    return files;
}

// Despite the name, this returns true when (i + 1) is even, i.e. when i is odd.
private boolean setTrueIfNumberIsEven(int i) {
    return (i + 1) % 2 == 0 ?
true : false;
}
}

================================================ FILE: src/test/java/io/linuxserver/davos/transfer/ftp/connection/SFTPConnectionTest.java ================================================

package io.linuxserver.davos.transfer.ftp.connection;

import static org.assertj.core.api.Assertions.assertThat;
import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.CoreMatchers.is;
import static org.mockito.Matchers.anyString;
import static org.mockito.Mockito.doThrow;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import static org.mockito.MockitoAnnotations.initMocks;

import java.util.List;
import java.util.Vector;

import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
import org.mockito.InOrder;
import org.mockito.InjectMocks;
import org.mockito.Mock;
import org.mockito.Mockito;

import com.jcraft.jsch.ChannelSftp;
import com.jcraft.jsch.ChannelSftp.LsEntry;
import com.jcraft.jsch.SftpATTRS;
import com.jcraft.jsch.SftpException;

import io.linuxserver.davos.transfer.ftp.FTPFile;
import io.linuxserver.davos.transfer.ftp.connection.progress.SFTPProgressListener;
import io.linuxserver.davos.transfer.ftp.exception.DownloadFailedException;
import io.linuxserver.davos.transfer.ftp.exception.FTPException;
import io.linuxserver.davos.transfer.ftp.exception.FileListingException;
import io.linuxserver.davos.util.FileUtils;

/**
 * Unit tests for SFTPConnection against a mocked JSch ChannelSftp.
 * NOTE(review): generic type parameters (e.g. Vector<LsEntry>) appear to have
 * been stripped by the text extraction; raw types are kept verbatim.
 */
public class SFTPConnectionTest {

    @InjectMocks
    private SFTPConnection sftpConnection;

    @Mock
    private FileUtils mockFileUtils;

    private ChannelSftp mockChannel;

    @Rule
    public ExpectedException expectedException = ExpectedException.none();

    // Default stubbing: any ls() returns the three entries from createEntries(),
    // and pwd() reports "a/directory" as the working directory.
    @Before
    public void setUp() throws SftpException {

        mockChannel = mock(ChannelSftp.class);

        Vector lsEntries = createEntries();

        when(mockChannel.ls(anyString())).thenReturn(lsEntries);
        when(mockChannel.pwd()).thenReturn("a/directory");

        sftpConnection = new SFTPConnection(mockChannel);

        initMocks(this);
    }

    @Test
    public void listFilesMethodShouldCallOnChannelLsMethodForPresentDirectory() throws SftpException {

        sftpConnection.listFiles();
        verify(mockChannel).ls("a/directory/");
    }

    @Test
    public void whenListingFilesGivingRelativePathThenChannelLsMethodShouldUseGivenPath() throws SftpException {

        sftpConnection.listFiles("some/other/path");
        verify(mockChannel).ls("some/other/path/");
    }

    @Test
    public void ifUnderlyingChannelIsUnableToListFilesInPWDThenExceptionShouldBeCaughtAndRethrown() throws SftpException {

        expectedException.expect(FileListingException.class);
        expectedException.expectMessage(is(equalTo("Unable to list files in directory a/directory")));

        when(mockChannel.ls("a/directory/")).thenThrow(new SftpException(0, ""));

        sftpConnection.listFiles();
    }

    // Entries mirror createEntries(): name, size, working-directory path and directory flag.
    @Test
    public void lsEntriesReturnedFromChannelShouldBeParsedIntoFtpFileAndReturnedInList() {

        List files = sftpConnection.listFiles();

        assertThat(files.get(0).getName()).isEqualTo("File 1");
        assertThat(files.get(0).getSize()).isEqualTo(123l);
        assertThat(files.get(0).getPath()).isEqualTo("a/directory/");
        assertThat(files.get(0).isDirectory()).isTrue();

        assertThat(files.get(1).getName()).isEqualTo("File 2");
        assertThat(files.get(1).getSize()).isEqualTo(456l);
        assertThat(files.get(1).getPath()).isEqualTo("a/directory/");
        assertThat(files.get(1).isDirectory()).isFalse();

        assertThat(files.get(2).getName()).isEqualTo("File 3");
        assertThat(files.get(2).getSize()).isEqualTo(789l);
        assertThat(files.get(2).getPath()).isEqualTo("a/directory/");
        assertThat(files.get(2).isDirectory()).isTrue();
    }

    // SFTP mTime values are epoch seconds; these dates correspond to the
    // 1394525265 / 1394652161 / 1391879364 values stubbed in createEntries().
    @Test
    public void returnedFtpFilesShouldHaveCorrectModifiedDateTimesAgainstThem() {

        List files = sftpConnection.listFiles();

        assertThat(files.get(0).getLastModified().toString("dd/MM/yyyy HH:mm:ss")).isEqualTo("11/03/2014 08:07:45");
        assertThat(files.get(1).getLastModified().toString("dd/MM/yyyy HH:mm:ss")).isEqualTo("12/03/2014 19:22:41");
        assertThat(files.get(2).getLastModified().toString("dd/MM/yyyy HH:mm:ss")).isEqualTo("08/02/2014 17:09:24");
    }

    @Test
    public void printingWorkingDirectoryShouldCallOnUnderlyingClientMethodToGetCurrentDirectory() throws SftpException {

        sftpConnection.currentDirectory();
        verify(mockChannel).pwd();
    }

    @Test
    public void printingWorkingDirectoryShouldReturnExactlyWhatTheUnderlyingClientReturns() {
        assertThat(sftpConnection.currentDirectory()).isEqualTo("a/directory");
    }

    @Test
    public void ifClientThrowsExceptionWhenTryingToGetWorkingDirectoryThenCatchExceptionAndRethrow() throws SftpException {

        expectedException.expect(FileListingException.class);
        expectedException.expectMessage(is(equalTo("Unable to print the working directory")));

        when(mockChannel.pwd()).thenThrow(new SftpException(0, ""));

        sftpConnection.currentDirectory();
    }

    @Test
    public void downloadMethodShouldCallChannelGetMethodWithFtpFileNameAndDirectory() throws SftpException {

        FTPFile file = new FTPFile("name", 0, "path", 0, false);

        sftpConnection.download(file, "some/directory");

        verify(mockChannel).get("path/name", "some/directory/");
    }

    // With a listener set, the 3-arg get() overload must be used instead of the 2-arg one.
    @Test
    public void downloadMethodShouldCallChannelGetMethodWithListenerIfSet() throws SftpException {

        FTPFile file = new FTPFile("name", 0, "path", 0, false);

        SFTPProgressListener progressListener = new SFTPProgressListener();

        sftpConnection.setProgressListener(progressListener);
        sftpConnection.download(file, "some/directory");

        verify(mockChannel).get("path/name", "some/directory/", progressListener);
        verify(mockChannel, never()).get("path/name", "some/directory/");
    }

    @Test
    public void downloadMethodShouldThrowDownloadFailedExceptionWhenChannelThrowsSftpConnection() throws SftpException {

        expectedException.expect(DownloadFailedException.class);
        expectedException.expectMessage(is(equalTo("Unable to download file path/to/file.txt")));

        doThrow(new SftpException(999, "")).when(mockChannel).get("path/to/file.txt", "some/directory/");

        sftpConnection.download(new FTPFile("file.txt", 0, "path/to", 0, false), "some/directory");
    }

    // Recursive download over the tree built in initRecursiveListings(); directories
    // are mirrored locally and only files are fetched, depth-first.
    @Test
    public void downloadShouldRecursivelyCheckFileIfFolderThenLsThatAndGetOnlyFiles() throws SftpException {

        initRecursiveListings();

        FTPFile directory = new FTPFile("folder", 0, "path/to", 0, true);

        sftpConnection.download(directory, "some/directory");

        verify(mockFileUtils).createLocalDirectory("some/directory/folder/");
        verify(mockChannel).ls("path/to/folder/");
        verify(mockFileUtils).createLocalDirectory("some/directory/folder/directory1/");
        verify(mockChannel).ls("path/to/folder/directory1/");
        verify(mockFileUtils).createLocalDirectory("some/directory/folder/directory1/directory2/");
        verify(mockChannel).ls("path/to/folder/directory1/directory2/");

        InOrder inOrder = Mockito.inOrder(mockChannel);

        inOrder.verify(mockChannel).get("path/to/folder/file1.txt", "some/directory/folder/");
        inOrder.verify(mockChannel).get("path/to/folder/file2.txt", "some/directory/folder/");
        inOrder.verify(mockChannel).get("path/to/folder/directory1/file3.txt", "some/directory/folder/directory1/");
        inOrder.verify(mockChannel).get("path/to/folder/directory1/directory2/file5.txt",
                "some/directory/folder/directory1/directory2/");
        inOrder.verify(mockChannel).get("path/to/folder/directory1/directory2/file6.txt",
                "some/directory/folder/directory1/directory2/");
        inOrder.verify(mockChannel).get("path/to/folder/directory1/file4.txt", "some/directory/folder/directory1/");
    }

    // Depth-first delete: rm files first, rmdir each directory only once emptied.
    @Test
    public void shouldRecursivelyDeleteRemoteFileIfItIsADirectoryAndHasContents() throws SftpException {

        initRecursiveListings();

        sftpConnection.deleteRemoteFile(new FTPFile("folder", 0, "path/to", 0, true));

        InOrder inOrder = Mockito.inOrder(mockChannel);

        inOrder.verify(mockChannel).ls("path/to/folder/");
        inOrder.verify(mockChannel).rm("path/to/folder/file1.txt");
        inOrder.verify(mockChannel).rm("path/to/folder/file2.txt");
        inOrder.verify(mockChannel).ls("path/to/folder/directory1/");
        inOrder.verify(mockChannel).rm("path/to/folder/directory1/file3.txt");
        inOrder.verify(mockChannel).ls("path/to/folder/directory1/directory2/");
        inOrder.verify(mockChannel).rm("path/to/folder/directory1/directory2/file5.txt");
        inOrder.verify(mockChannel).rm("path/to/folder/directory1/directory2/file6.txt");
        inOrder.verify(mockChannel).rmdir("path/to/folder/directory1/directory2");
        inOrder.verify(mockChannel).rm("path/to/folder/directory1/file4.txt");
        inOrder.verify(mockChannel).rmdir("path/to/folder/directory1");
        inOrder.verify(mockChannel).rmdir("path/to/folder");
    }

    @Test
    public void shouldDeleteRemoteFile() throws SftpException {

        FTPFile file = new FTPFile("file.name", 0, "/some/directory", 0, false);

        sftpConnection.deleteRemoteFile(file);

        verify(mockChannel).rm("/some/directory/file.name");
    }

    // An empty listing means the directory can be removed directly with rmdir.
    @Test
    public void shouldDeleteRemoteDirectory() throws SftpException {

        when(mockChannel.ls("/some/directory/file.name/")).thenReturn(new Vector());

        FTPFile file = new FTPFile("file.name", 0, "/some/directory", 0, true);

        sftpConnection.deleteRemoteFile(file);

        verify(mockChannel).rmdir("/some/directory/file.name");
    }

    @Test
    public void shouldCatchAndRethrowExceptionIfCaught() throws SftpException {

        expectedException.expect(FTPException.class);
        expectedException.expectMessage(equalTo("Unable to delete file on remote server"));

        when(mockChannel.ls("/some/directory/file.name/")).thenReturn(new Vector());

        FTPFile file = new FTPFile("file.name", 0, "/some/directory", 0, true);

        doThrow(new SftpException(0, "")).when(mockChannel).rmdir("/some/directory/file.name");

        sftpConnection.deleteRemoteFile(file);
    }

    // Builds the same three-level fake remote tree as the FTP test, including
    // "." and ".." entries that the code under test must skip.
    private void initRecursiveListings() throws SftpException {

        Vector entries = new Vector();
        entries.add(createSingleEntry(".", 123l, 1394525265, true));
        entries.add(createSingleEntry("..", 123l, 1394525265, true));
        entries.add(createSingleEntry("file1.txt", 123l, 1394525265, false));
        entries.add(createSingleEntry("file2.txt", 456l, 1394652161, false));
        entries.add(createSingleEntry("directory1", 789l, 1391879364, true));

        when(mockChannel.ls("path/to/folder/")).thenReturn(entries);

        Vector subEntries = new Vector();
        subEntries.add(createSingleEntry(".", 123l, 1394525265, true));
        subEntries.add(createSingleEntry("..", 123l, 1394525265, true));
        subEntries.add(createSingleEntry("file3.txt", 789l, 1394525265, false));
        subEntries.add(createSingleEntry("directory2", 789l, 1394525265, true));
        subEntries.add(createSingleEntry("file4.txt", 789l, 1394525265, false));

        when(mockChannel.ls("path/to/folder/directory1/")).thenReturn(subEntries);

        Vector subSubEntries = new Vector();
        subSubEntries.add(createSingleEntry(".", 123l, 1394525265, true));
        subSubEntries.add(createSingleEntry("..", 123l, 1394525265, true));
        subSubEntries.add(createSingleEntry("file5.txt", 789l, 1394525265, false));
        subSubEntries.add(createSingleEntry("file6.txt", 789l, 1394525265, false));

        when(mockChannel.ls("path/to/folder/directory1/directory2/")).thenReturn(subSubEntries);
    }

    // Default ls() fixture: File 1 (dir), File 2 (file), File 3 (dir).
    private Vector createEntries() {

        Vector vector = new Vector();

        vector.add(createSingleEntry("File 1", 123l, 1394525265, true));
        vector.add(createSingleEntry("File 2", 456l, 1394652161, false));
        vector.add(createSingleEntry("File 3", 789l, 1391879364, true));

        return vector;
    }

    // Mocks a single JSch LsEntry; mTime is epoch seconds, per SftpATTRS.getMTime().
    private LsEntry createSingleEntry(String fileName, long size, int mTime, boolean directory) {

        SftpATTRS attributes = mock(SftpATTRS.class);
        when(attributes.getSize()).thenReturn(size);
        when(attributes.getMTime()).thenReturn(mTime);

        LsEntry entry = mock(LsEntry.class);
        when(entry.getAttrs()).thenReturn(attributes);
        when(entry.getFilename()).thenReturn(fileName);
        when(entry.getAttrs().isDir()).thenReturn(directory);

        return entry;
    }
}

================================================ FILE: src/test/java/io/linuxserver/davos/transfer/ftp/connection/progress/ListenerFactoryTest.java ================================================

package io.linuxserver.davos.transfer.ftp.connection.progress;

import static
org.assertj.core.api.Assertions.assertThat;

import org.junit.Test;

import io.linuxserver.davos.transfer.ftp.TransferProtocol;

// Verifies the factory maps each protocol to the right listener implementation:
// FTP/FTPS share the byte-counting ProgressListener; SFTP gets the JSch-specific one.
public class ListenerFactoryTest {

    @Test
    public void shouldReturnCorrectListener() {

        ListenerFactory listenerFactory = new ListenerFactory();

        assertThat(listenerFactory.createListener(TransferProtocol.FTP)).isInstanceOf(ProgressListener.class);
        assertThat(listenerFactory.createListener(TransferProtocol.FTPS)).isInstanceOf(ProgressListener.class);
        assertThat(listenerFactory.createListener(TransferProtocol.SFTP)).isInstanceOf(SFTPProgressListener.class);
    }
}

================================================ FILE: src/test/java/io/linuxserver/davos/transfer/ftp/connection/progress/ProgressListenerTest.java ================================================

package io.linuxserver.davos.transfer.ftp.connection.progress;

import static org.assertj.core.api.Assertions.assertThat;

import org.junit.Test;

// Tests transfer-speed and percentage-progress calculations.
// NOTE(review): the speed tests use real Thread.sleep(1000) and wall-clock timing,
// so they are slow and potentially flaky on loaded machines.
public class ProgressListenerTest {

    // ~1 MB written over ~1 s should report roughly 1 MB/s (wide tolerance band).
    @Test
    public void shouldGiveCorrectSpeed() throws InterruptedException {

        ProgressListener listener = new ProgressListener();
        listener.start();

        Thread.sleep(1000);
        listener.setBytesWritten(1000000);
        assertThat(listener.getTransferSpeed()).isBetween(0.9, 1.1);
    }

    // After a second interval with only 0.5 MB more written, speed should drop to ~0.5 MB/s.
    @Test
    public void shouldGiveCorrectSpeedWhenAlternating() throws InterruptedException {

        ProgressListener listener = new ProgressListener();
        listener.start();

        Thread.sleep(1000);
        listener.setBytesWritten(1000000);
        assertThat(listener.getTransferSpeed()).isBetween(0.9, 1.1);

        Thread.sleep(1000);
        listener.setBytesWritten(1500000);
        assertThat(listener.getTransferSpeed()).isBetween(0.45, 0.51);
    }

    // Zero-byte transfers are treated as already complete (avoids divide-by-zero).
    @Test
    public void shouldReturn100IfTotalSizeIsZero() {

        ProgressListener listener = new ProgressListener();
        listener.start();

        listener.setTotalBytes(0);
        listener.setBytesWritten(0);

        assertThat(listener.getProgress()).isEqualTo(100);
    }

    @Test
    public void shouldReturn0IfTotalBytesWrittenIsZero() {

        ProgressListener listener = new ProgressListener();
        listener.start();

        listener.setTotalBytes(110);
        listener.setBytesWritten(0);

        assertThat(listener.getProgress()).isEqualTo(0);
    }

    // Progress is bytesWritten / totalBytes as a percentage.
    @Test
    public void shouldShowProgress() {

        ProgressListener listener = new ProgressListener();

        listener.setTotalBytes(2000);

        listener.setBytesWritten(500);
        assertThat(listener.getProgress()).isEqualTo(25);

        listener.setBytesWritten(1000);
        assertThat(listener.getProgress()).isEqualTo(50);

        listener.setBytesWritten(2000);
        assertThat(listener.getProgress()).isEqualTo(100);
    }
}

================================================ FILE: src/test/java/io/linuxserver/davos/transfer/ftp/connection/progress/SFTPProgressListenerTest.java ================================================

package io.linuxserver.davos.transfer.ftp.connection.progress;

import static org.assertj.core.api.Assertions.assertThat;

import org.junit.Test;

// NOTE(review): this import is redundant (same package) but kept verbatim.
import io.linuxserver.davos.transfer.ftp.connection.progress.SFTPProgressListener;

// count() receives byte deltas (JSch SftpProgressMonitor contract), so progress accumulates:
// 100 + 250 + 150 of 500 total -> 20%, 70%, 100%.
public class SFTPProgressListenerTest {

    @Test
    public void shouldReturnCorrectProgress() {

        SFTPProgressListener listener = new SFTPProgressListener();

        listener.init(0, "", "", 500);

        listener.count(100);
        assertThat(listener.getProgress()).isEqualTo(20);

        listener.count(250);
        assertThat(listener.getProgress()).isEqualTo(70);

        listener.count(150);
        assertThat(listener.getProgress()).isEqualTo(100);
    }
}

================================================ FILE: src/test/java/io/linuxserver/davos/util/PatternBuilderTest.java ================================================

package io.linuxserver.davos.util;

import static org.assertj.core.api.Assertions.assertThat;

import org.junit.Test;

// PatternBuilder converts glob-style filters ("?" = one char, "*" = any run) into regex.
public class PatternBuilderTest {

    @Test
    public void builderShouldTurnQuestionMarksIntoSingleCharacterRegexMatcher() {

        String filter = "This?is?a filter";
        String expected = "This.{1}is.{1}a filter";

        assertThat(PatternBuilder.buildFromFilterString(filter)).isEqualTo(expected);
    }

    @Test
    public void builderShouldTurnAsterixesIntoManyCharacterRegexMatcher() {

        String filter = "This*is*a filter";
        String
expected = "This.*is.*a filter";

        assertThat(PatternBuilder.buildFromFilterString(filter)).isEqualTo(expected);
    }

    // The produced regex must actually match real file names via String.matches().
    @Test
    public void regexStringReturnedShouldBeAbleToActuallyMatchUsingRegexOperation() {

        String normalValue = "Clean Code.pdf";
        String filteredValue = "Clean?Code*";

        assertThat(normalValue.matches(PatternBuilder.buildFromFilterString(filteredValue))).isTrue();
    }

    @Test
    public void stringWithBothAsterixAndQuestionMarkShouldMatchProperly() {

        String anotherValue = "File Name with a Prefix12Then some text";
        String slightlyMoreComplicated = "File?Name*Prefix??Then some text";

        assertThat(anotherValue.matches(PatternBuilder.buildFromFilterString(slightlyMoreComplicated))).isTrue();
    }

    // A literal "." in the filter must not act as a regex wildcard (it must be escaped).
    @Test
    public void dotsShouldBeTreatedVerbatim() {

        String normalValue = "Clean Code.pdf";
        String filteredValue = "Clean?Code.pdf";

        assertThat(normalValue.matches(PatternBuilder.buildFromFilterString(filteredValue))).isTrue();
        assertThat("Clean Code_pdf".matches(PatternBuilder.buildFromFilterString(filteredValue))).isFalse();
    }
}

================================================ FILE: src/test/java/io/linuxserver/davos/web/controller/APIControllerTest.java ================================================

package io.linuxserver.davos.web.controller;

import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import static org.mockito.MockitoAnnotations.initMocks;

import org.junit.Before;
import org.junit.Test;
import org.mockito.InjectMocks;
import org.mockito.Mock;
import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity;

import io.linuxserver.davos.delegation.services.HostService;
import io.linuxserver.davos.delegation.services.ScheduleService;
import io.linuxserver.davos.web.Host;
import io.linuxserver.davos.web.Schedule;
import io.linuxserver.davos.web.controller.response.APIResponse;

/**
 * Tests that the REST controller delegates to the services and wraps results
 * in APIResponse with the right HTTP status (201 on create, 200 otherwise).
 * NOTE(review): generic parameters on ResponseEntity appear stripped by the
 * text extraction; raw types are kept verbatim.
 */
public class APIControllerTest {

    @InjectMocks
    private APIController controller = new APIController();

    @Mock
    private ScheduleService mockScheduleFacade;

    @Mock
    private HostService mockHostFacade;

    @Before
    public void before() {
        initMocks(this);
    }

    @Test
    public void createScheduleShouldCallFacadeMethod() {

        Schedule schedule = new Schedule();

        controller.createSchedule(schedule);

        verify(mockScheduleFacade).createSchedule(schedule);
    }

    @Test
    public void onSuccessNewScheduleShouldBeReturnedWhenCreated() {

        Schedule newSchedule = new Schedule();
        Schedule schedule = new Schedule();

        when(mockScheduleFacade.createSchedule(schedule)).thenReturn(newSchedule);

        ResponseEntity response = controller.createSchedule(schedule);

        assertThat(response.getStatusCode()).isEqualTo(HttpStatus.CREATED);
        assertThat(response.getBody().body).isEqualTo(newSchedule);
    }

    // The path id must be copied onto the payload before delegating.
    @Test
    public void updateScheduleShouldCallFacadeWithIdInMethod() {

        Schedule schedule = new Schedule();

        controller.updateSchedule(1L, schedule);

        verify(mockScheduleFacade).updateSchedule(schedule);
        assertThat(schedule.getId()).isEqualTo(1L);
    }

    @Test
    public void onSuccessUpdatedScheduleShouldBeReturnedWhenSaved() {

        Schedule schedule = new Schedule();

        when(mockScheduleFacade.updateSchedule(schedule)).thenReturn(schedule);

        ResponseEntity response = controller.updateSchedule(1L, schedule);

        assertThat(response.getStatusCode()).isEqualTo(HttpStatus.OK);
        assertThat(response.getBody().body).isEqualTo(schedule);
    }

    @Test
    public void deleteScheduleShouldCallFacade() {

        ResponseEntity response = controller.deleteSchedule(1L);

        verify(mockScheduleFacade).deleteSchedule(1L);

        assertThat(response.getStatusCode()).isEqualTo(HttpStatus.OK);
        assertThat(response.getBody().body).isNull();
    }

    @Test
    public void createHostShouldCallFacade() {

        Host host = new Host();

        controller.createHost(host);

        verify(mockHostFacade).saveHost(host);
    }

    @Test
    public void saveHostShouldReturnResponse() {

        Host host = new Host();
        Host createdHost = new Host();

        when(mockHostFacade.saveHost(host)).thenReturn(createdHost);

        ResponseEntity response =
controller.createHost(host);

        assertThat(response.getStatusCode()).isEqualTo(HttpStatus.CREATED);
        assertThat(response.getBody().body).isEqualTo(createdHost);
    }

    // As with schedules, the path id must be copied onto the payload before delegating.
    @Test
    public void updateHostShouldCallFacadeWithIdInMethod() {

        Host host = new Host();

        controller.updateHost(1L, host);

        verify(mockHostFacade).saveHost(host);
        assertThat(host.getId()).isEqualTo(1L);
    }

    @Test
    public void onSuccessUpdatedHostShouldBeReturnedWhenSaved() {

        Host host = new Host();

        when(mockHostFacade.saveHost(host)).thenReturn(host);

        ResponseEntity response = controller.updateHost(1L, host);

        assertThat(response.getStatusCode()).isEqualTo(HttpStatus.OK);
        assertThat(response.getBody().body).isEqualTo(host);
    }

    @Test
    public void deleteHostShouldCallFacade() {

        ResponseEntity response = controller.deleteHost(1L);

        verify(mockHostFacade).deleteHost(1L);

        assertThat(response.getStatusCode()).isEqualTo(HttpStatus.OK);
        assertThat(response.getBody().body).isNull();
    }
}

================================================ FILE: src/test/java/io/linuxserver/davos/web/controller/ViewControllerTest.java ================================================

package io.linuxserver.davos.web.controller;

import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.eq;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import static org.mockito.MockitoAnnotations.initMocks;

import java.util.ArrayList;

import org.junit.Before;
import org.junit.Test;
import org.mockito.InjectMocks;
import org.mockito.Mock;
import org.springframework.ui.Model;

import io.linuxserver.davos.delegation.services.HostService;
import io.linuxserver.davos.delegation.services.ScheduleService;
import io.linuxserver.davos.delegation.services.SettingsService;
import io.linuxserver.davos.web.Host;
import io.linuxserver.davos.web.Schedule;
import io.linuxserver.davos.web.selectors.LogLevelSelector;

/**
 * Tests that each MVC handler resolves to the expected view name and populates
 * the Model with the data fetched from the delegation services.
 */
public class ViewControllerTest {

    @InjectMocks
    private ViewController controller = new ViewController();

    @Mock
    private HostService mockHostFacade;

    @Mock
    private ScheduleService mockScheduleFacade;

    @Mock
    private SettingsService mockSettingsService;

    @Mock
    private Model mockModel;

    @Before
    public void before() {
        initMocks(this);
        // The settings view reads the current logging level; stub it so settings() doesn't NPE.
        when(mockSettingsService.getCurrentLoggingLevel()).thenReturn(LogLevelSelector.DEBUG);
    }

    @Test
    public void viewsShouldResolveCorrectly() {
        assertThat(controller.index()).isEqualTo("redirect:/schedules");
        assertThat(controller.settings(mockModel)).isEqualTo("v2/settings");
        assertThat(controller.schedules(mockModel)).isEqualTo("v2/schedules");
        assertThat(controller.schedules(1L, mockModel)).isEqualTo("v2/edit-schedule");
        assertThat(controller.newSchedule(mockModel)).isEqualTo("v2/edit-schedule");
        assertThat(controller.hosts()).isEqualTo("v2/hosts");
        assertThat(controller.newHost(mockModel)).isEqualTo("v2/edit-host");
        assertThat(controller.hosts(1L, mockModel)).isEqualTo("v2/edit-host");
    }

    @Test
    public void schedulesShouldAddAllSchedulesToModel() {

        // NOTE(review): raw ArrayList kept verbatim; generics appear stripped by extraction.
        ArrayList schedules = new ArrayList();
        schedules.add(new Schedule());

        when(mockScheduleFacade.fetchAllSchedules()).thenReturn(schedules);

        controller.schedules(mockModel);

        verify(mockModel).addAttribute("schedules", schedules);
    }

    @Test
    public void schedulesWithIdShouldAddSpecificScheduleToModel() {

        Schedule schedule = new Schedule();

        when(mockScheduleFacade.fetchSchedule(1L)).thenReturn(schedule);

        controller.schedules(1L, mockModel);

        verify(mockModel).addAttribute("schedule", schedule);
    }

    @Test
    public void newScheduleShouldAddScheduleToModel() {

        controller.newSchedule(mockModel);

        verify(mockModel).addAttribute(eq("schedule"), any(Schedule.class));
    }

    @Test
    public void allHostsShouldAddHostsToModel() {

        controller.allHosts();
        verify(mockHostFacade).fetchAllHosts();
    }

    @Test
    public void newHostShouldAddNewHostToModel() {

        controller.newHost(mockModel);

        verify(mockModel).addAttribute(eq("host"), any(Host.class));
    }

    @Test
    public void hostsWithIdShouldAddSpecificHostToModel() {

        Host host = new Host();

        when(mockHostFacade.fetchHost(1L)).thenReturn(host);

        controller.hosts(1L, mockModel);

        verify(mockModel).addAttribute("host", host);
    }
}

================================================ FILE: version.txt ================================================
2.2.2