[
  {
    "path": ".gitattributes",
    "content": "###############################################################################\n# Set default behavior to automatically normalize line endings.\n###############################################################################\n* text=auto\n\n###############################################################################\n# Set default behavior for command prompt diff.\n#\n# This is need for earlier builds of msysgit that does not have it on by\n# default for csharp files.\n# Note: This is only used by command line\n###############################################################################\n#*.cs     diff=csharp\n\n###############################################################################\n# Set the merge driver for project and solution files\n#\n# Merging from the command prompt will add diff markers to the files if there\n# are conflicts (Merging from VS is not affected by the settings below, in VS\n# the diff markers are never inserted). Diff markers may cause the following \n# file extensions to fail to load in VS. An alternative would be to treat\n# these files as binary and thus will always conflict and require user\n# intervention with every merge. 
To do so, just uncomment the entries below\n###############################################################################\n#*.sln       merge=binary\n#*.csproj    merge=binary\n#*.vbproj    merge=binary\n#*.vcxproj   merge=binary\n#*.vcproj    merge=binary\n#*.dbproj    merge=binary\n#*.fsproj    merge=binary\n#*.lsproj    merge=binary\n#*.wixproj   merge=binary\n#*.modelproj merge=binary\n#*.sqlproj   merge=binary\n#*.wwaproj   merge=binary\n\n###############################################################################\n# behavior for image files\n#\n# image files are treated as binary by default.\n###############################################################################\n#*.jpg   binary\n#*.png   binary\n#*.gif   binary\n\n###############################################################################\n# diff behavior for common document formats\n# \n# Convert binary document formats to text before diffing them. This feature\n# is only available from the command line. Turn it on by uncommenting the \n# entries below.\n###############################################################################\n#*.doc   diff=astextplain\n#*.DOC   diff=astextplain\n#*.docx  diff=astextplain\n#*.DOCX  diff=astextplain\n#*.dot   diff=astextplain\n#*.DOT   diff=astextplain\n#*.pdf   diff=astextplain\n#*.PDF   diff=astextplain\n#*.rtf   diff=astextplain\n#*.RTF   diff=astextplain\n"
  },
  {
    "path": ".gitignore",
    "content": "## Ignore Visual Studio temporary files, build results, and\n## files generated by popular Visual Studio add-ons.\n\n# User-specific files\n*.suo\n*.user\n*.sln.docstates\n\n# Build results\n\n[Dd]ebug/\n[Rr]elease/\nx64/\nbuild/\n[Bb]in/\n[Oo]bj/\n\n# Enable \"build/\" folder in the NuGet Packages folder since NuGet packages use it for MSBuild targets\n!packages/*/build/\n\n# MSTest test Results\n[Tt]est[Rr]esult*/\n[Bb]uild[Ll]og.*\n\n*_i.c\n*_p.c\n*.ilk\n*.meta\n*.obj\n*.pch\n*.pdb\n*.pgc\n*.pgd\n*.rsp\n*.sbr\n*.tlb\n*.tli\n*.tlh\n*.tmp\n*.tmp_proj\n*.log\n*.vspscc\n*.vssscc\n.builds\n*.pidb\n*.log\n*.scc\n\n# Visual C++ cache files\nipch/\n*.aps\n*.ncb\n*.opensdf\n*.sdf\n*.cachefile\n\n# Visual Studio profiler\n*.psess\n*.vsp\n*.vspx\n\n# Guidance Automation Toolkit\n*.gpState\n\n# ReSharper is a .NET coding add-in\n_ReSharper*/\n*.[Rr]e[Ss]harper\n\n# TeamCity is a build add-in\n_TeamCity*\n\n# DotCover is a Code Coverage Tool\n*.dotCover\n\n# NCrunch\n*.ncrunch*\n.*crunch*.local.xml\n\n# Installshield output folder\n[Ee]xpress/\n\n# DocProject is a documentation generator add-in\nDocProject/buildhelp/\nDocProject/Help/*.HxT\nDocProject/Help/*.HxC\nDocProject/Help/*.hhc\nDocProject/Help/*.hhk\nDocProject/Help/*.hhp\nDocProject/Help/Html2\nDocProject/Help/html\n\n# Click-Once directory\npublish/\n\n# Publish Web Output\n*.Publish.xml\n\n# NuGet Packages Directory\n## TODO: If you have NuGet Package Restore enabled, uncomment the next line\npackages/\n\n# Windows Azure Build Output\ncsx\n*.build.csdef\n\n# Windows Store app package directory\nAppPackages/\n\n# Others\nsql/\n*.Cache\nClientBin/\n[Ss]tyle[Cc]op.*\n~$*\n*~\n*.dbmdl\n*.[Pp]ublish.xml\n*.pfx\n*.publishsettings\n\n# RIA/Silverlight projects\nGenerated_Code/\n\n# Backup & report files from converting an old project file to a newer\n# Visual Studio version. 
Backup files are not needed, because we have git ;-)\n_UpgradeReport_Files/\nBackup*/\nUpgradeLog*.XML\nUpgradeLog*.htm\n\n# SQL Server files\nApp_Data/*.mdf\nApp_Data/*.ldf\n\n\n#LightSwitch generated files\nGeneratedArtifacts/\n_Pvt_Extensions/\nModelManifest.xml\n\n# =========================\n# Windows detritus\n# =========================\n\n# Windows image file caches\nThumbs.db\nehthumbs.db\n\n# Folder config file\nDesktop.ini\n\n# Recycle Bin used on file shares\n$RECYCLE.BIN/\n\n# Mac desktop service store files\n.DS_Store\n"
  },
  {
    "path": ".paket/Paket.Restore.targets",
    "content": "<Project xmlns=\"http://schemas.microsoft.com/developer/msbuild/2003\">\n  <!-- Prevent dotnet template engine to parse this file -->\n  <!--/-:cnd:noEmit-->\n  <PropertyGroup>\n    <!-- make MSBuild track this file for incremental builds. -->\n    <!-- ref https://blogs.msdn.microsoft.com/msbuild/2005/09/26/how-to-ensure-changes-to-a-custom-target-file-prompt-a-rebuild/ -->\n    <MSBuildAllProjects>$(MSBuildAllProjects);$(MSBuildThisFileFullPath)</MSBuildAllProjects>\n\n    <DetectedMSBuildVersion>$(MSBuildVersion)</DetectedMSBuildVersion>\n    <DetectedMSBuildVersion Condition=\"'$(MSBuildVersion)' == ''\">15.0.0</DetectedMSBuildVersion>\n    <MSBuildSupportsHashing>false</MSBuildSupportsHashing>\n    <MSBuildSupportsHashing Condition=\" '$(DetectedMSBuildVersion)' &gt; '15.8.0' \">true</MSBuildSupportsHashing>\n    <!-- Mark that this target file has been loaded.  -->\n    <IsPaketRestoreTargetsFileLoaded>true</IsPaketRestoreTargetsFileLoaded>\n    <PaketToolsPath>$(MSBuildThisFileDirectory)</PaketToolsPath>\n    <PaketRootPath>$(MSBuildThisFileDirectory)..\\</PaketRootPath>\n    <PaketRestoreCacheFile>$(PaketRootPath)paket-files\\paket.restore.cached</PaketRestoreCacheFile>\n    <PaketLockFilePath>$(PaketRootPath)paket.lock</PaketLockFilePath>\n    <PaketBootstrapperStyle>classic</PaketBootstrapperStyle>\n    <PaketBootstrapperStyle Condition=\"Exists('$(PaketToolsPath)paket.bootstrapper.proj')\">proj</PaketBootstrapperStyle>\n    <PaketExeImage>assembly</PaketExeImage>\n    <PaketExeImage Condition=\" '$(PaketBootstrapperStyle)' == 'proj' \">native</PaketExeImage>\n    <MonoPath Condition=\"'$(MonoPath)' == '' AND Exists('/Library/Frameworks/Mono.framework/Commands/mono')\">/Library/Frameworks/Mono.framework/Commands/mono</MonoPath>\n    <MonoPath Condition=\"'$(MonoPath)' == ''\">mono</MonoPath>\n\n    <!-- PaketBootStrapper  -->\n    <PaketBootStrapperExePath Condition=\" '$(PaketBootStrapperExePath)' == '' AND 
Exists('$(PaketRootPath)paket.bootstrapper.exe')\">$(PaketRootPath)paket.bootstrapper.exe</PaketBootStrapperExePath>\n    <PaketBootStrapperExePath Condition=\" '$(PaketBootStrapperExePath)' == '' \">$(PaketToolsPath)paket.bootstrapper.exe</PaketBootStrapperExePath>\n    <PaketBootStrapperExeDir Condition=\" Exists('$(PaketBootStrapperExePath)') \" >$([System.IO.Path]::GetDirectoryName(\"$(PaketBootStrapperExePath)\"))\\</PaketBootStrapperExeDir>\n\n    <PaketBootStrapperCommand Condition=\" '$(OS)' == 'Windows_NT' \">\"$(PaketBootStrapperExePath)\"</PaketBootStrapperCommand>\n    <PaketBootStrapperCommand Condition=\" '$(OS)' != 'Windows_NT' \">$(MonoPath) --runtime=v4.0.30319 \"$(PaketBootStrapperExePath)\"</PaketBootStrapperCommand>\n\n    <!-- Disable automagic references for F# DotNet SDK -->\n    <!-- This will not do anything for other project types -->\n    <!-- see https://github.com/fsharp/fslang-design/blob/master/tooling/FST-1002-fsharp-in-dotnet-sdk.md -->\n    <DisableImplicitFSharpCoreReference>true</DisableImplicitFSharpCoreReference>\n    <DisableImplicitSystemValueTupleReference>true</DisableImplicitSystemValueTupleReference>\n\n    <!-- Disable Paket restore under NCrunch build -->\n    <PaketRestoreDisabled Condition=\"'$(NCrunch)' == '1'\">True</PaketRestoreDisabled>\n\n    <!-- Disable test for CLI tool completely - overrideable via properties in projects or via environment variables -->\n    <PaketDisableCliTest Condition=\" '$(PaketDisableCliTest)' == '' \">False</PaketDisableCliTest>\n\n    <PaketIntermediateOutputPath Condition=\" '$(PaketIntermediateOutputPath)' == '' \">$(BaseIntermediateOutputPath.TrimEnd('\\').TrimEnd('\\/'))</PaketIntermediateOutputPath>\n  </PropertyGroup>\n\n  <!-- Resolve how paket should be called -->\n  <!-- Current priority is: local (1: repo root, 2: .paket folder) => 3: as CLI tool => as bootstrapper (4: proj Bootstrapper style, 5: BootstrapperExeDir) => 6: global path variable -->\n  <Target 
Name=\"SetPaketCommand\" >\n    <!-- Test if paket is available in the standard locations. If so, that takes priority. Case 1/2 - non-windows specific -->\n    <PropertyGroup Condition=\" '$(OS)' != 'Windows_NT' \">\n      <!-- no windows, try native paket as default, root => tool -->\n      <PaketExePath Condition=\" '$(PaketExePath)' == '' AND Exists('$(PaketRootPath)paket') \">$(PaketRootPath)paket</PaketExePath>\n      <PaketExePath Condition=\" '$(PaketExePath)' == '' AND Exists('$(PaketToolsPath)paket') \">$(PaketToolsPath)paket</PaketExePath>\n    </PropertyGroup>\n\n    <!-- Test if paket is available in the standard locations. If so, that takes priority. Case 2/2 - same across platforms -->\n    <PropertyGroup>\n      <!-- root => tool -->\n      <PaketExePath Condition=\" '$(PaketExePath)' == '' AND Exists('$(PaketRootPath)paket.exe') \">$(PaketRootPath)paket.exe</PaketExePath>\n      <PaketExePath Condition=\" '$(PaketExePath)' == '' AND Exists('$(PaketToolsPath)paket.exe') \">$(PaketToolsPath)paket.exe</PaketExePath>\n    </PropertyGroup>\n\n    <!-- If paket hasn't be found in standard locations, test for CLI tool usage. -->\n    <!-- First test: Is CLI configured to be used in \"dotnet-tools.json\"? - can result in a false negative; only a positive outcome is reliable. -->\n    <PropertyGroup Condition=\" '$(PaketExePath)' == '' \">\n      <_DotnetToolsJson Condition=\"Exists('$(PaketRootPath)/.config/dotnet-tools.json')\">$([System.IO.File]::ReadAllText(\"$(PaketRootPath)/.config/dotnet-tools.json\"))</_DotnetToolsJson>\n      <_ConfigContainsPaket Condition=\" '$(_DotnetToolsJson)' != ''\">$(_DotnetToolsJson.Contains('\"paket\"'))</_ConfigContainsPaket>\n      <_ConfigContainsPaket Condition=\" '$(_ConfigContainsPaket)' == ''\">false</_ConfigContainsPaket>\n    </PropertyGroup>\n\n    <!-- Second test: Call 'dotnet paket' and see if it returns without an error. Mute all the output. Only run if previous test failed and the test has not been disabled. 
-->\n    <!-- WARNING: This method can lead to processes hanging forever, and should be used as little as possible. See https://github.com/fsprojects/Paket/issues/3705 for details. -->\n    <Exec Condition=\" '$(PaketExePath)' == '' AND !$(PaketDisableCliTest) AND !$(_ConfigContainsPaket)\" Command=\"dotnet paket --version\" IgnoreExitCode=\"true\" StandardOutputImportance=\"low\" StandardErrorImportance=\"low\" >\n      <Output TaskParameter=\"ExitCode\" PropertyName=\"LocalPaketToolExitCode\" />\n    </Exec>\n\n    <!-- If paket is installed as CLI use that. Again, only if paket haven't already been found in standard locations. -->\n    <PropertyGroup Condition=\" '$(PaketExePath)' == '' AND ($(_ConfigContainsPaket) OR '$(LocalPaketToolExitCode)' == '0') \">\n      <_PaketCommand>dotnet paket</_PaketCommand>\n    </PropertyGroup>\n\n    <!-- If neither local files nor CLI tool can be found, final attempt is searching for boostrapper config before falling back to global path variable. -->\n    <PropertyGroup Condition=\" '$(PaketExePath)' == '' AND '$(_PaketCommand)' == '' \">\n      <!-- Test for bootstrapper setup -->\n      <PaketExePath Condition=\" '$(PaketExePath)' == '' AND '$(PaketBootstrapperStyle)' == 'proj' \">$(PaketToolsPath)paket</PaketExePath>\n      <PaketExePath Condition=\" '$(PaketExePath)' == '' AND Exists('$(PaketBootStrapperExeDir)') \">$(PaketBootStrapperExeDir)paket</PaketExePath>\n\n      <!-- If all else fails, use global path approach. -->\n      <PaketExePath Condition=\" '$(PaketExePath)' == ''\">paket</PaketExePath>\n    </PropertyGroup>\n\n    <!-- If not using CLI, setup correct execution command. 
-->\n    <PropertyGroup Condition=\" '$(_PaketCommand)' == '' \">\n      <_PaketExeExtension>$([System.IO.Path]::GetExtension(\"$(PaketExePath)\"))</_PaketExeExtension>\n      <_PaketCommand Condition=\" '$(_PaketCommand)' == '' AND '$(_PaketExeExtension)' == '.dll' \">dotnet \"$(PaketExePath)\"</_PaketCommand>\n      <_PaketCommand Condition=\" '$(_PaketCommand)' == '' AND '$(OS)' != 'Windows_NT' AND '$(_PaketExeExtension)' == '.exe' \">$(MonoPath) --runtime=v4.0.30319 \"$(PaketExePath)\"</_PaketCommand>\n      <_PaketCommand Condition=\" '$(_PaketCommand)' == '' \">\"$(PaketExePath)\"</_PaketCommand>\n    </PropertyGroup>\n\n    <!-- The way to get a property to be available outside the target is to use this task. -->\n    <CreateProperty Value=\"$(_PaketCommand)\">\n      <Output TaskParameter=\"Value\" PropertyName=\"PaketCommand\"/>\n    </CreateProperty>\n\n  </Target>\n\n  <Target Name=\"PaketBootstrapping\" Condition=\"Exists('$(PaketToolsPath)paket.bootstrapper.proj')\">\n    <MSBuild Projects=\"$(PaketToolsPath)paket.bootstrapper.proj\" Targets=\"Restore\" />\n  </Target>\n\n  <!-- Official workaround for https://docs.microsoft.com/en-us/visualstudio/msbuild/getfilehash-task?view=vs-2019 -->\n  <UsingTask TaskName=\"Microsoft.Build.Tasks.GetFileHash\" AssemblyName=\"Microsoft.Build.Tasks.Core, Version=15.1.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a\" Condition=\" '$(MSBuildSupportsHashing)' == 'true' And '$(DetectedMSBuildVersion)' &lt; '16.0.360' \" />\n  <UsingTask TaskName=\"Microsoft.Build.Tasks.VerifyFileHash\" AssemblyName=\"Microsoft.Build.Tasks.Core, Version=15.1.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a\" Condition=\" '$(MSBuildSupportsHashing)' == 'true' And '$(DetectedMSBuildVersion)' &lt; '16.0.360' \" />\n  <Target Name=\"PaketRestore\" Condition=\"'$(PaketRestoreDisabled)' != 'True'\" 
BeforeTargets=\"_GenerateDotnetCliToolReferenceSpecs;_GenerateProjectRestoreGraphPerFramework;_GenerateRestoreGraphWalkPerFramework;CollectPackageReferences\" DependsOnTargets=\"SetPaketCommand;PaketBootstrapping\">\n\n    <!-- Step 1 Check if lockfile is properly restored (if the hash of the lockfile and the cache-file match) -->\n    <PropertyGroup>\n      <PaketRestoreRequired>true</PaketRestoreRequired>\n      <NoWarn>$(NoWarn);NU1603;NU1604;NU1605;NU1608</NoWarn>\n      <CacheFilesExist>false</CacheFilesExist>\n      <CacheFilesExist Condition=\" Exists('$(PaketRestoreCacheFile)') And Exists('$(PaketLockFilePath)') \">true</CacheFilesExist>\n    </PropertyGroup>\n\n    <!-- Read the hash of the lockfile -->\n    <GetFileHash Condition=\" '$(MSBuildSupportsHashing)' == 'true' And '$(CacheFilesExist)' == 'true' \" Files=\"$(PaketLockFilePath)\" Algorithm=\"SHA256\" HashEncoding=\"hex\" >\n      <Output TaskParameter=\"Hash\" PropertyName=\"PaketRestoreLockFileHash\" />\n    </GetFileHash>\n    <!-- Read the hash of the cache, which is json, but a very simple key value object -->\n    <PropertyGroup Condition=\" '$(MSBuildSupportsHashing)' == 'true' And '$(CacheFilesExist)' == 'true' \">\n        <PaketRestoreCachedContents>$([System.IO.File]::ReadAllText('$(PaketRestoreCacheFile)'))</PaketRestoreCachedContents>\n    </PropertyGroup>\n    <ItemGroup Condition=\" '$(MSBuildSupportsHashing)' == 'true' And '$(CacheFilesExist)' == 'true' \">\n        <!-- Parse our simple 'paket.restore.cached' json ...-->\n        <PaketRestoreCachedSplitObject Include=\"$([System.Text.RegularExpressions.Regex]::Split(`$(PaketRestoreCachedContents)`, `{|}|,`))\"></PaketRestoreCachedSplitObject>\n        <!-- Keep Key, Value ItemGroup-->\n        <PaketRestoreCachedKeyValue Include=\"@(PaketRestoreCachedSplitObject)\"\n            Condition=\" $([System.Text.RegularExpressions.Regex]::Split(`%(Identity)`, `&quot;: &quot;`).Length) &gt; 1 \">\n          
<Key>$([System.Text.RegularExpressions.Regex]::Split(`%(Identity)`, `\": \"`)[0].Replace(`\"`, ``).Replace(` `, ``))</Key>\n          <Value>$([System.Text.RegularExpressions.Regex]::Split(`%(Identity)`, `\": \"`)[1].Replace(`\"`, ``).Replace(` `, ``))</Value>\n        </PaketRestoreCachedKeyValue>\n    </ItemGroup>\n    <PropertyGroup Condition=\" '$(MSBuildSupportsHashing)' == 'true' And '$(CacheFilesExist)' == 'true' \">\n        <!-- Retrieve the hashes we are interested in -->\n        <PackagesDownloadedHash Condition=\" '%(PaketRestoreCachedKeyValue.Key)' == 'packagesDownloadedHash' \">%(PaketRestoreCachedKeyValue.Value)</PackagesDownloadedHash>\n        <ProjectsRestoredHash Condition=\" '%(PaketRestoreCachedKeyValue.Key)' == 'projectsRestoredHash' \">%(PaketRestoreCachedKeyValue.Value)</ProjectsRestoredHash>\n    </PropertyGroup>\n\n    <PropertyGroup Condition=\" '$(MSBuildSupportsHashing)' == 'true' And '$(CacheFilesExist)' == 'true' \">\n      <!-- If the restore file doesn't exist we need to restore, otherwise only if hashes don't match -->\n      <PaketRestoreRequired>true</PaketRestoreRequired>\n      <PaketRestoreRequired Condition=\" '$(PaketRestoreLockFileHash)' == '$(ProjectsRestoredHash)' \">false</PaketRestoreRequired>\n      <PaketRestoreRequired Condition=\" '$(PaketRestoreLockFileHash)' == '' \">true</PaketRestoreRequired>\n    </PropertyGroup>\n\n\t<!--\n\t\tThis value should match the version in the props generated by paket\n\t\tIf they differ, this means we need to do a restore in order to ensure correct dependencies\n\t-->\n    <PropertyGroup Condition=\"'$(PaketPropsVersion)' != '5.185.3' \">\n      <PaketRestoreRequired>true</PaketRestoreRequired>\n    </PropertyGroup>\n\n    <!-- Do a global restore if required -->\n    <Warning Text=\"This version of MSBuild (we assume '$(DetectedMSBuildVersion)' or older) doesn't support GetFileHash, so paket fast restore is disabled.\" Condition=\" '$(MSBuildSupportsHashing)' != 'true' \" />\n    
<Error Text=\"Stop build because of PAKET_ERROR_ON_MSBUILD_EXEC and we always call the bootstrapper\" Condition=\" '$(PAKET_ERROR_ON_MSBUILD_EXEC)' == 'true' AND '$(PaketBootstrapperStyle)' == 'classic' AND Exists('$(PaketBootStrapperExePath)') AND !(Exists('$(PaketExePath)'))\" />\n    <Exec Command='$(PaketBootStrapperCommand)' Condition=\" '$(PaketBootstrapperStyle)' == 'classic' AND Exists('$(PaketBootStrapperExePath)') AND !(Exists('$(PaketExePath)'))\" ContinueOnError=\"false\" />\n    <Error Text=\"Stop build because of PAKET_ERROR_ON_MSBUILD_EXEC and we need a full restore (hashes don't match)\" Condition=\" '$(PAKET_ERROR_ON_MSBUILD_EXEC)' == 'true' AND '$(PaketRestoreRequired)' == 'true' AND '$(PaketDisableGlobalRestore)' != 'true'\" />\n    <Exec Command='$(PaketCommand) restore' Condition=\" '$(PaketRestoreRequired)' == 'true' AND '$(PaketDisableGlobalRestore)' != 'true' \" ContinueOnError=\"false\" />\n\n    <!-- Step 2 Detect project specific changes -->\n    <ItemGroup>\n      <MyTargetFrameworks Condition=\"'$(TargetFramework)' != '' \" Include=\"$(TargetFramework)\"></MyTargetFrameworks>\n      <!-- Don't include all frameworks when msbuild explicitly asks for a single one -->\n      <MyTargetFrameworks Condition=\"'$(TargetFrameworks)' != '' AND '$(TargetFramework)' == '' \" Include=\"$(TargetFrameworks)\"></MyTargetFrameworks>\n      <PaketResolvedFilePaths Include=\"@(MyTargetFrameworks -> '$(PaketIntermediateOutputPath)\\$(MSBuildProjectFile).%(Identity).paket.resolved')\"></PaketResolvedFilePaths>\n    </ItemGroup>\n\n    <PropertyGroup>\n      <PaketReferencesCachedFilePath>$(PaketIntermediateOutputPath)\\$(MSBuildProjectFile).paket.references.cached</PaketReferencesCachedFilePath>\n      <!-- MyProject.fsproj.paket.references has the highest precedence -->\n      <PaketOriginalReferencesFilePath>$(MSBuildProjectFullPath).paket.references</PaketOriginalReferencesFilePath>\n      <!-- MyProject.paket.references -->\n      
<PaketOriginalReferencesFilePath Condition=\" !Exists('$(PaketOriginalReferencesFilePath)')\">$(MSBuildProjectDirectory)\\$(MSBuildProjectName).paket.references</PaketOriginalReferencesFilePath>\n      <!-- paket.references -->\n      <PaketOriginalReferencesFilePath Condition=\" !Exists('$(PaketOriginalReferencesFilePath)')\">$(MSBuildProjectDirectory)\\paket.references</PaketOriginalReferencesFilePath>\n\n      <DoAllResolvedFilesExist>false</DoAllResolvedFilesExist>\n      <DoAllResolvedFilesExist Condition=\"Exists(%(PaketResolvedFilePaths.Identity))\">true</DoAllResolvedFilesExist>\n      <PaketRestoreRequired>true</PaketRestoreRequired>\n      <PaketRestoreRequiredReason>references-file-or-cache-not-found</PaketRestoreRequiredReason>\n    </PropertyGroup>\n\n    <!-- Step 2 a Detect changes in references file -->\n    <PropertyGroup Condition=\"Exists('$(PaketOriginalReferencesFilePath)') AND Exists('$(PaketReferencesCachedFilePath)') \">\n      <PaketRestoreCachedHash>$([System.IO.File]::ReadAllText('$(PaketReferencesCachedFilePath)'))</PaketRestoreCachedHash>\n      <PaketRestoreReferencesFileHash>$([System.IO.File]::ReadAllText('$(PaketOriginalReferencesFilePath)'))</PaketRestoreReferencesFileHash>\n      <PaketRestoreRequiredReason>references-file</PaketRestoreRequiredReason>\n      <PaketRestoreRequired Condition=\" '$(PaketRestoreReferencesFileHash)' == '$(PaketRestoreCachedHash)' \">false</PaketRestoreRequired>\n    </PropertyGroup>\n\n    <PropertyGroup Condition=\"!Exists('$(PaketOriginalReferencesFilePath)') AND !Exists('$(PaketReferencesCachedFilePath)') \">\n      <!-- If both don't exist there is nothing to do. 
-->\n      <PaketRestoreRequired>false</PaketRestoreRequired>\n    </PropertyGroup>\n\n    <!-- Step 2 b detect relevant changes in project file (new targetframework) -->\n    <PropertyGroup Condition=\" '$(DoAllResolvedFilesExist)' != 'true' \">\n      <PaketRestoreRequired>true</PaketRestoreRequired>\n      <PaketRestoreRequiredReason>target-framework '$(TargetFramework)' or '$(TargetFrameworks)' files @(PaketResolvedFilePaths)</PaketRestoreRequiredReason>\n    </PropertyGroup>\n\n    <!-- Step 3 Restore project specific stuff if required -->\n    <Message Condition=\" '$(PaketRestoreRequired)' == 'true' \" Importance=\"low\" Text=\"Detected a change ('$(PaketRestoreRequiredReason)') in the project file '$(MSBuildProjectFullPath)', calling paket restore\" />\n    <Error Text=\"Stop build because of PAKET_ERROR_ON_MSBUILD_EXEC and we detected a change ('$(PaketRestoreRequiredReason)') in the project file '$(MSBuildProjectFullPath)'\" Condition=\" '$(PAKET_ERROR_ON_MSBUILD_EXEC)' == 'true' AND '$(PaketRestoreRequired)' == 'true' \" />\n    <Exec Command='$(PaketCommand) restore --project \"$(MSBuildProjectFullPath)\" --output-path \"$(PaketIntermediateOutputPath)\" --target-framework \"$(TargetFrameworks)\"' Condition=\" '$(PaketRestoreRequired)' == 'true' AND '$(TargetFramework)' == '' \" ContinueOnError=\"false\" />\n    <Exec Command='$(PaketCommand) restore --project \"$(MSBuildProjectFullPath)\" --output-path \"$(PaketIntermediateOutputPath)\" --target-framework \"$(TargetFramework)\"' Condition=\" '$(PaketRestoreRequired)' == 'true' AND '$(TargetFramework)' != '' \" ContinueOnError=\"false\" />\n\n    <!-- This shouldn't actually happen, but just to be sure. 
-->\n    <PropertyGroup>\n      <DoAllResolvedFilesExist>false</DoAllResolvedFilesExist>\n      <DoAllResolvedFilesExist Condition=\"Exists(%(PaketResolvedFilePaths.Identity))\">true</DoAllResolvedFilesExist>\n    </PropertyGroup>\n    <Error Condition=\" '$(DoAllResolvedFilesExist)' != 'true' AND '$(ResolveNuGetPackages)' != 'False' \" Text=\"One Paket file '@(PaketResolvedFilePaths)' is missing while restoring $(MSBuildProjectFile). Please delete 'paket-files/paket.restore.cached' and call 'paket restore'.\" />\n\n    <!-- Step 4 forward all msbuild properties (PackageReference, DotNetCliToolReference) to msbuild -->\n    <ReadLinesFromFile Condition=\"($(DesignTimeBuild) != true OR '$(PaketPropsLoaded)' != 'true') AND '@(PaketResolvedFilePaths)' != ''\" File=\"%(PaketResolvedFilePaths.Identity)\" >\n      <Output TaskParameter=\"Lines\" ItemName=\"PaketReferencesFileLines\"/>\n    </ReadLinesFromFile>\n\n    <ItemGroup Condition=\"($(DesignTimeBuild) != true OR '$(PaketPropsLoaded)' != 'true') AND '@(PaketReferencesFileLines)' != '' \" >\n      <PaketReferencesFileLinesInfo Include=\"@(PaketReferencesFileLines)\" >\n        <Splits>$([System.String]::Copy('%(PaketReferencesFileLines.Identity)').Split(',').Length)</Splits>\n        <PackageName>$([System.String]::Copy('%(PaketReferencesFileLines.Identity)').Split(',')[0])</PackageName>\n        <PackageVersion>$([System.String]::Copy('%(PaketReferencesFileLines.Identity)').Split(',')[1])</PackageVersion>\n        <AllPrivateAssets>$([System.String]::Copy('%(PaketReferencesFileLines.Identity)').Split(',')[4])</AllPrivateAssets>\n        <CopyLocal Condition=\"'%(PaketReferencesFileLinesInfo.Splits)' == '6'\">$([System.String]::Copy('%(PaketReferencesFileLines.Identity)').Split(',')[5])</CopyLocal>\n      </PaketReferencesFileLinesInfo>\n      <PackageReference Include=\"%(PaketReferencesFileLinesInfo.PackageName)\">\n        <Version>%(PaketReferencesFileLinesInfo.PackageVersion)</Version>\n        <PrivateAssets 
Condition=\" ('%(PaketReferencesFileLinesInfo.AllPrivateAssets)' == 'true') Or ('$(PackAsTool)' == 'true') \">All</PrivateAssets>\n        <ExcludeAssets Condition=\" '%(PaketReferencesFileLinesInfo.Splits)' == '6' And %(PaketReferencesFileLinesInfo.CopyLocal) == 'false'\">runtime</ExcludeAssets>\n        <ExcludeAssets Condition=\" '%(PaketReferencesFileLinesInfo.Splits)' != '6' And %(PaketReferencesFileLinesInfo.AllPrivateAssets) == 'exclude'\">runtime</ExcludeAssets>\n        <Publish Condition=\" '$(PackAsTool)' == 'true' \">true</Publish>\n        <AllowExplicitVersion>true</AllowExplicitVersion>\n      </PackageReference>\n    </ItemGroup>\n\n    <PropertyGroup>\n      <PaketCliToolFilePath>$(PaketIntermediateOutputPath)/$(MSBuildProjectFile).paket.clitools</PaketCliToolFilePath>\n    </PropertyGroup>\n\n    <ReadLinesFromFile File=\"$(PaketCliToolFilePath)\" >\n      <Output TaskParameter=\"Lines\" ItemName=\"PaketCliToolFileLines\"/>\n    </ReadLinesFromFile>\n\n    <ItemGroup Condition=\" '@(PaketCliToolFileLines)' != '' \" >\n      <PaketCliToolFileLinesInfo Include=\"@(PaketCliToolFileLines)\" >\n        <PackageName>$([System.String]::Copy('%(PaketCliToolFileLines.Identity)').Split(',')[0])</PackageName>\n        <PackageVersion>$([System.String]::Copy('%(PaketCliToolFileLines.Identity)').Split(',')[1])</PackageVersion>\n      </PaketCliToolFileLinesInfo>\n      <DotNetCliToolReference Include=\"%(PaketCliToolFileLinesInfo.PackageName)\">\n        <Version>%(PaketCliToolFileLinesInfo.PackageVersion)</Version>\n      </DotNetCliToolReference>\n    </ItemGroup>\n\n    <!-- Disabled for now until we know what to do with runtime deps - https://github.com/fsprojects/Paket/issues/2964\n    <PropertyGroup>\n      <RestoreConfigFile>$(PaketIntermediateOutputPath)/$(MSBuildProjectFile).NuGet.Config</RestoreConfigFile>\n    </PropertyGroup> -->\n\n  </Target>\n\n  <Target Name=\"PaketDisableDirectPack\" AfterTargets=\"_IntermediatePack\" 
BeforeTargets=\"GenerateNuspec\" Condition=\"('$(IsPackable)' == '' Or '$(IsPackable)' == 'true') And Exists('$(PaketIntermediateOutputPath)/$(MSBuildProjectFile).references')\" >\n    <PropertyGroup>\n      <ContinuePackingAfterGeneratingNuspec>false</ContinuePackingAfterGeneratingNuspec>\n    </PropertyGroup>\n  </Target>\n\n  <Target Name=\"PaketOverrideNuspec\" DependsOnTargets=\"SetPaketCommand\" AfterTargets=\"GenerateNuspec\" Condition=\"('$(IsPackable)' == '' Or '$(IsPackable)' == 'true') And Exists('$(PaketIntermediateOutputPath)/$(MSBuildProjectFile).references')\" >\n    <ItemGroup>\n      <_NuspecFilesNewLocation Include=\"$(PaketIntermediateOutputPath)\\$(Configuration)\\*.nuspec\"/>\n      <MSBuildMajorVersion Include=\"$(DetectedMSBuildVersion.Replace(`-`, `.`).Split(`.`)[0])\" />\n      <MSBuildMinorVersion Include=\"$(DetectedMSBuildVersion.Replace(`-`, `.`).Split(`.`)[1])\" />\n    </ItemGroup>\n\n    <PropertyGroup>\n      <PaketProjectFile>$(MSBuildProjectDirectory)/$(MSBuildProjectFile)</PaketProjectFile>\n      <ContinuePackingAfterGeneratingNuspec>true</ContinuePackingAfterGeneratingNuspec>\n      <UseMSBuild16_0_Pack>false</UseMSBuild16_0_Pack>\n      <UseMSBuild16_0_Pack Condition=\" '@(MSBuildMajorVersion)' >= '16' \">true</UseMSBuild16_0_Pack>\n      <UseMSBuild15_9_Pack>false</UseMSBuild15_9_Pack>\n      <UseMSBuild15_9_Pack Condition=\" '@(MSBuildMajorVersion)' == '15' AND '@(MSBuildMinorVersion)' > '8' \">true</UseMSBuild15_9_Pack>\n      <UseMSBuild15_8_Pack>false</UseMSBuild15_8_Pack>\n      <UseMSBuild15_8_Pack Condition=\" '$(NuGetToolVersion)' != '4.0.0' AND (! $(UseMSBuild15_9_Pack)) AND (! $(UseMSBuild16_0_Pack)) \">true</UseMSBuild15_8_Pack>\n      <UseNuGet4_Pack>false</UseNuGet4_Pack>\n      <UseNuGet4_Pack Condition=\" (! $(UseMSBuild15_8_Pack)) AND (! $(UseMSBuild15_9_Pack)) AND (! 
$(UseMSBuild16_0_Pack)) \">true</UseNuGet4_Pack>\n      <AdjustedNuspecOutputPath>$(PaketIntermediateOutputPath)\\$(Configuration)</AdjustedNuspecOutputPath>\n      <AdjustedNuspecOutputPath Condition=\"@(_NuspecFilesNewLocation) == ''\">$(PaketIntermediateOutputPath)</AdjustedNuspecOutputPath>\n    </PropertyGroup>\n\n    <ItemGroup>\n      <_NuspecFiles Include=\"$(AdjustedNuspecOutputPath)\\*.$(PackageVersion.Split(`+`)[0]).nuspec\"/>\n    </ItemGroup>\n\n    <Error Text=\"Error Because of PAKET_ERROR_ON_MSBUILD_EXEC (not calling fix-nuspecs)\" Condition=\" '$(PAKET_ERROR_ON_MSBUILD_EXEC)' == 'true' \" />\n    <Exec Condition=\"@(_NuspecFiles) != ''\" Command='$(PaketCommand) fix-nuspecs files \"@(_NuspecFiles)\" project-file \"$(PaketProjectFile)\" ' />\n    <Error Condition=\"@(_NuspecFiles) == ''\" Text='Could not find nuspec files in \"$(AdjustedNuspecOutputPath)\" (Version: \"$(PackageVersion)\"), therefore we cannot call \"paket fix-nuspecs\" and have to error out!' />\n\n    <ConvertToAbsolutePath Condition=\"@(_NuspecFiles) != ''\" Paths=\"@(_NuspecFiles)\">\n      <Output TaskParameter=\"AbsolutePaths\" PropertyName=\"NuspecFileAbsolutePath\" />\n    </ConvertToAbsolutePath>\n\n    <!-- Call Pack -->\n    <PackTask Condition=\"$(UseMSBuild16_0_Pack)\"\n              PackItem=\"$(PackProjectInputFile)\"\n              PackageFiles=\"@(_PackageFiles)\"\n              PackageFilesToExclude=\"@(_PackageFilesToExclude)\"\n              PackageVersion=\"$(PackageVersion)\"\n              PackageId=\"$(PackageId)\"\n              Title=\"$(Title)\"\n              Authors=\"$(Authors)\"\n              Description=\"$(Description)\"\n              Copyright=\"$(Copyright)\"\n              RequireLicenseAcceptance=\"$(PackageRequireLicenseAcceptance)\"\n              LicenseUrl=\"$(PackageLicenseUrl)\"\n              ProjectUrl=\"$(PackageProjectUrl)\"\n              IconUrl=\"$(PackageIconUrl)\"\n              ReleaseNotes=\"$(PackageReleaseNotes)\"\n            
  Tags=\"$(PackageTags)\"\n              DevelopmentDependency=\"$(DevelopmentDependency)\"\n              BuildOutputInPackage=\"@(_BuildOutputInPackage)\"\n              TargetPathsToSymbols=\"@(_TargetPathsToSymbols)\"\n              SymbolPackageFormat=\"$(SymbolPackageFormat)\"\n              TargetFrameworks=\"@(_TargetFrameworks)\"\n              AssemblyName=\"$(AssemblyName)\"\n              PackageOutputPath=\"$(PackageOutputAbsolutePath)\"\n              IncludeSymbols=\"$(IncludeSymbols)\"\n              IncludeSource=\"$(IncludeSource)\"\n              PackageTypes=\"$(PackageType)\"\n              IsTool=\"$(IsTool)\"\n              RepositoryUrl=\"$(RepositoryUrl)\"\n              RepositoryType=\"$(RepositoryType)\"\n              SourceFiles=\"@(_SourceFiles->Distinct())\"\n              NoPackageAnalysis=\"$(NoPackageAnalysis)\"\n              MinClientVersion=\"$(MinClientVersion)\"\n              Serviceable=\"$(Serviceable)\"\n              FrameworkAssemblyReferences=\"@(_FrameworkAssemblyReferences)\"\n              ContinuePackingAfterGeneratingNuspec=\"$(ContinuePackingAfterGeneratingNuspec)\"\n              NuspecOutputPath=\"$(AdjustedNuspecOutputPath)\"\n              IncludeBuildOutput=\"$(IncludeBuildOutput)\"\n              BuildOutputFolders=\"$(BuildOutputTargetFolder)\"\n              ContentTargetFolders=\"$(ContentTargetFolders)\"\n              RestoreOutputPath=\"$(RestoreOutputAbsolutePath)\"\n              NuspecFile=\"$(NuspecFileAbsolutePath)\"\n              NuspecBasePath=\"$(NuspecBasePath)\"\n              NuspecProperties=\"$(NuspecProperties)\"\n              PackageLicenseFile=\"$(PackageLicenseFile)\"\n              PackageLicenseExpression=\"$(PackageLicenseExpression)\"\n              PackageLicenseExpressionVersion=\"$(PackageLicenseExpressionVersion)\" />\n\n    <PackTask Condition=\"$(UseMSBuild15_9_Pack)\"\n              PackItem=\"$(PackProjectInputFile)\"\n              PackageFiles=\"@(_PackageFiles)\"\n    
          PackageFilesToExclude=\"@(_PackageFilesToExclude)\"\n              PackageVersion=\"$(PackageVersion)\"\n              PackageId=\"$(PackageId)\"\n              Title=\"$(Title)\"\n              Authors=\"$(Authors)\"\n              Description=\"$(Description)\"\n              Copyright=\"$(Copyright)\"\n              RequireLicenseAcceptance=\"$(PackageRequireLicenseAcceptance)\"\n              LicenseUrl=\"$(PackageLicenseUrl)\"\n              ProjectUrl=\"$(PackageProjectUrl)\"\n              IconUrl=\"$(PackageIconUrl)\"\n              ReleaseNotes=\"$(PackageReleaseNotes)\"\n              Tags=\"$(PackageTags)\"\n              DevelopmentDependency=\"$(DevelopmentDependency)\"\n              BuildOutputInPackage=\"@(_BuildOutputInPackage)\"\n              TargetPathsToSymbols=\"@(_TargetPathsToSymbols)\"\n              SymbolPackageFormat=\"$(SymbolPackageFormat)\"\n              TargetFrameworks=\"@(_TargetFrameworks)\"\n              AssemblyName=\"$(AssemblyName)\"\n              PackageOutputPath=\"$(PackageOutputAbsolutePath)\"\n              IncludeSymbols=\"$(IncludeSymbols)\"\n              IncludeSource=\"$(IncludeSource)\"\n              PackageTypes=\"$(PackageType)\"\n              IsTool=\"$(IsTool)\"\n              RepositoryUrl=\"$(RepositoryUrl)\"\n              RepositoryType=\"$(RepositoryType)\"\n              SourceFiles=\"@(_SourceFiles->Distinct())\"\n              NoPackageAnalysis=\"$(NoPackageAnalysis)\"\n              MinClientVersion=\"$(MinClientVersion)\"\n              Serviceable=\"$(Serviceable)\"\n              FrameworkAssemblyReferences=\"@(_FrameworkAssemblyReferences)\"\n              ContinuePackingAfterGeneratingNuspec=\"$(ContinuePackingAfterGeneratingNuspec)\"\n              NuspecOutputPath=\"$(AdjustedNuspecOutputPath)\"\n              IncludeBuildOutput=\"$(IncludeBuildOutput)\"\n              BuildOutputFolder=\"$(BuildOutputTargetFolder)\"\n              ContentTargetFolders=\"$(ContentTargetFolders)\"\n 
             RestoreOutputPath=\"$(RestoreOutputAbsolutePath)\"\n              NuspecFile=\"$(NuspecFileAbsolutePath)\"\n              NuspecBasePath=\"$(NuspecBasePath)\"\n              NuspecProperties=\"$(NuspecProperties)\"/>\n\n    <PackTask Condition=\"$(UseMSBuild15_8_Pack)\"\n              PackItem=\"$(PackProjectInputFile)\"\n              PackageFiles=\"@(_PackageFiles)\"\n              PackageFilesToExclude=\"@(_PackageFilesToExclude)\"\n              PackageVersion=\"$(PackageVersion)\"\n              PackageId=\"$(PackageId)\"\n              Title=\"$(Title)\"\n              Authors=\"$(Authors)\"\n              Description=\"$(Description)\"\n              Copyright=\"$(Copyright)\"\n              RequireLicenseAcceptance=\"$(PackageRequireLicenseAcceptance)\"\n              LicenseUrl=\"$(PackageLicenseUrl)\"\n              ProjectUrl=\"$(PackageProjectUrl)\"\n              IconUrl=\"$(PackageIconUrl)\"\n              ReleaseNotes=\"$(PackageReleaseNotes)\"\n              Tags=\"$(PackageTags)\"\n              DevelopmentDependency=\"$(DevelopmentDependency)\"\n              BuildOutputInPackage=\"@(_BuildOutputInPackage)\"\n              TargetPathsToSymbols=\"@(_TargetPathsToSymbols)\"\n              TargetFrameworks=\"@(_TargetFrameworks)\"\n              AssemblyName=\"$(AssemblyName)\"\n              PackageOutputPath=\"$(PackageOutputAbsolutePath)\"\n              IncludeSymbols=\"$(IncludeSymbols)\"\n              IncludeSource=\"$(IncludeSource)\"\n              PackageTypes=\"$(PackageType)\"\n              IsTool=\"$(IsTool)\"\n              RepositoryUrl=\"$(RepositoryUrl)\"\n              RepositoryType=\"$(RepositoryType)\"\n              SourceFiles=\"@(_SourceFiles->Distinct())\"\n              NoPackageAnalysis=\"$(NoPackageAnalysis)\"\n              MinClientVersion=\"$(MinClientVersion)\"\n              Serviceable=\"$(Serviceable)\"\n              FrameworkAssemblyReferences=\"@(_FrameworkAssemblyReferences)\"\n              
ContinuePackingAfterGeneratingNuspec=\"$(ContinuePackingAfterGeneratingNuspec)\"\n              NuspecOutputPath=\"$(AdjustedNuspecOutputPath)\"\n              IncludeBuildOutput=\"$(IncludeBuildOutput)\"\n              BuildOutputFolder=\"$(BuildOutputTargetFolder)\"\n              ContentTargetFolders=\"$(ContentTargetFolders)\"\n              RestoreOutputPath=\"$(RestoreOutputAbsolutePath)\"\n              NuspecFile=\"$(NuspecFileAbsolutePath)\"\n              NuspecBasePath=\"$(NuspecBasePath)\"\n              NuspecProperties=\"$(NuspecProperties)\"/>\n\n    <PackTask Condition=\"$(UseNuGet4_Pack)\"\n              PackItem=\"$(PackProjectInputFile)\"\n              PackageFiles=\"@(_PackageFiles)\"\n              PackageFilesToExclude=\"@(_PackageFilesToExclude)\"\n              PackageVersion=\"$(PackageVersion)\"\n              PackageId=\"$(PackageId)\"\n              Title=\"$(Title)\"\n              Authors=\"$(Authors)\"\n              Description=\"$(Description)\"\n              Copyright=\"$(Copyright)\"\n              RequireLicenseAcceptance=\"$(PackageRequireLicenseAcceptance)\"\n              LicenseUrl=\"$(PackageLicenseUrl)\"\n              ProjectUrl=\"$(PackageProjectUrl)\"\n              IconUrl=\"$(PackageIconUrl)\"\n              ReleaseNotes=\"$(PackageReleaseNotes)\"\n              Tags=\"$(PackageTags)\"\n              TargetPathsToAssemblies=\"@(_TargetPathsToAssemblies->'%(FinalOutputPath)')\"\n              TargetPathsToSymbols=\"@(_TargetPathsToSymbols)\"\n              TargetFrameworks=\"@(_TargetFrameworks)\"\n              AssemblyName=\"$(AssemblyName)\"\n              PackageOutputPath=\"$(PackageOutputAbsolutePath)\"\n              IncludeSymbols=\"$(IncludeSymbols)\"\n              IncludeSource=\"$(IncludeSource)\"\n              PackageTypes=\"$(PackageType)\"\n              IsTool=\"$(IsTool)\"\n              RepositoryUrl=\"$(RepositoryUrl)\"\n              RepositoryType=\"$(RepositoryType)\"\n              
SourceFiles=\"@(_SourceFiles->Distinct())\"\n              NoPackageAnalysis=\"$(NoPackageAnalysis)\"\n              MinClientVersion=\"$(MinClientVersion)\"\n              Serviceable=\"$(Serviceable)\"\n              AssemblyReferences=\"@(_References)\"\n              ContinuePackingAfterGeneratingNuspec=\"$(ContinuePackingAfterGeneratingNuspec)\"\n              NuspecOutputPath=\"$(AdjustedNuspecOutputPath)\"\n              IncludeBuildOutput=\"$(IncludeBuildOutput)\"\n              BuildOutputFolder=\"$(BuildOutputTargetFolder)\"\n              ContentTargetFolders=\"$(ContentTargetFolders)\"\n              RestoreOutputPath=\"$(RestoreOutputAbsolutePath)\"\n              NuspecFile=\"$(NuspecFileAbsolutePath)\"\n              NuspecBasePath=\"$(NuspecBasePath)\"\n              NuspecProperties=\"$(NuspecProperties)\"/>\n  </Target>\n  <!--/+:cnd:noEmit-->\n</Project>\n"
  },
  {
    "path": ".paket/paket.targets",
    "content": "<?xml version=\"1.0\" encoding=\"utf-8\"?>\n<Project ToolsVersion=\"4.0\" xmlns=\"http://schemas.microsoft.com/developer/msbuild/2003\">\n  <PropertyGroup>\n    <!-- Enable the restore command to run before builds -->\n    <RestorePackages Condition=\" '$(RestorePackages)' == '' \">true</RestorePackages>\n    <!-- Download Paket.exe if it does not already exist -->\n    <DownloadPaket Condition=\" '$(DownloadPaket)' == '' \">true</DownloadPaket>\n    <PaketToolsPath>$(MSBuildThisFileDirectory)</PaketToolsPath>\n    <PaketRootPath>$(MSBuildThisFileDirectory)..\\</PaketRootPath>\n  </PropertyGroup>\n  <PropertyGroup>\n    <!-- Paket command -->\n    <PaketExePath Condition=\" '$(PaketExePath)' == '' \">$(PaketToolsPath)paket.exe</PaketExePath>\n    <PaketBootStrapperExePath Condition=\" '$(PaketBootStrapperExePath)' == '' \">$(PaketToolsPath)paket.bootstrapper.exe</PaketBootStrapperExePath>\n    <PaketCommand Condition=\" '$(OS)' == 'Windows_NT'\">\"$(PaketExePath)\"</PaketCommand>\n    <PaketCommand Condition=\" '$(OS)' != 'Windows_NT' \">mono --runtime=v4.0.30319 \"$(PaketExePath)\"</PaketCommand>\n    <PaketBootStrapperCommand Condition=\" '$(OS)' == 'Windows_NT'\">\"$(PaketBootStrapperExePath)\"</PaketBootStrapperCommand>\n    <PaketBootStrapperCommand Condition=\" '$(OS)' != 'Windows_NT' \">mono --runtime=v4.0.30319 $(PaketBootStrapperExePath)</PaketBootStrapperCommand>\n    <!-- Commands -->\n    <RestoreCommand>$(PaketCommand) restore</RestoreCommand>\n    <DownloadPaketCommand>$(PaketBootStrapperCommand)</DownloadPaketCommand>\n    <!-- We need to ensure packages are restored prior to assembly resolve -->\n    <BuildDependsOn Condition=\"$(RestorePackages) == 'true'\">RestorePackages; $(BuildDependsOn);</BuildDependsOn>\n  </PropertyGroup>\n  <Target Name=\"CheckPrerequisites\">\n    <!-- Raise an error if we're unable to locate paket.exe -->\n    <Error Condition=\"'$(DownloadPaket)' != 'true' AND !Exists('$(PaketExePath)')\" Text=\"Unable to 
locate '$(PaketExePath)'\" />\n    <MsBuild Targets=\"DownloadPaket\" Projects=\"$(MSBuildThisFileFullPath)\" Properties=\"Configuration=NOT_IMPORTANT;DownloadPaket=$(DownloadPaket)\" />\n  </Target>\n  <Target Name=\"DownloadPaket\">\n    <Exec Command=\"$(DownloadPaketCommand)\" Condition=\" '$(DownloadPaket)' == 'true' AND !Exists('$(PaketExePath)')\" />\n  </Target>\n  <Target Name=\"RestorePackages\" DependsOnTargets=\"CheckPrerequisites\">\n    <Exec Command=\"$(RestoreCommand)\" WorkingDirectory=\"$(PaketRootPath)\" />\n  </Target>\n</Project>\n"
  },
  {
    "path": "Hype.sln",
    "content": "﻿\nMicrosoft Visual Studio Solution File, Format Version 12.00\n# Visual Studio Version 16\nVisualStudioVersion = 16.0.29009.5\nMinimumVisualStudioVersion = 10.0.40219.1\nProject(\"{2150E333-8FDC-42A3-9474-1A3956D46DE8}\") = \".paket\", \".paket\", \"{B7FB3383-EF19-4645-986C-72D50C08F292}\"\n\tProjectSection(SolutionItems) = preProject\n\t\tpaket.dependencies = paket.dependencies\n\tEndProjectSection\nEndProject\nProject(\"{F2A71F9B-5D33-465A-A702-920D77279786}\") = \"Hype\", \"src\\Hype\\Hype.fsproj\", \"{C923664D-182E-48D5-BB30-F1505D7D28DF}\"\nEndProject\nProject(\"{2150E333-8FDC-42A3-9474-1A3956D46DE8}\") = \"docs\", \"docs\", \"{56DA870A-0ED4-47A2-B78B-34A8D4D6AD28}\"\n\tProjectSection(SolutionItems) = preProject\n\t\tdocs\\BuildDocs.fsx = docs\\BuildDocs.fsx\n\t\tdocs\\input\\download.fsx = docs\\input\\download.fsx\n\t\tdocs\\input\\FeedforwardNets.fsx = docs\\input\\FeedforwardNets.fsx\n\t\tdocs\\input\\HMC.fsx = docs\\input\\HMC.fsx\n\t\tdocs\\input\\index.fsx = docs\\input\\index.fsx\n\t\tdocs\\input\\Optimization.fsx = docs\\input\\Optimization.fsx\n\t\tdocs\\input\\RecurrentNets.fsx = docs\\input\\RecurrentNets.fsx\n\t\tdocs\\input\\Regression.fsx = docs\\input\\Regression.fsx\n\t\tdocs\\input\\Training.fsx = docs\\input\\Training.fsx\n\tEndProjectSection\nEndProject\nGlobal\n\tGlobalSection(SolutionConfigurationPlatforms) = preSolution\n\t\tDebug|Any CPU = Debug|Any CPU\n\t\tDebug|x64 = Debug|x64\n\t\tRelease|Any CPU = Release|Any CPU\n\t\tRelease|x64 = Release|x64\n\tEndGlobalSection\n\tGlobalSection(ProjectConfigurationPlatforms) = postSolution\n\t\t{C923664D-182E-48D5-BB30-F1505D7D28DF}.Debug|Any CPU.ActiveCfg = Debug|Any CPU\n\t\t{C923664D-182E-48D5-BB30-F1505D7D28DF}.Debug|Any CPU.Build.0 = Debug|Any CPU\n\t\t{C923664D-182E-48D5-BB30-F1505D7D28DF}.Debug|x64.ActiveCfg = Debug|Any CPU\n\t\t{C923664D-182E-48D5-BB30-F1505D7D28DF}.Debug|x64.Build.0 = Debug|Any CPU\n\t\t{C923664D-182E-48D5-BB30-F1505D7D28DF}.Release|Any CPU.ActiveCfg = 
Release|Any CPU\n\t\t{C923664D-182E-48D5-BB30-F1505D7D28DF}.Release|Any CPU.Build.0 = Release|Any CPU\n\t\t{C923664D-182E-48D5-BB30-F1505D7D28DF}.Release|x64.ActiveCfg = Release|Any CPU\n\t\t{C923664D-182E-48D5-BB30-F1505D7D28DF}.Release|x64.Build.0 = Release|Any CPU\n\tEndGlobalSection\n\tGlobalSection(SolutionProperties) = preSolution\n\t\tHideSolutionNode = FALSE\n\tEndGlobalSection\n\tGlobalSection(ExtensibilityGlobals) = postSolution\n\t\tSolutionGuid = {028AF435-B43C-4E8E-8A82-4A65AF666086}\n\tEndGlobalSection\nEndGlobal\n"
  },
  {
    "path": "LICENSE.txt",
    "content": "The MIT License (MIT)\n\nCopyright (c) 2015, National University of Ireland Maynooth (Atilim Gunes Baydin, Barak A. Pearlmutter)\n\nPermission is hereby granted, free of charge, to any person obtaining a copy\nof this software and associated documentation files (the \"Software\"), to deal\nin the Software without restriction, including without limitation the rights\nto use, copy, modify, merge, publish, distribute, sublicense, and/or sell\ncopies of the Software, and to permit persons to whom the Software is\nfurnished to do so, subject to the following conditions:\n\nThe above copyright notice and this permission notice shall be included in all\ncopies or substantial portions of the Software.\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\nIMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\nFITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\nAUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\nLIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\nOUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\nSOFTWARE."
  },
  {
    "path": "README.md",
    "content": "Hype: Compositional Machine Learning and Hyperparameter Optimization\n--------------------------------------------------------------------\n\nHype is a proof-of-concept deep learning library, where you can perform optimization on compositional machine learning systems of many components, even when such components themselves internally perform optimization.\n\nIt is developed by [Atılım Güneş Baydin](http://www.cs.nuim.ie/~gunes/) and [Barak A. Pearlmutter](http://bcl.hamilton.ie/~barak/), at the [Brain and Computation Lab](http://www.bcl.hamilton.ie/), National University of Ireland Maynooth.\n\nThis work is supported by Science Foundation Ireland grant 09/IN.1/I2637.\n\nPlease visit the [project website](http://hypelib.github.io/Hype/) for documentation and tutorials.\n\nYou can come and join the Gitter chat room, if you want to chat with us:\n\n[![Join the chat at https://gitter.im/hypelib/Hype](https://badges.gitter.im/Join%20Chat.svg)](https://gitter.im/hypelib/Hype?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge)\n\n### Project statistics\n\n[![Issue Stats](http://issuestats.com/github/hypelib/Hype/badge/pr?style=flat-square)](http://issuestats.com/github/hypelib/Hype)\n[![Issue Stats](http://issuestats.com/github/hypelib/Hype/badge/issue?style=flat-square)](http://issuestats.com/github/hypelib/Hype)\n\n### Current build status\n\n[![Build status](https://ci.appveyor.com/api/projects/status/w1xgcleb1x4f30c0?svg=true)](https://ci.appveyor.com/project/gbaydin/hype)\n\n### License\n\nHype is released under the MIT license.\n"
  },
  {
    "path": "Roadmap.txt",
    "content": "- CUDA backend (DiffSharp)\n- Example for Hamiltonian MCMC\n- Probabilistic inference\n- Convolutional neural networks (ideally with DiffSharp tensor)\n- Saving and loading models using a standard format\n\n- Improve code comments\n- Add references to research papers where relevant\n\n- Add ability to read and write MATLAB files (scipy.io loadmat, savemat)\n- Add ability to read and write FSL nifti files for fMRI (PyMVPA2, SampleAttributes, fmri_dataset, poly_detrend, zscore)\n- Add ability to read and write standard image/video formats (OpenCV, MATLAB)\n- Better integration with graph libraries (box plots, bar graphs, confusion matrix plots, write to .png support)\n"
  },
  {
    "path": "docs/.gitignore",
    "content": "output/"
  },
  {
    "path": "docs/BuildDocs.fsx",
    "content": "﻿//\n// This file is part of\n// Hype: Compositional Machine Learning and Hyperparameter Optimization\n//\n// Copyright (c) 2015, National University of Ireland Maynooth (Atilim Gunes Baydin, Barak A. Pearlmutter)\n//\n// Hype is released under the MIT license.\n// (See accompanying LICENSE file.)\n//\n// Written by:\n//\n//   Atilim Gunes Baydin\n//   atilimgunes.baydin@nuim.ie\n//\n//   Barak A. Pearlmutter\n//   barak@cs.nuim.ie\n//\n//   Brain and Computation Lab\n//   Hamilton Institute & Department of Computer Science\n//   National University of Ireland Maynooth\n//   Maynooth, Co. Kildare\n//   Ireland\n//\n//   www.bcl.hamilton.ie\n//\n\n#r \"../packages/FSharp.Compiler.Service/lib/net40/FSharp.Compiler.Service.dll\"\n#r \"../packages/FSharpVSPowerTools.Core/lib/net45/FSharpVSPowerTools.Core.dll\"\n#r \"../packages/FSharp.Formatting/lib/net40/CSharpFormat.dll\"\n#r \"../packages/FSharp.Formatting/lib/net40/FSharp.CodeFormat.dll\"\n#r \"../packages/FSharp.Formatting/lib/net40/FSharp.Literate.dll\"\n#r \"../packages/FSharp.Formatting/lib/net40/FSharp.MetadataFormat.dll\"\n#r \"../packages/FSharp.Formatting/lib/net40/FSharp.Markdown.dll\"\n\nopen System.IO\nopen FSharp.Literate\nopen FSharp.MetadataFormat\n\n//\n// Setup output directory structure and copy static files\n//\n\nlet source = __SOURCE_DIRECTORY__ \nlet docs = Path.Combine(source, \"\")\nlet relative subdir = Path.Combine(docs, subdir)\n\nif not (Directory.Exists(relative \"output\")) then\n    Directory.CreateDirectory(relative \"output\") |> ignore\nif not (Directory.Exists(relative \"output/img\")) then\n    Directory.CreateDirectory (relative \"output/img\") |> ignore\nif not (Directory.Exists(relative \"output/misc\")) then\n    Directory.CreateDirectory (relative \"output/misc\") |> ignore\nif not (Directory.Exists(relative \"output/reference\")) then\n    Directory.CreateDirectory (relative \"output/reference\") |> ignore\n\nfor fileInfo in DirectoryInfo(relative 
\"input/files/misc\").EnumerateFiles() do\n    fileInfo.CopyTo(Path.Combine(relative \"output/misc\", fileInfo.Name), true) |> ignore\n\nfor fileInfo in DirectoryInfo(relative \"input/files/img\").EnumerateFiles() do\n    fileInfo.CopyTo(Path.Combine(relative \"output/img\", fileInfo.Name), true) |> ignore\n\n//\n// Generate documentation\n//\n\nlet tags = [\"project-name\", \"Hype\"; \"project-author\", \"Atılım Güneş Baydin\"; \"project-github\", \"http://github.com/hypelib/Hype\"; \"project-nuget\", \"https://www.nuget.org/packages/hype\"; \"root\", \"\"]\n\nLiterate.ProcessScriptFile(relative \"input/index.fsx\", relative \"input/templates/template.html\", relative \"output/index.html\", replacements = tags)\nLiterate.ProcessScriptFile(relative \"input/download.fsx\", relative \"input/templates/template.html\", relative \"output/download.html\", replacements = tags)\nLiterate.ProcessScriptFile(relative \"input/Optimization.fsx\", relative \"input/templates/template.html\", relative \"output/optimization.html\", replacements = tags)\nLiterate.ProcessScriptFile(relative \"input/Training.fsx\", relative \"input/templates/template.html\", relative \"output/training.html\", replacements = tags)\nLiterate.ProcessScriptFile(relative \"input/Regression.fsx\", relative \"input/templates/template.html\", relative \"output/regression.html\", replacements = tags)\nLiterate.ProcessScriptFile(relative \"input/FeedforwardNets.fsx\", relative \"input/templates/template.html\", relative \"output/feedforwardnets.html\", replacements = tags)\nLiterate.ProcessScriptFile(relative \"input/RecurrentNets.fsx\", relative \"input/templates/template.html\", relative \"output/recurrentnets.html\", replacements = tags)\nLiterate.ProcessScriptFile(relative \"input/HMC.fsx\", relative \"input/templates/template.html\", relative \"output/hmc.html\", replacements = tags)\n\n//\n// Generate API reference\n//\n\nlet library = relative \"../src/Hype/bin/Debug/Hype.dll\"\nlet layoutRoots = 
[relative \"input/templates\"; relative \"input/templates/reference\" ]\n\nMetadataFormat.Generate(library, relative \"output/reference\", layoutRoots, tags, markDownComments = true, libDirs = [relative \"../src/Hype/bin/Debug/\"])\n"
  },
  {
    "path": "docs/input/FeedforwardNets.fsx",
    "content": "﻿(*** hide ***)\n#r \"../../src/Hype/bin/Release/netstandard2.0/DiffSharp.dll\"\n#r \"../../src/Hype/bin/Release/netstandard2.0/Hype.dll\"\n#I \"../../packages/R.NET.Community/lib/net40/\"\n#I \"../../packages/R.NET.Community.FSharp/lib/net40/\"\n#I \"../../packages/RProvider\"\n#load \"RProvider.fsx\"\nfsi.ShowDeclarationValues <- true\n\n(**\nFeedforward neural networks\n===========================\n\nIn this example, we implement a softmax classifier network with several hidden layers. Also see the [regression example](regression.html) for some relevant basics.\n\nWe again demonstrate the library with the [MNIST](http://yann.lecun.com/exdb/mnist/) database, this time using the full training set of 60,000 examples for building a classifier with 10 outputs representing the class probabilities of an input image belonging to one of the ten categories.\n\n### Loading the data\n\nWe load the data and form the training, validation, and test datasets. The datasets are shuffled and the input data are normalized.\n*)\n\nopen Hype\nopen Hype.Neural\nopen DiffSharp.AD.Float32\nopen DiffSharp.Util\n\nlet MNIST = Dataset(Util.LoadMNISTPixels(\"C:/datasets/MNIST/train-images.idx3-ubyte\", 60000),\n                    Util.LoadMNISTLabels(\"C:/datasets/MNIST/train-labels.idx1-ubyte\", 60000)).NormalizeX()\n\nlet MNISTtrain = MNIST.[..58999].Shuffle()\nlet MNISTvalid = MNIST.[59000..].Shuffle()\n\nlet MNISTtest = \n    Dataset(Util.LoadMNISTPixels(\"C:/datasets/MNIST/t10k-images.idx3-ubyte\", 10000),\n            Util.LoadMNISTLabels(\"C:/datasets/MNIST/t10k-labels.idx1-ubyte\", 10000)).NormalizeX().Shuffle()\n\n(**\n<pre>\nval MNISTtrain : Dataset = Hype.Dataset\n   X: 784 x 59000\n   Y: 1 x 59000\nval MNISTvalid : Dataset = Hype.Dataset\n   X: 784 x 1000\n   Y: 1 x 1000\nval MNISTtest : Dataset = Hype.Dataset\n   X: 784 x 10000\n   Y: 1 x 10000\n</pre>\n\n*)\n\nMNISTtrain.[..5].VisualizeXColsAsImageGrid(28) |> printfn \"%s\"\n\n(**\n\n    [lang=cs]\n    
Hype.Dataset\n       X: 784 x 6\n       Y: 1 x 6\n    X's columns reshaped to (28 x 28), presented in a (2 x 3) grid:\n    DM : 56 x 84\n                                                                                    \n                                                                                    \n                                                                                    \n                                                  ·▴█                                   \n                                                 ■■♦█·                  █■              \n                                                ▪███■▪                 -██■-            \n                                             ·■███♦●                    ·●██■▪          \n                            -♦▪             ·████♦                         -♦█♦         \n                         -♦■▪·              █■█●                             ■█·        \n                      ·●██♦                 ██●·                             ██·        \n             ·▴     ·●██▪                -■██▪                              ■█▪         \n            ·■▪   ·▪■■▪                 ·███▪  ·▴·                         ♦█▪          \n           ♦■▪  ·■■▴                    ♦███●▴▴♦██●                       ██▴           \n           █   ■█·                      ■█■█■██████■                     ■█▪            \n           ■█· -                        ██▴█████████▴                  ·██·             \n            ●█▪                         █▪●■████■███▴                 ·███♦·            \n             ·■■                        █▴●- ·    ●█·                  ●♦♦♦█▪           \n               ■█·                      █●■       ▪█                       ▴█-          \n               ·█●                      ███·  ·●■■██                        ■█          \n               ·█●                      █●■▴▴██████●                        ██          \n               ●█▴                      ♦█████████▴                        
▴█▪          \n            -♦██♦                       ▪█████♦▪·                    ●     █■           \n            ▴■■▴                          ▪▪▪                        █   ·■♦            \n                                                                     ■♦▪♦█▪             \n                                                                                    \n                                                                                    \n                                                                                    \n                                                                                    \n                                                                                    \n                                                                                    \n                                                                                    \n                                                                                    \n                                                                                    \n                                                                                    \n           ·■       ·♦                           ▪■♦-                  ·●●█■·           \n           ██·      ♦█●                         ██■■█▪-●·              ▪■·▴●            \n           ██▴      ♦██▪                       ■█-  ▴███■              █♦               \n          -██       ♦██●                      ▪█·   ▪██▪              ▪█♦  ■▴           \n          -██       ♦██■                      █●   ♦██▴               ♦█- ██■           \n          ▴█♦       ▴██■                     ▴█  ·██■·                ♦█- ███           \n          ♦█♦       -███                     ▴█·▴██♦                  ▪██■███·          \n          ♦█·       ·██●                      ████▴                    ■██●■█·          \n          ♦█▴    ··-███●                     ▪██♦                       ·  ♦█·          \n          ♦██■■■■██████●                   
·■███                           ██           \n          ▪█████████■♦█●                  ♦██■●█                           █■           \n           ●██████▴-  █■                ▴██■- ●█                          ▴█▪           \n           ·███♦·     ██               ♦█■▴   █●                          ██▴           \n            ▪▪        ██▪             ███    ■█·                          ██▴           \n                      ●██            ●████■■██-                           ██            \n                      ▴██            -▪▪▪▪▪▪▪-                            ██            \n                       ██                                                -██            \n                       ■█                                                -█●            \n                       ●█·                                               -█▪            \n                       ·█                                                 █             \n                                                                                    \n                                                                                    \n\n### Defining the model\n\nWe define a neural network with 3 layers: (1) a hidden layer with 300 units, followed by ReLU activation, (2) a hidden layer with 100 units, followed by ReLU activation, (3) a final layer with 10 units, followed by softmax transformation.\n*)\n\nlet n = FeedForward()\nn.Add(Linear(28 * 28, 300, Initializer.InitReLU))\nn.Add(reLU)\nn.Add(Linear(300, 100, Initializer.InitReLU))\nn.Add(reLU)\nn.Add(Linear(100, 10))\nn.Add(fun m -> m |> DM.mapCols softmax) // Note the free inline implementation of the layer\n\nn.ToString() |> printfn \"%s\"\n\n(**\n    [lang=cs]\n    Hype.Neural.FeedForward\n       Learnable parameters: 266610\n       (0) -> (1) -> (2) -> (3) -> (4) -> (5)\n\n       (0): Hype.Neural.Linear\n       784 -> 300\n       Learnable parameters: 235500\n       Init: ReLU\n       W   : 300 x 784\n       b   : 300\n\n       (1): 
Hype.Neural.Activation\n\n       (2): Hype.Neural.Linear\n       300 -> 100\n       Learnable parameters: 30100\n       Init: ReLU\n       W   : 100 x 300\n       b   : 100\n\n       (3): Hype.Neural.Activation\n\n       (4): Hype.Neural.Linear\n       100 -> 10\n       Learnable parameters: 1010\n       Init: Standard\n       W   : 10 x 100\n       b   : 10\n\n       (5): Hype.Neural.Activation\n*)\n\n\n(**\n\n### Freely implementing transformation layers\n\nNow let's have a closer look at how we implemented the nonlinear transformations between the linear layers. \n\nYou might think that the instances of **reLU** in **n.Add(reLU)** above refer to a particular layer structure previously implemented as a layer module within the library. They don't. **reLU** is just a matrix-to-matrix elementwise function.\n\n**An important thing to note** here is that the activation/transformation layers added with, for example, **n.Add(reLU)**, can be **any matrix-to-matrix function that you can express in the language,** unlike commonly seen in many machine learning frameworks where you are asked to select a particular layer type that has been implemented beforehand with its (1) forward evaluation code and (2) reverse gradient code w.r.t. layer inputs, and (3) reverse gradient code w.r.t. any layer parameters. In such a setting, a new layer design would require you to add a new layer type to the system and carefully implement these components.\n\nHere, because the system is based on nested AD, you can freely use any matrix-to-matrix transformation as a layer, and the forward and/or reverse AD operations of your code will be handled automatically by the underlying system. 
For example, you can write a layer like this: \n*)\n\nn.Add(fun w ->\n        let min = DM.Min(w)\n        let range = DM.Max(w) - min\n        (w - min) / range)\n\n(** \nwhich will be a normalization layer, scaling the values to be between 0 and 1.\n\nIn the above model, this is how the softmax layer is implemented as a mapping of the vector-to-vector **softmax** function to the columns of a matrix. \n\n*)\n\nn.Add(fun m -> m |> DM.mapCols softmax) \n\n(**\nIn this particular example, the output matrix has 10 rows (for the 10 target classes) and each column (a vector of size 10) is individually passed through the **softmax** function. The output matrix would have as many columns as the input matrix, representing the class probabilities of each input.\n*)\n\n\n(**\n### Weight initialization schemes\n\nWhen layers with learnable weights are created, the weights are initialized using one of the following schemes. The correct initialization would depend on the activation function immediately following the layer and would take the fan-in/fan-out of the layer into account. If a specific scheme is not specified, the **InitStandard** scheme is used by default. These implementations are based on existing machine learning literature, such as _\"Glorot, Xavier, and Yoshua Bengio. \"Understanding the difficulty of training deep feedforward neural networks.\" International conference on artificial intelligence and statistics. 
2010\"_.\n\n*)\n\ntype Initializer =\n    | InitUniform of D * D\n    | InitNormal of D * D\n    | InitRBM of D\n    | InitReLU\n    | InitSigmoid\n    | InitTanh\n    | InitStandard\n    | InitCustom of (int->int->D)\n    override i.ToString() =\n        match i with\n        | InitUniform(min, max) -> sprintf \"Uniform min=%A max=%A\" min max\n        | InitNormal(mu, sigma) -> sprintf \"Normal mu=%A sigma=%A\" mu sigma\n        | InitRBM sigma -> sprintf \"RBM sigma=%A\" sigma\n        | InitReLU -> \"ReLU\"\n        | InitSigmoid -> \"Sigmoid\"\n        | InitTanh -> \"Tanh\"\n        | InitStandard -> \"Standard\"\n        | InitCustom f -> \"Custom\"\n    member i.InitDM(m, n) =\n        let fanOut, fanIn = m, n\n        match i with\n        | InitUniform(min, max) -> Rnd.UniformDM(m, n, min, max)\n        | InitNormal(mu, sigma) -> Rnd.NormalDM(m, n, mu, sigma)\n        | InitRBM sigma -> Rnd.NormalDM(m, n, D 0.f, sigma)\n        | InitReLU -> Rnd.NormalDM(m, n, D 0.f, sqrt (D 2.f / (float32 fanIn)))\n        | InitSigmoid -> let r = D 4.f * sqrt (D 6.f / (fanIn + fanOut)) in Rnd.UniformDM(m, n, -r, r)\n        | InitTanh -> let r = sqrt (D 6.f / (fanIn + fanOut)) in Rnd.UniformDM(m, n, -r, r)\n        | InitStandard -> let r = (D 1.f) / sqrt (float32 fanIn) in Rnd.UniformDM(m, n, -r, r)\n        | InitCustom f -> DM.init m n (fun _ _ -> f fanIn fanOut)\n    member i.InitDM(m:DM) = i.InitDM(m.Rows, m.Cols)\n\n(**\n### Training\n\nBefore training, let's visualize the weights of the first layer in a grid where each row of the weight matrix of the first layer is shown as a 28-by-28 image. 
It is an image of random weights, as expected.\n*)\n\nlet l = (n.[0] :?> Linear)\nl.VisualizeWRowsAsImageGrid(28) |> printfn \"%s\"\n\n(**\n<pre>\nHype.Neural.Linear\n    784 -> 300\n    Learnable parameters: 235500\n    Init: ReLU\n    W's rows reshaped to (28 x 28), presented in a (17 x 18) grid:\n</pre>\n\n<div class=\"row\">\n    <div class=\"span6 text-center\">\n        <img src=\"img/Feedforwardnets-1.png\" alt=\"Chart\" style=\"width:500px;\"/>\n    </div>\n</div><br/>\n\nNow let's train the network with the training and validation datasets we've prepared, using RMSProp, Nesterov momentum, and cross-entropy loss.\n*)\n\nlet p = {Params.Default with \n            Epochs = 2\n            EarlyStopping = Early (400, 100)\n            ValidationInterval = 10\n            Batch = Minibatch 100\n            Loss = CrossEntropyOnSoftmax\n            Momentum = Nesterov (D 0.9f)\n            LearningRate = RMSProp (D 0.001f, D 0.9f)}\n\nlet _, lhist = n.Train(MNISTtrain, MNISTvalid, p)\n\n\n(**\n<pre>\n[12/11/2015 22:42:07] --- Training started\n[12/11/2015 22:42:07] Parameters     : 266610\n[12/11/2015 22:42:07] Iterations     : 1180\n[12/11/2015 22:42:07] Epochs         : 2\n[12/11/2015 22:42:07] Batches        : Minibatches of 100 (590 per epoch)\n[12/11/2015 22:42:07] Training data  : 59000\n[12/11/2015 22:42:07] Validation data: 1000\n[12/11/2015 22:42:07] Valid. interval: 10\n[12/11/2015 22:42:07] Method         : Gradient descent\n[12/11/2015 22:42:07] Learning rate  : RMSProp a0 = D 0.00100000005f, k = D 0.899999976f\n[12/11/2015 22:42:07] Momentum       : Nesterov D 0.899999976f\n[12/11/2015 22:42:07] Loss           : Cross entropy after softmax layer\n[12/11/2015 22:42:07] Regularizer    : L2 lambda = D 9.99999975e-05f\n[12/11/2015 22:42:07] Gradient clip. : None\n[12/11/2015 22:42:07] Early stopping : Stagnation thresh. = 400, overfit. thresh. = 100\n[12/11/2015 22:42:07] Improv. 
thresh.: D 0.995000005f\n[12/11/2015 22:42:07] Return best    : true\n[12/11/2015 22:42:07] 1/2 | Batch   1/590 | D  2.383214e+000 [- ] | Valid D  2.411374e+000 [- ] | Stag:  0 Ovfit:  0\n[12/11/2015 22:42:08] 1/2 | Batch  11/590 | D  6.371681e-001 [↓▼] | Valid D  6.128169e-001 [↓▼] | Stag:  0 Ovfit:  0\n[12/11/2015 22:42:08] 1/2 | Batch  21/590 | D  4.729548e-001 [↓▼] | Valid D  4.779414e-001 [↓▼] | Stag:  0 Ovfit:  0\n[12/11/2015 22:42:09] 1/2 | Batch  31/590 | D  4.792733e-001 [↑ ] | Valid D  3.651254e-001 [↓▼] | Stag:  0 Ovfit:  0\n[12/11/2015 22:42:10] 1/2 | Batch  41/590 | D  2.977416e-001 [↓▼] | Valid D  3.680202e-001 [↑ ] | Stag: 10 Ovfit:  0\n[12/11/2015 22:42:10] 1/2 | Batch  51/590 | D  4.242567e-001 [↑ ] | Valid D  3.525212e-001 [↓▼] | Stag:  0 Ovfit:  0\n[12/11/2015 22:42:11] 1/2 | Batch  61/590 | D  2.464822e-001 [↓▼] | Valid D  3.365663e-001 [↓▼] | Stag:  0 Ovfit:  0\n[12/11/2015 22:42:11] 1/2 | Batch  71/590 | D  6.299557e-001 [↑ ] | Valid D  3.981607e-001 [↑ ] | Stag: 10 Ovfit:  0\n...\n[12/11/2015 22:43:21] 2/2 | Batch 521/590 | D  1.163270e-001 [↓ ] | Valid D  2.264248e-001 [↓ ] | Stag: 50 Ovfit:  0\n[12/11/2015 22:43:21] 2/2 | Batch 531/590 | D  2.169427e-001 [↑ ] | Valid D  2.203927e-001 [↓ ] | Stag: 60 Ovfit:  0\n[12/11/2015 22:43:22] 2/2 | Batch 541/590 | D  2.233351e-001 [↑ ] | Valid D  2.353653e-001 [↑ ] | Stag: 70 Ovfit:  0\n[12/11/2015 22:43:22] 2/2 | Batch 551/590 | D  3.425132e-001 [↑ ] | Valid D  2.559682e-001 [↑ ] | Stag: 80 Ovfit:  0\n[12/11/2015 22:43:23] 2/2 | Batch 561/590 | D  2.768238e-001 [↓ ] | Valid D  2.412431e-001 [↓ ] | Stag: 90 Ovfit:  0\n[12/11/2015 22:43:24] 2/2 | Batch 571/590 | D  2.550858e-001 [↓ ] | Valid D  2.726600e-001 [↑ ] | Stag:100 Ovfit:  0\n[12/11/2015 22:43:24] 2/2 | Batch 581/590 | D  2.308137e-001 [↓ ] | Valid D  2.466903e-001 [↓ ] | Stag:110 Ovfit:  0\n[12/11/2015 22:43:25] Duration       : 00:01:17.5011734\n[12/11/2015 22:43:25] Loss initial   : D  2.383214e+000\n[12/11/2015 22:43:25] Loss final     : D 
 1.087980e-001 (Best)\n[12/11/2015 22:43:25] Loss change    : D -2.274415e+000 (-95.43 %)\n[12/11/2015 22:43:25] Loss chg. / s  : D -2.934685e-002\n[12/11/2015 22:43:25] Epochs / s     : 0.02580606089\n[12/11/2015 22:43:25] Epochs / min   : 1.548363654\n[12/11/2015 22:43:25] --- Training finished\n</pre>\n\n<div class=\"row\">\n    <div class=\"span6 text-center\">\n        <img src=\"img/Feedforwardnets-3.png\" alt=\"Chart\" style=\"width:500px;\"/>\n    </div>\n</div><br/>\n*)\n\n(*** hide ***)\nopen RProvider\nopen RProvider.graphics\nopen RProvider.grDevices\n\nlet ll = lhist |> Array.map (float32>>float)\n\nnamedParams[\n    \"x\", box ll\n    \"pch\", box 19\n    \"col\", box \"darkblue\"\n    \"type\", box \"l\"\n    \"xlab\", box \"Iteration\"\n    \"ylab\", box \"Loss\"\n    \"width\", box 700\n    \"height\", box 500\n    ]\n|> R.plot|> ignore\n\n\n(**\nNow let's visualize the weights of the first layer in the grid. We see that the network has learned the problem domain.\n*)\n\nlet l = (n.[0] :?> Linear)\nl.VisualizeWRowsAsImageGrid(28) |> printfn \"%s\"\n\n(**\n\n<div class=\"row\">\n    <div class=\"span6 text-center\">\n        <img src=\"img/Feedforwardnets-2.png\" alt=\"Chart\" style=\"width:500px;\"/>\n    </div>\n</div><br/>\n*)\n\n(**\n\n### Building the softmax classifier\n\nAs explained in [regression](regression.html), we just construct an instance of **SoftmaxClassifier** with the trained neural network as its parameter. 
Please see the [API reference](reference/index.html) and the [source code](https://github.com/hypelib/Hype/blob/master/src/Hype/Classifier.fs) for a better understanding of how classifiers are implemented.\n*)\n\nlet cc = SoftmaxClassifier(n)\n\n(**\n\nTesting class predictions for 10 random elements from the MNIST test set.\n\n*)\n\nlet pred = cc.Classify(MNISTtest.X.[*,0..9]);;\nlet real = MNISTtest.Yi.[0..9]\n\n(**\n<pre>\nval pred : int [] = [|5; 1; 9; 2; 6; 0; 0; 5; 7; 6|]\nval real : int [] = [|5; 1; 9; 2; 6; 0; 0; 5; 7; 6|]\n</pre>\n\nLet's compute the classification error for the whole MNIST test set of 10,000 examples.\n*)\n\ncc.ClassificationError(MNISTtest)\n\n(**\n<pre>\nval it : float32 = 0.0502999984f\n</pre>\n\nThe classification error is around 5%. This can be lowered some more by training the model for more than 2 epochs as we did.\n\nClassifying a single digit:\n*)\n\nlet cls = cc.Classify(MNISTtest.X.[*,0]);;\nMNISTtest.X.[*,0] |> DV.visualizeAsDM 28 |> printfn \"%s\"\n\n(**\n    [lang=cs]\n    val cls : int = 5\n\n    DM : 28 x 28\n                            \n                            \n                            \n                            \n                            \n                            ·   \n                        ▴●██♦-  \n                     ▴♦██■▴-    \n                ♦█■■███▪·       \n               ■████■-          \n              ♦███▪             \n             ♦██♦               \n             ██●                \n            ■█▪                 \n            ██· -▴■●-           \n           ▴██████■███-         \n           ♦██♦▪    ▪█■-        \n            ▪·       ▴█●        \n                     -██        \n                     ♦█●        \n                    ■█■         \n                 -●██■·         \n             -▴▪■███▪           \n          ███████●-             \n                            \n                            \n                            \n\nClassifying many digits at the same 
time:\n*)\n\nlet clss = cc.Classify(MNISTtest.X.[*,0..4]);;\nMNISTtest.[0..4].VisualizeXColsAsImageGrid(28) |> printfn \"%s\"\n\n(**\n\n    [lang=cs]\n    val clss : int [] = [|5; 1; 9; 2; 6|]\n\n    Hype.Dataset\n       X: 784 x 5\n       Y: 1 x 5\n    X's columns reshaped to (28 x 28), presented in a (2 x 3) grid:\n    DM : 56 x 84\n                                                                                    \n                                                                                    \n                                                                                    \n                                                                                    \n                                                  ██♦                                   \n                            ·                     ██                                    \n                        ▴●██♦-                   ██▴                    -♦█▪            \n                     ▴♦██■▴-                    ♦██                    ●█████●          \n                ♦█■■███▪·                       ██♦                   ■███♦♦██          \n               ■████■-                         ███                   ■██♦   ■█▴         \n              ♦███▪                           ▴███                  ·██♦    ●██         \n             ♦██♦                             ███                   ▪██     ■█■         \n             ██●                             ▴██▴                   ·██·  ·♦██▴         \n            ■█▪                              ███                     ███♦♦████▴         \n            ██· -▴■●-                       ███♦                     ▴████████·         \n           ▴██████■███-                     ███      ▴                ·-●- ■██          \n           ♦██♦▪    ▪█■-                   ♦██▴                            ██■          \n            ▪·       ▴█●                  ▴██♦                            -██▴          \n                     -██                  ███▴             
               -██·          \n                     ♦█●                 ♦██▴                             ■██·          \n                    ■█■                  ███                              ███           \n                 -●██■·                 ♦██▴                             ▴██●           \n             -▴▪■███▪                   ██♦                              ███            \n          ███████●-                     ♦█                              -██■            \n                                                                        -██♦            \n                                                                        -██·            \n                                                                                    \n                                                                                    \n                                                                                    \n                                                                                    \n                                                ▴●█♦                                    \n                ●██                           -████▴                                    \n              ▪████●                         ▴████                                      \n             ▴██████▴                       ▴███■                                       \n             ■██▪▴██▴                       ███▪                                        \n            ▴██●  ▴█■                      ■██▴                                         \n           ·███    ██-                   ·♦██▴                                          \n           ♦██●    ▪█▪                  -███▴                                           \n           ███      ██                  ███♦                                            \n           ███      █♦                 ███▪                                             \n           █♦·      █♦                ●██■        ▴▴▴                                   \n     
       ·       ██                ███    -██-■█████▪                                \n              -     ██                ██■   ●███████████-                               \n            ·██■♦-  ██               ▴██▴  ███●-     ▪██▴                               \n            ♦█████■███               ▪██  ·██-       ·██▪                               \n            ■█████████               ███▪·██▴        ♦██                                \n            ♦█████████♦▪             ▪██████▴      ·♦██·                                \n            -███████████■●●●·         ▪███████████████▴                                 \n             ■██████■■■█████▴          -▪██████████♦-                                   \n             ·████■    ▴████■              ·▴▴▴▴▴▴·                                     \n              -■█-       ■■▴                                                            \n                                                                                    \n                                                                                    \n                                                                                    \n                                                                                    \n                                                                                    \n\n\n\n\nNested optimization of training hyperparameters\n-----------------------------------------------\n\nAs we've seen in [optimization](optimization.html), nested AD allows us to apply gradient-based optimization to functions that also internally perform optimization.\n\nThis gives us the possibility of optimizing the hyperparameters of training. We can, for example, compute the gradient of the final loss of a training procedure with respect to the continuous hyperparameters of the training such as learning rates, momentum parameters, regularization coefficients, or initialization conditions. 
\n\nAs an example, let's train a neural network with a learning rate schedule of 50 elements, and optimize this schedule vector with another level of optimization on top of the training.\n*)\n\nlet train lrschedule =\n    Rnd.Seed(123)\n    n.Init()\n\n    let p = {Params.Default with\n                LearningRate = Schedule lrschedule\n                Loss = CrossEntropyOnSoftmax\n                ValidationInterval = 1\n                Silent = true\n                ReturnBest = false\n                Batch = Full}\n    let loss, _ = n.Train(MNISTvalid.[..20], p)\n    loss\n\nlet hypertrain epochs =\n    let p = {Params.Default with \n                Epochs = epochs\n                LearningRate = RMSProp(D 0.01f, D 0.9f)\n                ValidationInterval = 1}\n    let lr, _, _, _ = Optimize.Minimize(train, DV.create 50 (D 0.1f), p)\n    lr\n\nlet lr = hypertrain 50\n\n(*** hide ***)\nopen RProvider\nopen RProvider.graphics\nopen RProvider.grDevices\n\nlet lrlr = lr |> DV.toArray |> Array.map (float32>>float)\n\nnamedParams[\n    \"x\", box lrlr\n    \"pch\", box 19\n    \"col\", box \"darkblue\"\n    \"type\", box \"o\"\n    \"xlab\", box \"Iteration\"\n    \"ylab\", box \"Learning rate\"\n    \"width\", box 700\n    \"height\", box 500\n    ]\n|> R.plot|> ignore\n\n(**\n<div class=\"row\">\n    <div class=\"span6 text-center\">\n        <img src=\"img/Feedforwardnets-4.png\" alt=\"Chart\" style=\"width:500px;\"/>\n    </div>\n</div><br/>\n*)"
  },
  {
    "path": "docs/input/HMC.fsx",
    "content": "﻿(*** hide ***)\n#r \"../../src/Hype/bin/Release/netstandard2.0/DiffSharp.dll\"\n#r \"../../src/Hype/bin/Release/netstandard2.0/Hype.dll\"\n#I \"../../packages/R.NET.Community/lib/net40/\"\n#I \"../../packages/R.NET.Community.FSharp/lib/net40/\"\n#I \"../../packages/RProvider\"\n#load \"RProvider.fsx\"\nfsi.ShowDeclarationValues <- false\n\n(**\nMarkov Chain Monte Carlo\n========================\n\nDocumentation coming soon.\n*)"
  },
  {
    "path": "docs/input/Optimization.fsx",
    "content": "﻿(*** hide ***)\n\n#r \"../../src/Hype/bin/Release/netstandard2.0/DiffSharp.dll\"\n#r \"../../src/Hype/bin/Release/netstandard2.0/Hype.dll\"\n#I \"../../packages/R.NET.Community/lib/net40/\"\n#I \"../../packages/R.NET.Community.FSharp/lib/net40/\"\n#I \"../../packages/RProvider\"\n#load \"RProvider.fsx\"\n\n(**\nOptimization\n============\n\nHype provides a highly configurable and modular gradient-based optimization functionality. This works similar to many other machine learning libraries.\n\n**Here's the novelty:** \n\nThanks to nested AD, gradient-based optimization can be combined with any code, including code which internally takes derivatives of a function to produce its output. In other words, you can optimize the value of a function that is internally optimizing another function, or using derivatives for any other purpose (e.g. running particle simulations, adaptive control), up to any level. \n\nIn such a compositional optimization setting, all arising higher-order derivatives are handled for you through **nested instantiations of forward and/or reverse AD**. In any case, you only need to write your algorithms as usual, **only implementing a regular forward algorithm**.\n\nLet's explain this through a basic example from the article _\"Jeffrey Mark Siskind and Barak A. Pearlmutter. Nesting forward-mode AD in a functional framework. Higher Order and Symbolic Computation 21(4):361-76, 2008. doi:10.1007/s10990-008-9037-1\"_, where a parameter of a physics simulation using the gradient of an electric potential is optimized with Newton's method using the Hessian of an error, requiring third-order nesting of derivatives.\n\nOptimizing a physics simulation\n-------------------------------\n\nConsider a charged particle traveling in a plane with position $\\mathbf{x}(t)$, velocity $\\dot{\\mathbf{x}}(t)$, initial position $\\mathbf{x}(0)=(0, 8)$, and initial velocity $\\dot{\\mathbf{x}}(0)=(0.75, 0)$. 
The particle is accelerated by an electric field formed by a pair of repulsive bodies,\n\n$$$\n   p(\\mathbf{x}; w) = \\| \\mathbf{x} - (10, 10 - w)\\|^{-1} + \\| \\mathbf{x} - (10, 0)\\|^{-1}\n\nwhere $w$ is a parameter of this simple particle simulation, adjusting the location of one of the repulsive bodies.\n\nWe can simulate the time evolution of this system by using a naive Euler ODE integration\n\n$$$\n   \\begin{eqnarray*}\n   \\ddot{\\mathbf{x}}(t) &=& \\left. -\\nabla_{\\mathbf{x}} p(\\mathbf{x}) \\right|_{\\mathbf{x}=\\mathbf{x}(t)}\\\\\n   \\dot{\\mathbf{x}}(t + \\Delta t) &=& \\dot{\\mathbf{x}}(t) + \\Delta t \\ddot{\\mathbf{x}}(t)\\\\\n   \\mathbf{x}(t + \\Delta t) &=& \\mathbf{x}(t) + \\Delta t \\dot{\\mathbf{x}}(t)\n   \\end{eqnarray*}\n\nwhere $\\Delta t$ is an integration time step.\n\nFor a given parameter $w$, the simulation starts with $t=0$ and finishes when the particle hits the $x$-axis, at position $\\mathbf{x}(t_f)$ at time $t_f$. When the particle hits the $x$-axis, we calculate an error $E(w) = x_0 (t_f)^2$, the squared horizontal distance of the particle from the origin. 
We then minimize this error using Newton's method, which finds the optimal value of $w$ so that the particle eventually hits the $x$-axis at the origin.\n\n$$$\n   w^{(i+1)} = w^{(i)} - \\frac{E'(w^{(i)})}{E''(w^{(i)})}\n\nIn other words, the code calculating the trajectory of the particle internally computes the gradient of the electric potential $p(\\mathbf{x}; w)$, and, at the same time, the final position of the trajectory $\\mathbf{x}(t_f)$ is used to compute an error, and the gradient and Hessian of this error are computed during the optimization procedure.\n\nHere's how it goes.\n*)\n\nopen Hype\nopen DiffSharp.AD.Float32\n\nlet dt = D 0.1f\nlet x0 = toDV [0.; 8.]\nlet v0 = toDV [0.75; 0.]\n\nlet p w (x:DV) = (1.f / DV.norm (x - toDV [D 10.f + w * D 0.f; D 10.f - w])) \n               + (1.f / DV.norm (x - toDV [10.; 0.]))\n\nlet trajectory (w:D) = \n    (x0, v0) \n    |> Seq.unfold (fun (x, v) ->\n                    let a = -grad (p w)  x\n                    let v = v + dt * a\n                    let x = x + dt * v\n                    Some(x, (x, v)))\n    |> Seq.takeWhile (fun x -> x.[1] > D 0.f)\n\nlet error (w:DV) =\n    let xf = trajectory w.[0] |> Seq.last\n    xf.[0] * xf.[0]\n\nlet w, l, whist, lhist = Optimize.Minimize(error, toDV [0.], \n                                            {Params.Default with \n                                                Method = Newton; \n                                                LearningRate = Constant (D 1.f)\n                                                ValidationInterval = 1;\n                                                Epochs = 10})\n\n(**\n<pre>\n[25/12/2015 23:53:10] --- Minimization started\n[25/12/2015 23:53:10] Parameters     : 1\n[25/12/2015 23:53:10] Iterations     : 10\n[25/12/2015 23:53:10] Valid. 
interval: 1\n[25/12/2015 23:53:10] Method         : Exact Newton\n[25/12/2015 23:53:10] Learning rate  : Constant a = D 1.0f\n[25/12/2015 23:53:10] Momentum       : None\n[25/12/2015 23:53:10] Gradient clip. : None\n[25/12/2015 23:53:10] Early stopping : None\n[25/12/2015 23:53:10] Improv. thresh.: D 0.995000005f\n[25/12/2015 23:53:10] Return best    : true\n[25/12/2015 23:53:10]  1/10 | D  2.535113e+000 [- ]\n[25/12/2015 23:53:10]  2/10 | D  7.528733e-002 [↓▼]\n[25/12/2015 23:53:10]  3/10 | D  1.592970e-002 [↓▼]\n[25/12/2015 23:53:10]  4/10 | D  4.178338e-003 [↓▼]\n[25/12/2015 23:53:10]  5/10 | D  1.382800e-008 [↓▼]\n[25/12/2015 23:53:11]  6/10 | D  3.274181e-011 [↓▼]\n[25/12/2015 23:53:11]  7/10 | D  1.151079e-012 [↓▼]\n[25/12/2015 23:53:11]  8/10 | D  1.151079e-012 [- ]\n[25/12/2015 23:53:11]  9/10 | D  1.151079e-012 [- ]\n[25/12/2015 23:53:11] 10/10 | D  3.274181e-011 [↑ ]\n[25/12/2015 23:53:11] Duration       : 00:00:00.9201285\n[25/12/2015 23:53:11] Value initial  : D  2.535113e+000\n[25/12/2015 23:53:11] Value final    : D  1.151079e-012 (Best)\n[25/12/2015 23:53:11] Value change   : D -2.535113e+000 (-100.00 %)\n[25/12/2015 23:53:11] Value chg. / s : D -2.755173e+000\n[25/12/2015 23:53:11] Iter. / s      : 10.86804723\n[25/12/2015 23:53:11] Iter. 
/ min    : 652.0828341\n[25/12/2015 23:53:11] --- Minimization finished\n\nval whist : DV [] =\n  [|DV [|0.0f|]; DV [|0.20767726f|]; DV [|0.17457059f|]; DV [|0.190040559f|];\n    DV [|0.182180524f|]; DV [|0.182166189f|]; DV [|0.182166889f|];\n    DV [|0.182166755f|]; DV [|0.182166621f|]; DV [|0.182166487f|]|]\nval w : DV = DV [|0.182166889f|]\nval lhist : D [] =\n  [|D 2.5351131f; D 2.5351131f; D 0.0752873272f; D 0.0159297027f;\n    D 0.00417833822f; D 1.38279992e-08f; D 3.27418093e-11f; D 1.15107923e-12f;\n    D 1.15107923e-12f; D 1.15107923e-12f|]\nval l : D = D 1.15107923e-12f\n</pre>\n*)\n\n(*** hide ***)\nopen RProvider\nopen RProvider.graphics\nopen RProvider.grDevices\n\nR.plot_new (namedParams [ ])\n\nlet t = trajectory (whist.[1].[0])\nlet tx, ty = t |> Seq.toArray |> Array.map (fun v -> v.[0] |> float32 |> float, v.[1] |> float32 |> float) |> Array.unzip\n\nnamedParams[\n    \"x\", box tx\n    \"y\", box ty\n    \"pch\", box 1\n    \"xlab\", box \"\"\n    \"ylab\", box \"\"\n    \"col\", box \"darkblue\"\n    \"type\", box \"l\"\n    \"lty\", box 4\n    \"width\", box 700\n    \"height\", box 500\n    ]\n|> R.lines |> ignore\n\n\n(**\n<div class=\"row\">\n    <div class=\"span6 text-center\">\n        <img src=\"img/Optimization-3.png\" alt=\"Chart\" style=\"width:500px;\"/>\n    </div>\n</div><br/>\n\nOptimization parameters\n-----------------------\nAs another example, let's optimize the Beale function\n\n$$$\n   f(\\mathbf{x}) = (1.5 - x_1 + x_1 x_2)^2 + (2.25 - x_1 + x_1 x_2^2)^2 + (2.625 - x_1 + x_1 x_2^3)^2\n\nstarting from $\\mathbf{x} = (1, 1.5)$, using RMSProp. 
The optimum is at $(3, 0.5)$\n*)\n\nlet beale (x:DV) = (1.5f - x.[0] + (x.[0] * x.[1])) ** 2.f\n                    + (2.25f - x.[0] + x.[0] * x.[1] ** 2.f) ** 2.f\n                    + (2.625f - x.[0] + x.[0] * x.[1] ** 3.f) ** 2.f\n\nlet wopt, lopt, whist, lhist = Optimize.Minimize(beale, toDV [1.; 1.5], \n                                                    {Params.Default with \n                                                        Epochs = 3000; \n                                                        LearningRate = RMSProp (D 0.01f, D 0.9f)})\n\n(**\n<pre>\n[12/11/2015 01:22:59] --- Minimization started\n[12/11/2015 01:22:59] Parameters     : 2\n[12/11/2015 01:22:59] Iterations     : 3000\n[12/11/2015 01:22:59] Valid. interval: 10\n[12/11/2015 01:22:59] Method         : Gradient descent\n[12/11/2015 01:22:59] Learning rate  : RMSProp a0 = D 0.00999999978f, k = D 0.899999976f\n[12/11/2015 01:22:59] Momentum       : None\n[12/11/2015 01:22:59] Gradient clip. : None\n[12/11/2015 01:22:59] Early stopping : None\n[12/11/2015 01:22:59] Improv. 
thresh.: D 0.995000005f\n[12/11/2015 01:22:59] Return best    : true\n[12/11/2015 01:22:59]    1/3000 | D  4.125000e+001 [- ]\n[12/11/2015 01:22:59]   11/3000 | D  2.655878e+001 [↓▼]\n[12/11/2015 01:22:59]   21/3000 | D  2.154373e+001 [↓▼]\n[12/11/2015 01:22:59]   31/3000 | D  1.841705e+001 [↓▼]\n[12/11/2015 01:22:59]   41/3000 | D  1.624916e+001 [↓▼]\n[12/11/2015 01:22:59]   51/3000 | D  1.465973e+001 [↓▼]\n[12/11/2015 01:22:59]   61/3000 | D  1.334291e+001 [↓▼]\n...\n[12/11/2015 01:22:59] 2921/3000 | D  9.084024e-004 [- ]\n[12/11/2015 01:22:59] 2931/3000 | D  9.084024e-004 [- ]\n[12/11/2015 01:22:59] 2941/3000 | D  9.084024e-004 [- ]\n[12/11/2015 01:22:59] 2951/3000 | D  9.084024e-004 [- ]\n[12/11/2015 01:22:59] 2961/3000 | D  9.084024e-004 [- ]\n[12/11/2015 01:22:59] 2971/3000 | D  9.084024e-004 [- ]\n[12/11/2015 01:22:59] 2981/3000 | D  9.084024e-004 [- ]\n[12/11/2015 01:22:59] 2991/3000 | D  9.084024e-004 [- ]\n[12/11/2015 01:22:59] Duration       : 00:00:00.3142646\n[12/11/2015 01:22:59] Value initial  : D  4.125000e+001\n[12/11/2015 01:22:59] Value final    : D  8.948371e-004 (Best)\n[12/11/2015 01:22:59] Value change   : D -4.124910e+001 (-100.00 %)\n[12/11/2015 01:22:59] Value chg. / s : D -1.312560e+002\n[12/11/2015 01:22:59] Iter. / s      : 9546.09587\n[12/11/2015 01:22:59] Iter. 
/ min    : 572765.7522\n[12/11/2015 01:22:59] --- Minimization finished\n\nval wopt : DV = DV [|2.99909306f; 0.50039643f|]\n</pre>\n*)\n\n(*** hide ***)\nopen RProvider\nopen RProvider.graphics\nopen RProvider.grDevices\n\nR.plot_new (namedParams [ ])\n\nlet ll = lhist |> Array.map (float32>>float)\n\nnamedParams[\n    \"x\", box ll\n    \"pch\", box 19\n    \"col\", box \"darkblue\"\n    \"type\", box \"o\"\n    \"xlab\", box \"Iteration\"\n    \"ylab\", box \"Function value\"\n    \"width\", box 700\n    \"height\", box 500\n    ]\n|> R.plot|> ignore\n\n(**\n<div class=\"row\">\n    <div class=\"span6 text-center\">\n        <img src=\"img/Optimization-1.png\" alt=\"Chart\" style=\"width:500px;\"/>\n    </div>\n</div><br/>\n\n*)\n\n(*** hide ***)\n\nlet contourplot3d (f:DV->D) (xmin, xmax) (ymin, ymax) =\n    let res = 100\n    let xstep = ((xmax - xmin) / float res)\n    let ystep = ((ymax - ymin) / float res)\n    let x = [|xmin .. xstep .. xmax|]\n    let y = [|ymin .. ystep .. ymax|]\n    let z = Array2D.init x.Length y.Length (fun i j -> f (toDV [x.[i]; y.[j]])) |> Array2D.map (float32>>float)\n    namedParams [\n        \"x\", box x\n        \"y\", box y\n        \"z\", box z\n        \"labels\", box \"\"\n        \"levels\", box [|0..5..200|]]\n    |> R.contour\n\ncontourplot3d beale (-4.5,4.5) (-4.5,4.5) \n\nlet xx, yy = whist |> Array.map (fun v -> v.[0] |> float32 |> float, v.[1] |> float32 |> float) |> Array.unzip\nnamedParams[\n    \"x\", box xx\n    \"y\", box yy\n    \"col\", box \"blue\"]\n|> R.lines\n\nnamedParams[\n    \"x\", box (xx |>Array.last)\n    \"y\", box (yy |> Array.last)\n    \"pch\", box 16\n    \"col\", box \"blue\"]\n|> R.points\n\n(**\n<div class=\"row\">\n    <div class=\"span6 text-center\">\n        <img src=\"img/Optimization-2.png\" alt=\"Chart\" style=\"width:500px;\"/>\n    </div>\n</div><br/>\n\nEach instantiation of gradient-based optimization is controlled through a collection of parameters, using the **Hype.Params** 
type.\n\nIf you do not supply any parameters to optimization, the default parameter set **Params.Default** is used. The default parameters look like this:\n\n*)\nmodule Params =\n     let Default = {Epochs = 100\n                    LearningRate = LearningRate.DefaultRMSProp\n                    Momentum = NoMomentum\n                    Loss = L2Loss\n                    Regularization = Regularization.DefaultL2Reg\n                    GradientClipping = NoClip\n                    Method = GD\n                    Batch = Full\n                    EarlyStopping = NoEarly\n                    ImprovementThreshold = D 0.995f\n                    Silent = false\n                    ReturnBest = true\n                    ValidationInterval = 10\n                    LoggingFunction = fun _ _ _ -> ()}\n\n(**\nIf you want to change only a specific element of the parameter type, you can do so by extending the **Params.Default** value and overwriting only the parts you need to change, such as this:\n*)\n\nlet p = {Params.Default with\n            Epochs = 5000\n            LearningRate = LearningRate.AdaGrad (D 0.001f)\n            Momentum = Nesterov (D 0.9f)}\n\n(**\n### Optimization method\n*)\n\ntype Method =\n    | GD          // Gradient descent\n    | CG          // Conjugate gradient\n    | CD          // Conjugate descent\n    | NonlinearCG // Nonlinear conjugate gradient\n    | DaiYuanCG   // Dai & Yuan conjugate gradient\n    | NewtonCG    // Newton conjugate gradient\n    | Newton      // Exact Newton\n\n(**\n### Learning rate\n*)\n\ntype LearningRate =\n    | Constant    of D         // Constant\n    | Decay       of D * D     // 1 / t decay, a = a0 / (1 + kt). Initial value, decay rate\n    | ExpDecay    of D * D     // Exponential decay, a = a0 * Exp(-kt). Initial value, decay rate\n    | Schedule    of DV        // Scheduled learning rate vector, its length overrides Params.Epochs\n    | Backtrack   of D * D * D // Backtracking line search. 
Initial value, c, rho\n    | StrongWolfe of D * D * D // Strong Wolfe line search. lmax, c1, c2\n    | AdaGrad     of D         // Adagrad. Initial value\n    | RMSProp     of D * D     // RMSProp. Initial value, decay rate\n    static member DefaultConstant    = Constant (D 0.001f)\n    static member DefaultDecay       = Decay (D 1.f, D 0.1f)\n    static member DefaultExpDecay    = ExpDecay (D 1.f, D 0.1f)\n    static member DefaultBacktrack   = Backtrack (D 1.f, D 0.0001f, D 0.5f)\n    static member DefaultStrongWolfe = StrongWolfe (D 1.f, D 0.0001f, D 0.5f)\n    static member DefaultAdaGrad     = AdaGrad (D 0.001f)\n    static member DefaultRMSProp     = RMSProp (D 0.001f, D 0.9f)\n\n(**\n### Momentum\n*)\n\ntype Momentum =\n    | Momentum of D // Default momentum\n    | Nesterov of D // Nesterov momentum\n    | NoMomentum\n    static member DefaultMomentum = Momentum (D 0.9f)\n    static member DefaultNesterov = Nesterov (D 0.9f)\n\n(**\n### Gradient clipping\n*)\n\ntype GradientClipping =\n    | NormClip of D // Norm clipping\n    | NoClip\n    static member DefaultNormClip = NormClip (D 1.f)\n\n(**\n\nFinally, looking at the [API reference](reference/index.html) and the [source code](https://github.com/hypelib/Hype/blob/master/src/Hype/Optimize.fs) of the optimization module can give you a better idea of the optimization algorithms currently implemented.\n*)"
  },
  {
    "path": "docs/input/RecurrentNets.fsx",
    "content": "﻿(*** hide ***)\n#r \"../../src/Hype/bin/Release/netstandard2.0/DiffSharp.dll\"\n#r \"../../src/Hype/bin/Release/netstandard2.0/Hype.dll\"\n#I \"../../packages/R.NET.Community/lib/net40/\"\n#I \"../../packages/R.NET.Community.FSharp/lib/net40/\"\n#I \"../../packages/RProvider\"\n#load \"RProvider.fsx\"\nfsi.ShowDeclarationValues <- false\n\n(**\nRecurrent neural networks\n=========================\n\nIn this example we build a recurrent neural network (RNN) for a language modeling task and train it with a short passage of text for a quick demonstration. Hype currently has three RNN models implemented as **Hype.Neural** layers, which can be combined freely with other layer types, explained, for example, in the [neural networks](feedforwardnets.html) page. **Hype.Neural.Recurrent** implements the \"vanilla\" RNN layer, **Hype.Neural.LSTM** implements the LSTM layer, and **Hype.Neural.GRU** implements the gated recurrent unit (GRU) layer.\n\n### Language modeling\n\nRNNs are well suited for constructing [language models,](https://en.wikipedia.org/wiki/Language_model) where we need to predict the probability of a word (or token) given the history of the tokens that came before it. Here, we will use an LSTM-based RNN to construct a word-level language model from a short passage of text, for a basic demonstration of usage. This model can be scaled to larger problems. State-of-the-art models of this type can require considerable computing resources and training time.\n\nThe text is from the beginning of Virgil's Aeneid, Book I.\n*)\n\nlet text = \"I sing of arms and the man, he who, exiled by fate, first came from the coast of Troy to Italy, and to Lavinian shores – hurled about endlessly by land and sea, by the will of the gods, by cruel Juno’s remorseless anger, long suffering also in war, until he founded a city and brought his gods to Latium: from that the Latin people came, the lords of Alba Longa, the walls of noble Rome. 
Muse, tell me the cause: how was she offended in her divinity, how was she grieved, the Queen of Heaven, to drive a man, noted for virtue, to endure such dangers, to face so many trials? Can there be such anger in the minds of the gods?\"\n\n(**\nHype provides a simple **Hype.NLP.Language** type for tokenizing text. You can look at the [API reference](reference/index.html) and the [source code](https://github.com/hypelib/Hype/blob/master/src/Hype/NLP.fs) for a better understanding of its usage.\n*)\n\nopen Hype\nopen Hype.Neural\nopen Hype.NLP\nopen DiffSharp.AD.Float32\nopen DiffSharp.Util\n\nlet lang = Language(text)\n\nlang.Tokens |> printfn \"%A\"\nlang.Length |> printfn \"%A\"\n\n(**\nThese are the tokens extracted from the text, including some of the punctuation marks. When we are sampling from the RNN language model, we will make use of the \".\" token for signaling the end of a sentence. The punctuation marks are configurable when you are constructing the **Language** instance. If they are not provided, a default set is used.\n\n<pre>\n[|\",\"; \".\"; \":\"; \"?\"; \"Alba\"; \"Can\"; \"Heaven\"; \"I\"; \"Italy\"; \"Juno’s\"; \"Latin\";\n  \"Latium\"; \"Lavinian\"; \"Longa\"; \"Muse\"; \"Queen\"; \"Rome\"; \"Troy\"; \"a\"; \"about\";\n  \"also\"; \"and\"; \"anger\"; \"arms\"; \"be\"; \"brought\"; \"by\"; \"came\"; \"cause\"; \"city\";\n  \"coast\"; \"cruel\"; \"dangers\"; \"divinity\"; \"drive\"; \"endlessly\"; \"endure\";\n  \"exiled\"; \"face\"; \"fate\"; \"first\"; \"for\"; \"founded\"; \"from\"; \"gods\"; \"grieved\";\n  \"he\"; \"her\"; \"his\"; \"how\"; \"hurled\"; \"in\"; \"land\"; \"long\"; \"lords\"; \"man\";\n  \"many\"; \"me\"; \"minds\"; \"noble\"; \"noted\"; \"of\"; \"offended\"; \"people\";\n  \"remorseless\"; \"sea\"; \"she\"; \"shores\"; \"sing\"; \"so\"; \"such\"; \"suffering\";\n  \"tell\"; \"that\"; \"the\"; \"there\"; \"to\"; \"trials\"; \"until\"; \"virtue\"; \"walls\";\n  \"war\"; \"was\"; \"who\"; \"will\"; \"–\"|]\n  \n  
86\n</pre>\nThere are 86 tokens in this language instance.\n\nNow let's transform the full text to a dataset, using the **Language** instance holding these tokens. The text will be encoded in a matrix where each column is a representation of each word as a _one-hot_ vector.\n*)\n\nlet text' = lang.EncodeOneHot(text)\ntext'.Visualize() |> printfn \"%s\"\n\n(**\n<pre>\nDM : 86 x 145\n</pre>\n\nOut of these 145 words, we will construct a dataset where the inputs are the first 144 words and the target outputs are the 144 words starting with a one word shift. This means that, for each word, we want the output (the prediction) to be the following word in our text passage.\n*)\n\nlet data = Dataset(text'.[*, 0..(text'.Cols - 2)],\n                   text'.[*, 1..(text'.Cols - 1)])\n\n(**\n<pre>\nval data : Dataset = Hype.Dataset\n   X: 86 x 144\n   Y: 86 x 144\n</pre>\n\nRNNs, and especially the LSTM variety that we will use, can make predictions that take long-term dependencies and contextual information into account. When the language model is trained with a large enough text corpus and the network has enough capacity, state-of-the-art RNN language models are able to learn complex grammatical relations.\n\nFor our quick demonstration, we use a linear word embedding layer of 20 units, an LSTM of 100 units and a final linear layer of 86 units (the size of our vocabulary) followed by **softmax** activation.\n*)\n\nlet dim = lang.Length // Vocabulary size, here 86\n\nlet n = FeedForward()\nn.Add(Linear(dim, 20))\nn.Add(LSTM(20, 100))\nn.Add(Linear(100, dim))\nn.Add(DM.mapCols softmax)\n\n(**\nYou can also easily stack multiple RNNs on top of each other.\n*)\n\nlet n = FeedForward()\nn.Add(Linear(dim, 20))\nn.Add(LSTM(20, 100))\nn.Add(LSTM(100, 100))\nn.Add(Linear(100, dim))\nn.Add(DM.mapCols softmax)\n\n(**\nWe will observe the performance of our RNN during training by sampling random sentences from the language model. 
\n\nRemember that the final output of the network, through the softmax activation, is a vector of word probabilities. When we are sampling, we start with a word, supply this to the network, and use the resulting probabilities at the output to sample from the vocabulary where words with higher probability are more likely to be selected. We then continue by giving the network the last sampled word and repeating this until we hit an \"end of sentence\" token (we use \".\" here) or reach a limit of maximum sentence length.\n\nThis is how we would sample a sentence starting with a specific word.\n*)\n\nn.Reset()\nfor i = 0 to 5 do\n    lang.Sample(n.Run, \"I\", [|\".\"|], 30) // Use \".\" as the stop token, limit maximum sentence length to 30.\n    |> printfn \"%s\"\n\n(**\n\nBecause the model is not trained, we get sequences of random words from the vocabulary.\n\n<pre>\nI be: she dangers Latium endlessly gods remorseless divinity tell and his offended lords trials? about war trials and anger shores so anger Alba a Alba sing her\nI? came exiled – suffering shores anger came Latium people sing sing remorseless who brought war walls endlessly anger me founded his.\nI – will long of in offended cruel until Queen Italy who anger lords Queen in Longa Muse who people about suffering Italy also grieved cruel hurled who me about\nI endlessly city first by face, a Heaven me hurled sea such long noted she noted many sea city anger I noted remorseless cause Queen to remorseless Italy coast\nI sea noted noble me minds long sing cause people in walls Italy by Longa first, for grieved sea many walls Troy came was endlessly of in Latium Latium\nI and Latin of many suffering Alba Latium war.\n</pre>\n\nWe set a training cycle where we run one epoch of training followed by sampling one sentence starting with the word \"I\". In each epoch, we run through the whole training dataset. 
With a larger training corpus, we could also run the training with minibatches by stating this in the parameter set (commented out below).\n\nLike the sample sentences above, at the beginning of training, we see mostly random orderings of words. As the training progresses, the cross-entropy loss for our dataset is decreasing and the sentences start exhibiting meaningful word patterns.\n*)\n\nfor i = 0 to 1000 do\n    let par = {Params.Default with\n                //Batch = Minibatch 10\n                LearningRate = LearningRate.RMSProp(D 0.01f, D 0.9f)\n                Loss = CrossEntropyOnSoftmax\n                Epochs = 1\n                Silent = true       // Suppress the regular printing of training progress\n                ReturnBest = false} \n    let loss, _ = Layer.Train(n, data, par)\n    printfn \"Epoch: %*i | Loss: %O | Sample: %s\" 3 i loss (lang.Sample(n.Run, \"I\", [|\".\"|], 30))\n\n(**\n\nHere is a selection of sentences demonstrating the progress of training.\n\n<pre>\nEpoch:   0 | Loss: D  4.478101e+000 | Sample: I Queen drive she Alba endlessly Queen the by how tell his from grieved war her there drive people – lords coast he.\nEpoch:  10 | Loss: D  4.102071e+000 | Sample: I people to,, Rome how the he of – sing fate, Muse, by,, Muse the of man Queen Latin and in her cause:\nEpoch:  30 | Loss: D  3.438288e+000 | Sample: I walls long to first dangers she her, to founded to virtue sea first Can dangers a founded about Can Queen lords from sea by remorseless founded endlessly Latium\nEpoch:  40 | Loss: D  2.007577e+000 | Sample: I Alba gods Alba Rome, the walls Alba Muse Rome anger me the the of the gods to who man me first founded offended endlessly until also grieved long\nEpoch:  50 | Loss: D  9.753818e-001 | Sample: I sing people cruel: me the of Rome.\nEpoch:  60 | Loss: D  3.944587e-001 | Sample: I sing sing Troy to so hurled endlessly by land sea, by to – hurled about by the of arms, by Juno’s such anger long also in her\nEpoch:  70 | 
Loss: D  2.131431e-001 | Sample: I sing of and the of Longa, by Juno’s anger was in her of Heaven, to a city brought his gods to a gods to Lavinian hurled to\nEpoch:  80 | Loss: D  1.895453e-001 | Sample: I sing, by will the of Rome.\nEpoch:  90 | Loss: D  1.799535e-001 | Sample: I sing? there Muse the of the of the of arms by the: how she offended in the of? a, he shores hurled by land to\nEpoch: 100 | Loss: D  1.733837e-001 | Sample: I sing arms the of Alba gods who, by Juno’s Rome such anger the of the of arms and, by, by from the coast Rome.\nEpoch: 110 | Loss: D  1.682917e-001 | Sample: I sing Troy by, by from the of arms and, by, by from came, by Juno’s anger long in the of the of arms cruel Muse\nEpoch: 120 | Loss: D  1.639529e-001 | Sample: I sing arms the of Rome.\nEpoch: 130 | Loss: D  1.600647e-001 | Sample: I sing arms and, by Juno’s remorseless there and the of the of arms and, by Alba coast Troy to a – his gods by of the of\nEpoch: 140 | Loss: D  1.564835e-001 | Sample: I sing arms by the of Rome.\nEpoch: 150 | Loss: D  1.531392e-001 | Sample: I sing arms cruel, exiled by coast, he a city in the of the of arms.\nEpoch: 160 | Loss: D  1.499920e-001 | Sample: I sing arms cruel man, by the trials arms to shores hurled endlessly by the of gods Italy, me the of Rome.\nEpoch: 200 | Loss: D  1.390327e-001 | Sample: I sing arms and, by Juno’s such of the of the of arms Italy, by from the sing arms walls of the of Rome.\nEpoch: 230 | Loss: D  1.322940e-001 | Sample: I sing arms the man he, tell from the of arms Italy, by fate, by the of Troy Italy, by fate first from the of the\nEpoch: 260 | Loss: D  1.264137e-001 | Sample: I sing brought Muse Muse the of Heaven, by shores remorseless there he in the of arms cruel, by fate, he from the gods to Italy,\nEpoch: 420 | Loss: D  1.131158e-001 | Sample: I sing of arms the of Heaven, by Juno’s remorseless hurled such in the of arms.\nEpoch: 680 | Loss: D  9.938217e-002 | Sample: I of arms the man he, exiled fate, he 
virtue, to a? Can be such in the of the of of the of arms.\nEpoch: 923 | Loss: D  9.283429e-002 | Sample: I sing of arms and the man he, by fate came from the of to Italy, by the, by Juno’s anger of Rome.\n</pre>\n*)\n"
  },
  {
    "path": "docs/input/Regression.fsx",
    "content": "﻿(*** hide ***)\n#r \"../../src/Hype/bin/Release/netstandard2.0/DiffSharp.dll\"\n#r \"../../src/Hype/bin/Release/netstandard2.0/Hype.dll\"\n#I \"../../packages/R.NET.Community/lib/net40/\"\n#I \"../../packages/R.NET.Community.FSharp/lib/net40/\"\n#I \"../../packages/RProvider\"\n#load \"RProvider.fsx\"\nfsi.ShowDeclarationValues <- true\n\n(**\nRegression\n==========\n\nIn this example we implement a logistic regression based binary classifier and train it to distinguish between the [MNIST](http://yann.lecun.com/exdb/mnist/) digits of 0 and 1.\n\n### Loading the data\n\nFirst, let's start by loading the MNIST training and testing data and arranging these into training, validation, and testing sets.\n*)\n\nopen Hype\nopen Hype.Neural\nopen DiffSharp.AD.Float32\nopen DiffSharp.Util\n\nlet MNIST = Dataset(Util.LoadMNISTPixels(\"C:/datasets/MNIST/train-images.idx3-ubyte\", 60000),\n                    Util.LoadMNISTLabels(\"C:/datasets/MNIST/train-labels.idx1-ubyte\", 60000) |> toDV |> DM.ofDV 1).NormalizeX()\n\n\n\nlet MNISTtrain = MNIST.[..58999]\nlet MNISTvalid = MNIST.[59000..]\n\nlet MNISTtest = Dataset(Util.LoadMNISTPixels(\"C:/datasets/MNIST/t10k-images.idx3-ubyte\", 10000),\n                        Util.LoadMNISTLabels(\"C:/datasets/MNIST/t10k-labels.idx1-ubyte\", 10000) |> toDV |> DM.ofDV 1).NormalizeX()\n\n(**\nWe shuffle the columns of the datasets and filter them to only keep the digits of 0 and 1.\n*)\n\nlet MNISTtrain01 = MNISTtrain.Shuffle().Filter(fun (x, y) -> y.[0] <= D 1.f)\nlet MNISTvalid01 = MNISTvalid.Shuffle().Filter(fun (x, y) -> y.[0] <= D 1.f)\nlet MNISTtest01 = MNISTtest.Shuffle().Filter(fun (x, y) -> y.[0] <= D 1.f)\n\n(**\n<pre>\nval MNISTtrain01 : Dataset = Hype.Dataset\n   X: 784 x 12465\n   Y: 1 x 12465\nval MNISTvalid01 : Dataset = Hype.Dataset\n   X: 784 x 200\n   Y: 1 x 200\nval MNISTtest01 : Dataset = Hype.Dataset\n   X: 784 x 2115\n   Y: 1 x 2115\n</pre>\n\nWe can visualize individual digits from the 
dataset.\n*)\n\nMNISTtrain.X.[*,9] |> DV.visualizeAsDM 28 |> printfn \"%s\"\nMNISTtrain.Y.[*,9]\n\n(**\n    [lang=cs]\n    DM : 28 x 28\n                            \n                            \n                            \n                            \n                          ♦♦    \n                         ▪█▪    \n                        ▴██·    \n                        ♦█♦     \n                 ●     ·█■      \n                ■█     ■█·      \n                ♦█     ██·      \n               ▴█■    ●█♦       \n               ■█    ▪█■        \n              ■█▪   ▴██-        \n            -███♦▴  ♦█▪         \n           ·███■██♦■█■          \n          ·██■  ♦█████■         \n          ♦■-    ♦████▪         \n          -      ██·            \n                ▪█■             \n               ▴█■              \n               ■█▴              \n              ■█▪               \n              ▴█·               \n                            \n                            \n                            \n                            \n\n    val it : DV = DV [|4.0f|]\n\nWe can also visualize a series of digits in grid layout.\n*)\n\nMNISTtrain.[..5].VisualizeXColsAsImageGrid(28) |> printfn \"%s\"\n\n(**\n    [lang=cs]\n    Hype.Dataset\n       X: 784 x 6\n       Y: 1 x 6\n    X's columns reshaped to (28 x 28), presented in a (2 x 3) grid:\n    DM : 56 x 84\n                                                                                    \n                                                                                    \n                                                                                    \n                                                                                    \n                                                ▪█▪                                     \n                    ▴▴● ●██▴                   █████                             ■      \n              -▪●█████■●██♦                   ■███■█             ·              ▴●      \n 
           ██████████-··                    ■███♦·██▴          ▴●              ▪♦      \n            ■█████♦●██                     ●██████-♦█●          ■●              █▪      \n            ·▪-██♦   ▪                     ███♦-█■ ·█●          ■●             ●█▴      \n               ▪█·                        ███● ·▴   ██          █●             ♦█       \n               ▴█♦                       ●█■♦·      ██●        ▴█●             ■█       \n                ♦█·                     ●██·        ██♦        ▪█▴            ●█■       \n                 █■▪-                   ██          ██♦        ▪█           ·●██·       \n                 ·███▴                 ♦█♦          ██♦        ▪█·     ▴▪▪███●██        \n                   ●██▪               ·██-          ██▪        ▪██♦♦♦████♦▪·  ■█        \n                    -██♦              ·█■          ▴█●          ▴●●●●●-      -█■        \n                     ███              ·█■         ▴█■·                       ●█▴        \n                   ▴●██♦              ·█▪        ●█●                         ●█         \n                 ▪■████●              ·█■      -██▪                          ●█         \n               -■████♦·               ·██▪  ·●■█■●                           ●█-        \n              ■████♦·                 ·███■■███♦▴                            ●█-        \n           ●■████♦·                    ♦█████■▪                              ●█▪        \n         ●■█████▴                       ▴███▪                                ●█▪        \n        ▴███■▴▴                                                              -█▪        \n                                                                                    \n                                                                                    \n                                                                                    \n                                                                                    \n                                     
                                               \n                                                                                    \n                                                                                    \n                                                                                    \n                      ▴██                                                    -▴         \n                     -███                                                 ▪♦███▪        \n                     ▴███                    ▪♦██-·▪                    ▪███■■█■        \n                     ██■                   ·■██♦♦███●                 ▪████■  ██        \n                    ■██-                   ██♦   ●██▴                -████■   ██        \n                   ▪██♦                  -██●   ·██■                 ●██■●   ·██        \n                   ███                  ▴██▪    ■██·                  ▴      -██        \n                  ♦██▴                 ▴██●    ·██▴                          ▪██        \n                 -██●                  ■█●    ♦██●                       -▴▴▴♦█♦        \n                ·██♦                   ██  ▴♦████·                      ●█████■         \n                ███▪                   ■█████■■█■                     ■██■●███■▴        \n               ▪███                     ██■▴  ♦█▪                   ·██▴   ♦████·       \n               ■██●                           ██-                  ▴██●   ♦██▴●██●      \n              ███♦                           ·██                  ▴██-   ♦█■   ·●███■   \n              ███·                            ██                 -██· ·●██▴      ·●●    \n             ▪███                            ·██                 ■██♦■███▴              \n             ■██▪                            -██                 ♦████●▴                \n             ██■                              ██                  -▪▴                   \n             ██■                              ■█                         
               \n             ♦█■                              -█♦                                       \n                                               ●█●                                      \n                                                ▪█                                      \n                                                                                    \n*)\n\nMNISTtrain01.[..5].VisualizeXColsAsImageGrid(28) |> printfn \"%s\"\n\n(**\n    [lang=cs]\n    Hype.Dataset\n       X: 784 x 6\n       Y: 1 x 6\n    X's columns reshaped to (28 x 28), presented in a (2 x 3) grid:\n    DM : 56 x 84\n                                                                                    \n                                                                                    \n                                                                                    \n                                                                                    \n                     ▴●███-                      ·♦██                                   \n                  ·▪■█████■                      ████●                   -▪██████       \n                 ▪████████-                     ●████■                 ▪██████████      \n               ●████▪ ●███▴                     ■████●                ■███■▪▪▴-███●     \n              ████●   ▴███·                    ██████               -███■     ██████    \n             ███▪      ██▴                    -██████               ■██▴      ████▴♦-   \n            ■██·       ●█●                   ▪█████■               ■██        ■█▴█■     \n           ●██·        ▴█●                   ██████               ▪██●         - ■█-    \n          -██▪         ▴█●                   ██████               ██■            ▪█▴    \n          ██■          ▴█●                  ■█████▴              ▴██-            ■█▴    \n         ·██           ▴█●                 ██████-               ▴██             ██▴    \n         ■█■           ▴█●                 █████▴       
         ▴█■             ██     \n         ■█▪           ▴█●                ♦████-                 ▴█▪            ▴█▪     \n         ██-           ●█●               ♦█████                  ▴█▪            ●█·     \n         ██▴           ██·               █████♦                  -██           -█▪      \n         ███          ●██·              ●█████·                   ██■           █-      \n         -███●       ▴██●              ·█████                     ███          ■■       \n           ■█████♦●●■███■              ♦█████                      ██-       -■█·       \n            ▴■█████████♦               █████♦                      ●██●    -■██▪        \n               ·▪▪●█♦▪▴                ▴████·                       ●████████■-         \n                                                                     ▪█████▪-           \n                                                                                    \n                                                                                    \n                                                                                    \n                                                                                    \n                                                                                    \n                                                                                    \n                                                                                    \n                                                ■▴                         ·····        \n                                                ▴■                       ▪■█████·       \n               ♦♦♦♦♦-·                          ■█                     ▴█████████-      \n             ▪████████■▪                        ██                     ███████████·     \n             ■███████████■                     ▪█■                   ▴■████♦♦ ▴████     \n            █████████  ♦███                    ██-                  ▴████♦▴    ■███·    \n          
▪███●     ●   ·██■                   █●                  ▴████■      ♦███-    \n          ██■            -██·                 ▪█▴                  ████▪        ████    \n         ▪██              ██♦                 ██                   ███-         ████    \n         ●█♦              ▪██                ▪██                 ·███·          ████    \n         ██▴              ▴██                ●██                ·███♦          ▪████    \n         ██               ♦█▪                ██●                ·███          ▪████·    \n         ██-              ██                 ██·                ●███        ·●█████·    \n         ♦█●             ■██                ·██-                ████·      ■██████▴     \n         ▪█■            ■██-                 █♦                 ████■▴▴▴■████████▪      \n         -██●         ▴███♦                 -█♦                 ■██████████████♦        \n          ♦██♦-▪    ▪♦██■-                  ■█♦                 ·████████████▪▴         \n           ♦███████████●                    ■█-                   -█████████▴           \n            ▴■█■■■■■■·                      █■                       ▪ ▪▪               \n                                            ♦▴                                          \n                                                                                    \n                                                                                    \n                                                                                    \n                                                                                    \n### Defining the model\n\nLet's now create our linear regression model. We implement this using the **Hype.Neural** module, as a linear layer with $28 \\times 28 = 784$ inputs and one output. 
The output of the layer is passed through the sigmoid function.\n\n*)\n\nlet n = Neural.FeedForward()\nn.Add(Linear(28 * 28, 1))\nn.Add(sigmoid)\n\n(**\n\nWe can visualize the initial state of the linear model weights before the training. For information about weight initialization parameters, please see the [neural networks example](feedforwardnets.html).\n\n*)\n\nlet l = (n.[0] :?> Linear)\nl.VisualizeWRowsAsImageGrid(28) |> printfn \"%s\"\n\n(** \n\n    [lang=cs]\n    Hype.Neural.Linear\n       784 -> 1\n       Learnable parameters: 785\n       Init: Standard\n       W's rows reshaped to (28 x 28), presented in a (1 x 1) grid:\n    DM : 28 x 28\n     ▴▪●●-█▴♦♦● ·▴█● ● ▴· ●●●●▪·\n    ■ █- ▴●●▪ ■♦· ■▪■▪   █  ♦■●■\n    ♦■ █♦●▪●♦  ♦■   ♦     ■ ▪- ■\n     ■▪ ■♦■♦ █ ▪● ♦▪▴··■█ -▴●▪▪●\n    ██··▴●●█▪♦■ -·█■ ▪- ··▪·  ██\n    - ▪   ♦ ▪●  ▪■█♦- ▴▪ ▴·  ▪·●\n    -   ●●▴▴ ▪■ ▴█ ▪▴·▴▴·♦■■♦·■■\n    ♦▴ ▪■ ▪▪▴■·■--▪♦-   ·♦▪■ ♦·●\n     ·▴·♦▪♦●▪··▴·▪ ● ▪ █  ▴▪·♦▪ \n    ■ ▴ ♦█▴ -  ♦●■  █▪■●▪█■▴●--█\n    ♦■   ●■▴♦ ●· █· ▴· -█-▪●■■-■\n     █-·▪▴-▴█ ♦ █●·♦█▪▪●●■ -   ·\n     -   █ ■♦·●▪▴♦ -▴ -  ■♦· ♦ -\n    ■█ ▪-  ▪■●♦█▴-█▪■  ■♦▪█■▪■ -\n    ●♦█▴♦♦ ♦   ▴▪▴▴♦-▴♦♦█ ▴ ▪·● \n     ·█▪■■█ ●· ●· -●■●··  ▴  --▴\n    ·♦█▴ ♦♦■ ▴▪●▪-  · -♦●♦ ■ · ■\n    ■■▪---♦■·●▴▪-▪▴· ▪●● ·♦■ ▪♦▴\n    ▴ -♦●■█·█   ● ♦▪●■- ·■♦-▪▴■▴\n     ●-■● ···●█▴▪ -█·▪ ♦▴    ● ●\n    ·█  █▴ ·♦---■▴·█■■▴ ▴■  -  █\n    - ▪  ●█·▴♦▪    ■ ▪■ ■···   ▴\n    ■ ♦♦- █▪♦-- ▴ ▴ ··█▴● ■♦    \n    ■·■■▪▴-·█♦●■ ▴ ♦ ♦▴■♦  ■ ●♦▪\n    ·█▪- ■●▴▪▴▪ ▪  ▴▪ ·   ▪▴▴··♦\n      ▪█♦■   ·♦ ■▪ ♦ ▴·●█▪· ·▪▴ \n    · ■♦▪■ ▪■● ♦  ··· ·▪█■·  ▪■●\n    ●▴▪ ·■● -█●█·▪■▴ ▴▴♦  ■  ■ ▴\n\n       b:\n    DV : 1\n     \n\n### Training\n\nLet's train the model for 10 epochs (full passes through the training data), with a minibatch size of 100, using the training and validation sets we've defined. 
The validation set will make sure that we're not overfitting the model.\n*)\n\nlet p = {Params.Default with \n            Epochs = 10; \n            Batch = Minibatch 100; \n            EarlyStopping = EarlyStopping.DefaultEarly}\n\nn.Train(MNISTtrain01, MNISTvalid01, p)\n\n(**\n<pre>\n[12/11/2015 20:21:12] --- Training started\n[12/11/2015 20:21:12] Parameters     : 785\n[12/11/2015 20:21:12] Iterations     : 1240\n[12/11/2015 20:21:12] Epochs         : 10\n[12/11/2015 20:21:12] Batches        : Minibatches of 100 (124 per epoch)\n[12/11/2015 20:21:12] Training data  : 12465\n[12/11/2015 20:21:12] Validation data: 200\n[12/11/2015 20:21:12] Valid. interval: 10\n[12/11/2015 20:21:12] Method         : Gradient descent\n[12/11/2015 20:21:12] Learning rate  : RMSProp a0 = D 0.00100000005f, k = D 0.899999976f\n[12/11/2015 20:21:12] Momentum       : None\n[12/11/2015 20:21:12] Loss           : L2 norm\n[12/11/2015 20:21:12] Regularizer    : L2 lambda = D 9.99999975e-05f\n[12/11/2015 20:21:12] Gradient clip. : None\n[12/11/2015 20:21:12] Early stopping : Stagnation thresh. = 750, overfit. thresh. = 10\n[12/11/2015 20:21:12] Improv. 
thresh.: D 0.995000005f\n[12/11/2015 20:21:12] Return best    : true\n[12/11/2015 20:21:12]  1/10 | Batch   1/124 | D  4.748471e-001 [- ] | Valid D  4.866381e-001 [- ] | Stag:  0 Ovfit: 0\n[12/11/2015 20:21:12]  1/10 | Batch  11/124 | D  2.772053e-001 [↓▼] | Valid D  3.013612e-001 [↓▼] | Stag:  0 Ovfit: 0\n[12/11/2015 20:21:12]  1/10 | Batch  21/124 | D  2.178165e-001 [↓▼] | Valid D  2.304372e-001 [↓▼] | Stag:  0 Ovfit: 0\n[12/11/2015 20:21:12]  1/10 | Batch  31/124 | D  2.009703e-001 [↓▼] | Valid D  1.799015e-001 [↓▼] | Stag:  0 Ovfit: 0\n[12/11/2015 20:21:12]  1/10 | Batch  41/124 | D  1.352896e-001 [↓▼] | Valid D  1.405802e-001 [↓▼] | Stag:  0 Ovfit: 0\n[12/11/2015 20:21:12]  1/10 | Batch  51/124 | D  1.182899e-001 [↓▼] | Valid D  1.108390e-001 [↓▼] | Stag:  0 Ovfit: 0\n[12/11/2015 20:21:12]  1/10 | Batch  61/124 | D  1.124191e-001 [↓▼] | Valid D  8.995526e-002 [↓▼] | Stag:  0 Ovfit: 0\n[12/11/2015 20:21:12]  1/10 | Batch  71/124 | D  8.975799e-002 [↓▼] | Valid D  7.361954e-002 [↓▼] | Stag:  0 Ovfit: 0\n[12/11/2015 20:21:12]  1/10 | Batch  81/124 | D  5.031444e-002 [↓▼] | Valid D  5.941865e-002 [↓▼] | Stag:  0 Ovfit: 0\n[12/11/2015 20:21:12]  1/10 | Batch  91/124 | D  5.063754e-002 [↑ ] | Valid D  4.927430e-002 [↓▼] | Stag:  0 Ovfit: 0\n[12/11/2015 20:21:12]  1/10 | Batch 101/124 | D  3.842642e-002 [↓▼] | Valid D  4.095582e-002 [↓▼] | Stag:  0 Ovfit: 0\n[12/11/2015 20:21:12]  1/10 | Batch 111/124 | D  4.326219e-002 [↑ ] | Valid D  3.452797e-002 [↓▼] | Stag:  0 Ovfit: 0\n[12/11/2015 20:21:12]  1/10 | Batch 121/124 | D  2.585407e-002 [↓▼] | Valid D  2.788338e-002 [↓▼] | Stag:  0 Ovfit: 0\n[12/11/2015 20:21:12]  2/10 | Batch   1/124 | D  3.069563e-002 [↑ ] | Valid D  2.663207e-002 [↓▼] | Stag:  0 Ovfit: 0\n[12/11/2015 20:21:12]  2/10 | Batch  11/124 | D  1.765305e-002 [↓▼] | Valid D  2.332163e-002 [↓▼] | Stag:  0 Ovfit: 0\n[12/11/2015 20:21:12]  2/10 | Batch  21/124 | D  2.314118e-002 [↑ ] | Valid D  1.902804e-002 [↓▼] | Stag:  0 Ovfit: 0\n[12/11/2015 20:21:12]  
2/10 | Batch  31/124 | D  3.177435e-002 [↑ ] | Valid D  1.691620e-002 [↓▼] | Stag:  0 Ovfit: 0\n[12/11/2015 20:21:12]  2/10 | Batch  41/124 | D  2.219648e-002 [↓ ] | Valid D  1.455527e-002 [↓▼] | Stag:  0 Ovfit: 0\n[12/11/2015 20:21:12]  2/10 | Batch  51/124 | D  1.205402e-002 [↓▼] | Valid D  1.240637e-002 [↓▼] | Stag:  0 Ovfit: 0\n[12/11/2015 20:21:12]  2/10 | Batch  61/124 | D  3.891717e-002 [↑ ] | Valid D  1.189688e-002 [↓▼] | Stag:  0 Ovfit: 0\n[12/11/2015 20:21:12]  2/10 | Batch  71/124 | D  2.114762e-002 [↓ ] | Valid D  1.083007e-002 [↓▼] | Stag:  0 Ovfit: 0\n[12/11/2015 20:21:12]  2/10 | Batch  81/124 | D  5.075417e-003 [↓▼] | Valid D  9.630994e-003 [↓▼] | Stag:  0 Ovfit: 0\n[12/11/2015 20:21:12]  2/10 | Batch  91/124 | D  1.343214e-002 [↑ ] | Valid D  8.666289e-003 [↓▼] | Stag:  0 Ovfit: 0\n[12/11/2015 20:21:13]  2/10 | Batch 101/124 | D  6.054885e-003 [↓ ] | Valid D  8.039203e-003 [↓▼] | Stag:  0 Ovfit: 0\n[12/11/2015 20:21:13]  2/10 | Batch 111/124 | D  1.964125e-002 [↑ ] | Valid D  7.339509e-003 [↓▼] | Stag:  0 Ovfit: 0\n[12/11/2015 20:21:13]  2/10 | Batch 121/124 | D  4.401092e-003 [↓▼] | Valid D  6.376633e-003 [↓▼] | Stag:  0 Ovfit: 0\n[12/11/2015 20:21:13]  3/10 | Batch   1/124 | D  7.068173e-003 [↑ ] | Valid D  6.426438e-003 [↑ ] | Stag: 10 Ovfit: 0\n[12/11/2015 20:21:13]  3/10 | Batch  11/124 | D  3.763680e-003 [↓▼] | Valid D  6.076077e-003 [↓▼] | Stag:  0 Ovfit: 0\n[12/11/2015 20:21:13]  3/10 | Batch  21/124 | D  9.855231e-003 [↑ ] | Valid D  5.091224e-003 [↓▼] | Stag:  0 Ovfit: 0\n[12/11/2015 20:21:13]  3/10 | Batch  31/124 | D  1.263964e-002 [↑ ] | Valid D  4.641499e-003 [↓▼] | Stag:  0 Ovfit: 0\n[12/11/2015 20:21:13]  3/10 | Batch  41/124 | D  1.205439e-002 [↓ ] | Valid D  4.599225e-003 [↓▼] | Stag:  0 Ovfit: 0\n[12/11/2015 20:21:13]  3/10 | Batch  51/124 | D  2.941387e-003 [↓▼] | Valid D  4.381890e-003 [↓▼] | Stag:  0 Ovfit: 0\n[12/11/2015 20:21:13]  3/10 | Batch  61/124 | D  2.546543e-002 [↑ ] | Valid D  4.439059e-003 [↑ ] | Stag: 10 Ovfit: 
0\n[12/11/2015 20:21:13]  3/10 | Batch  71/124 | D  9.878366e-003 [↓ ] | Valid D  4.358966e-003 [↓▼] | Stag:  0 Ovfit: 0\n[12/11/2015 20:21:13]  3/10 | Batch  81/124 | D  1.868963e-003 [↓▼] | Valid D  3.960044e-003 [↓▼] | Stag:  0 Ovfit: 0\n[12/11/2015 20:21:13]  3/10 | Batch  91/124 | D  7.171181e-003 [↑ ] | Valid D  3.634899e-003 [↓▼] | Stag:  0 Ovfit: 0\n[12/11/2015 20:21:13]  3/10 | Batch 101/124 | D  2.681098e-003 [↓ ] | Valid D  3.636524e-003 [↑ ] | Stag: 10 Ovfit: 0\n[12/11/2015 20:21:13]  3/10 | Batch 111/124 | D  1.502046e-002 [↑ ] | Valid D  3.393996e-003 [↓▼] | Stag:  0 Ovfit: 0\n[12/11/2015 20:21:13]  3/10 | Batch 121/124 | D  2.381395e-003 [↓ ] | Valid D  3.178693e-003 [↓▼] | Stag:  0 Ovfit: 0\n[12/11/2015 20:21:13]  4/10 | Batch   1/124 | D  3.185510e-003 [↑ ] | Valid D  3.240891e-003 [↑ ] | Stag: 10 Ovfit: 0\n[12/11/2015 20:21:13]  4/10 | Batch  11/124 | D  2.029225e-003 [↓ ] | Valid D  3.163968e-003 [↓ ] | Stag: 20 Ovfit: 0\n[12/11/2015 20:21:13]  4/10 | Batch  21/124 | D  6.450378e-003 [↑ ] | Valid D  2.772849e-003 [↓▼] | Stag:  0 Ovfit: 0\n[12/11/2015 20:21:13]  4/10 | Batch  31/124 | D  7.448227e-003 [↑ ] | Valid D  2.572560e-003 [↓▼] | Stag:  0 Ovfit: 0\n[12/11/2015 20:21:13]  4/10 | Batch  41/124 | D  9.700718e-003 [↑ ] | Valid D  2.693694e-003 [↑ ] | Stag: 10 Ovfit: 0\n[12/11/2015 20:21:13]  4/10 | Batch  51/124 | D  1.799919e-003 [↓▼] | Valid D  2.737873e-003 [↑ ] | Stag: 20 Ovfit: 1\n[12/11/2015 20:21:13]  4/10 | Batch  61/124 | D  1.919956e-002 [↑ ] | Valid D  2.778393e-003 [↑ ] | Stag: 30 Ovfit: 3\n[12/11/2015 20:21:13]  4/10 | Batch  71/124 | D  5.462923e-003 [↓ ] | Valid D  2.870561e-003 [↑ ] | Stag: 40 Ovfit: 3\n[12/11/2015 20:21:13]  4/10 | Batch  81/124 | D  1.455469e-003 [↓▼] | Valid D  2.632472e-003 [↓ ] | Stag: 50 Ovfit: 4\n[12/11/2015 20:21:14]  4/10 | Batch  91/124 | D  5.270801e-003 [↑ ] | Valid D  2.455564e-003 [↓▼] | Stag:  0 Ovfit: 0\n[12/11/2015 20:21:14]  4/10 | Batch 101/124 | D  2.057914e-003 [↓ ] | Valid D  2.511977e-003 
[↑ ] | Stag: 10 Ovfit: 0\n[12/11/2015 20:21:14]  4/10 | Batch 111/124 | D  1.314815e-002 [↑ ] | Valid D  2.393763e-003 [↓▼] | Stag:  0 Ovfit: 0\n[12/11/2015 20:21:14]  4/10 | Batch 121/124 | D  2.033168e-003 [↓ ] | Valid D  2.358985e-003 [↓▼] | Stag:  0 Ovfit: 0\n[12/11/2015 20:21:14]  5/10 | Batch   1/124 | D  2.199435e-003 [↑ ] | Valid D  2.389120e-003 [↑ ] | Stag: 10 Ovfit: 0\n[12/11/2015 20:21:14]  5/10 | Batch  11/124 | D  1.668178e-003 [↓ ] | Valid D  2.356529e-003 [↓ ] | Stag: 20 Ovfit: 0\n[12/11/2015 20:21:14]  5/10 | Batch  21/124 | D  5.649061e-003 [↑ ] | Valid D  2.151499e-003 [↓▼] | Stag:  0 Ovfit: 0\n[12/11/2015 20:21:14]  5/10 | Batch  31/124 | D  5.264180e-003 [↓ ] | Valid D  2.038927e-003 [↓▼] | Stag:  0 Ovfit: 0\n[12/11/2015 20:21:14]  5/10 | Batch  41/124 | D  8.416546e-003 [↑ ] | Valid D  2.145057e-003 [↑ ] | Stag: 10 Ovfit: 0\n[12/11/2015 20:21:14]  5/10 | Batch  51/124 | D  1.564733e-003 [↓ ] | Valid D  2.208556e-003 [↑ ] | Stag: 20 Ovfit: 0\n[12/11/2015 20:21:14]  5/10 | Batch  61/124 | D  1.581773e-002 [↑ ] | Valid D  2.233998e-003 [↑ ] | Stag: 30 Ovfit: 0\n[12/11/2015 20:21:14]  5/10 | Batch  71/124 | D  3.898179e-003 [↓ ] | Valid D  2.347554e-003 [↑ ] | Stag: 40 Ovfit: 0\n[12/11/2015 20:21:14]  5/10 | Batch  81/124 | D  1.395002e-003 [↓▼] | Valid D  2.182974e-003 [↓ ] | Stag: 50 Ovfit: 1\n[12/11/2015 20:21:14]  5/10 | Batch  91/124 | D  4.450763e-003 [↑ ] | Valid D  2.069927e-003 [↓ ] | Stag: 60 Ovfit: 1\n[12/11/2015 20:21:14]  5/10 | Batch 101/124 | D  1.927794e-003 [↓ ] | Valid D  2.129479e-003 [↑ ] | Stag: 70 Ovfit: 1\n[12/11/2015 20:21:14]  5/10 | Batch 111/124 | D  1.238949e-002 [↑ ] | Valid D  2.059099e-003 [↓ ] | Stag: 80 Ovfit: 1\n[12/11/2015 20:21:14]  5/10 | Batch 121/124 | D  1.969593e-003 [↓ ] | Valid D  2.072177e-003 [↑ ] | Stag: 90 Ovfit: 1\n[12/11/2015 20:21:14]  6/10 | Batch   1/124 | D  1.885590e-003 [↓ ] | Valid D  2.087292e-003 [↑ ] | Stag:100 Ovfit: 1\n[12/11/2015 20:21:14]  6/10 | Batch  11/124 | D  1.577425e-003 [↓ ] | 
Valid D  2.074389e-003 [↓ ] | Stag:110 Ovfit: 1\n[12/11/2015 20:21:14]  6/10 | Batch  21/124 | D  5.410788e-003 [↑ ] | Valid D  1.943973e-003 [↓▼] | Stag:  0 Ovfit: 0\n[12/11/2015 20:21:14]  6/10 | Batch  31/124 | D  4.188792e-003 [↓ ] | Valid D  1.863442e-003 [↓▼] | Stag:  0 Ovfit: 0\n[12/11/2015 20:21:14]  6/10 | Batch  41/124 | D  7.516511e-003 [↑ ] | Valid D  1.951990e-003 [↑ ] | Stag: 10 Ovfit: 0\n[12/11/2015 20:21:14]  6/10 | Batch  51/124 | D  1.510475e-003 [↓ ] | Valid D  2.003860e-003 [↑ ] | Stag: 20 Ovfit: 0\n[12/11/2015 20:21:14]  6/10 | Batch  61/124 | D  1.375423e-002 [↑ ] | Valid D  2.020531e-003 [↑ ] | Stag: 30 Ovfit: 0\n[12/11/2015 20:21:14]  6/10 | Batch  71/124 | D  3.260145e-003 [↓ ] | Valid D  2.129138e-003 [↑ ] | Stag: 40 Ovfit: 0\n[12/11/2015 20:21:15]  6/10 | Batch  81/124 | D  1.402565e-003 [↓ ] | Valid D  2.002138e-003 [↓ ] | Stag: 50 Ovfit: 0\n[12/11/2015 20:21:15]  6/10 | Batch  91/124 | D  3.999386e-003 [↑ ] | Valid D  1.920336e-003 [↓ ] | Stag: 60 Ovfit: 0\n[12/11/2015 20:21:15]  6/10 | Batch 101/124 | D  1.929424e-003 [↓ ] | Valid D  1.976652e-003 [↑ ] | Stag: 70 Ovfit: 0\n[12/11/2015 20:21:15]  6/10 | Batch 111/124 | D  1.205915e-002 [↑ ] | Valid D  1.926643e-003 [↓ ] | Stag: 80 Ovfit: 0\n[12/11/2015 20:21:15]  6/10 | Batch 121/124 | D  1.978536e-003 [↓ ] | Valid D  1.951888e-003 [↑ ] | Stag: 90 Ovfit: 0\n[12/11/2015 20:21:15]  7/10 | Batch   1/124 | D  1.769614e-003 [↓ ] | Valid D  1.959661e-003 [↑ ] | Stag:100 Ovfit: 0\n[12/11/2015 20:21:15]  7/10 | Batch  11/124 | D  1.555518e-003 [↓ ] | Valid D  1.955613e-003 [↓ ] | Stag:110 Ovfit: 0\n[12/11/2015 20:21:15]  7/10 | Batch  21/124 | D  5.217655e-003 [↑ ] | Valid D  1.861573e-003 [↓ ] | Stag:120 Ovfit: 0\n[12/11/2015 20:21:15]  7/10 | Batch  31/124 | D  3.625835e-003 [↓ ] | Valid D  1.796666e-003 [↓▼] | Stag:  0 Ovfit: 0\n[12/11/2015 20:21:15]  7/10 | Batch  41/124 | D  6.929778e-003 [↑ ] | Valid D  1.872346e-003 [↑ ] | Stag: 10 Ovfit: 0\n[12/11/2015 20:21:15]  7/10 | Batch  51/124 | 
D  1.502809e-003 [↓ ] | Valid D  1.913079e-003 [↑ ] | Stag: 20 Ovfit: 0\n[12/11/2015 20:21:15]  7/10 | Batch  61/124 | D  1.241405e-002 [↑ ] | Valid D  1.924762e-003 [↑ ] | Stag: 30 Ovfit: 0\n[12/11/2015 20:21:15]  7/10 | Batch  71/124 | D  2.962820e-003 [↓ ] | Valid D  2.024504e-003 [↑ ] | Stag: 40 Ovfit: 0\n[12/11/2015 20:21:15]  7/10 | Batch  81/124 | D  1.421725e-003 [↓ ] | Valid D  1.919308e-003 [↓ ] | Stag: 50 Ovfit: 0\n[12/11/2015 20:21:15]  7/10 | Batch  91/124 | D  3.717377e-003 [↑ ] | Valid D  1.854433e-003 [↓ ] | Stag: 60 Ovfit: 0\n[12/11/2015 20:21:15]  7/10 | Batch 101/124 | D  1.973184e-003 [↓ ] | Valid D  1.907719e-003 [↑ ] | Stag: 70 Ovfit: 0\n[12/11/2015 20:21:15]  7/10 | Batch 111/124 | D  1.190252e-002 [↑ ] | Valid D  1.867085e-003 [↓ ] | Stag: 80 Ovfit: 0\n[12/11/2015 20:21:15]  7/10 | Batch 121/124 | D  2.006255e-003 [↓ ] | Valid D  1.894716e-003 [↑ ] | Stag: 90 Ovfit: 0\n[12/11/2015 20:21:15]  8/10 | Batch   1/124 | D  1.721533e-003 [↓ ] | Valid D  1.898627e-003 [↑ ] | Stag:100 Ovfit: 0\n[12/11/2015 20:21:15]  8/10 | Batch  11/124 | D  1.553262e-003 [↓ ] | Valid D  1.897926e-003 [↓ ] | Stag:110 Ovfit: 0\n[12/11/2015 20:21:15]  8/10 | Batch  21/124 | D  5.004487e-003 [↑ ] | Valid D  1.823838e-003 [↓ ] | Stag:120 Ovfit: 0\n[12/11/2015 20:21:15]  8/10 | Batch  31/124 | D  3.308986e-003 [↓ ] | Valid D  1.768821e-003 [↓▼] | Stag:  0 Ovfit: 0\n[12/11/2015 20:21:15]  8/10 | Batch  41/124 | D  6.563510e-003 [↑ ] | Valid D  1.835302e-003 [↑ ] | Stag: 10 Ovfit: 0\n[12/11/2015 20:21:15]  8/10 | Batch  51/124 | D  1.507999e-003 [↓ ] | Valid D  1.868091e-003 [↑ ] | Stag: 20 Ovfit: 0\n[12/11/2015 20:21:15]  8/10 | Batch  61/124 | D  1.148601e-002 [↑ ] | Valid D  1.876653e-003 [↑ ] | Stag: 30 Ovfit: 0\n[12/11/2015 20:21:16]  8/10 | Batch  71/124 | D  2.807777e-003 [↓ ] | Valid D  1.968064e-003 [↑ ] | Stag: 40 Ovfit: 0\n[12/11/2015 20:21:16]  8/10 | Batch  81/124 | D  1.440011e-003 [↓ ] | Valid D  1.876611e-003 [↓ ] | Stag: 50 Ovfit: 0\n[12/11/2015 20:21:16]  
8/10 | Batch  91/124 | D  3.522004e-003 [↑ ] | Valid D  1.821817e-003 [↓ ] | Stag: 60 Ovfit: 0\n[12/11/2015 20:21:16]  8/10 | Batch 101/124 | D  2.031282e-003 [↓ ] | Valid D  1.872902e-003 [↑ ] | Stag: 70 Ovfit: 0\n[12/11/2015 20:21:16]  8/10 | Batch 111/124 | D  1.182362e-002 [↑ ] | Valid D  1.836957e-003 [↓ ] | Stag: 80 Ovfit: 0\n[12/11/2015 20:21:16]  8/10 | Batch 121/124 | D  2.035742e-003 [↓ ] | Valid D  1.864137e-003 [↑ ] | Stag: 90 Ovfit: 0\n[12/11/2015 20:21:16]  9/10 | Batch   1/124 | D  1.699795e-003 [↓ ] | Valid D  1.865989e-003 [↑ ] | Stag:100 Ovfit: 0\n[12/11/2015 20:21:16]  9/10 | Batch  11/124 | D  1.556397e-003 [↓ ] | Valid D  1.866347e-003 [↑ ] | Stag:110 Ovfit: 0\n[12/11/2015 20:21:16]  9/10 | Batch  21/124 | D  4.788828e-003 [↑ ] | Valid D  1.804229e-003 [↓ ] | Stag:120 Ovfit: 0\n[12/11/2015 20:21:16]  9/10 | Batch  31/124 | D  3.119682e-003 [↓ ] | Valid D  1.756223e-003 [↓▼] | Stag:  0 Ovfit: 0\n[12/11/2015 20:21:16]  9/10 | Batch  41/124 | D  6.336636e-003 [↑ ] | Valid D  1.816257e-003 [↑ ] | Stag: 10 Ovfit: 0\n[12/11/2015 20:21:16]  9/10 | Batch  51/124 | D  1.516153e-003 [↓ ] | Valid D  1.843593e-003 [↑ ] | Stag: 20 Ovfit: 0\n[12/11/2015 20:21:16]  9/10 | Batch  61/124 | D  1.080968e-002 [↑ ] | Valid D  1.850113e-003 [↑ ] | Stag: 30 Ovfit: 0\n[12/11/2015 20:21:16]  9/10 | Batch  71/124 | D  2.720124e-003 [↓ ] | Valid D  1.934669e-003 [↑ ] | Stag: 40 Ovfit: 0\n[12/11/2015 20:21:16]  9/10 | Batch  81/124 | D  1.455176e-003 [↓ ] | Valid D  1.852409e-003 [↓ ] | Stag: 50 Ovfit: 0\n[12/11/2015 20:21:16]  9/10 | Batch  91/124 | D  3.375944e-003 [↑ ] | Valid D  1.804057e-003 [↓ ] | Stag: 60 Ovfit: 0\n[12/11/2015 20:21:16]  9/10 | Batch 101/124 | D  2.093168e-003 [↓ ] | Valid D  1.853583e-003 [↑ ] | Stag: 70 Ovfit: 0\n[12/11/2015 20:21:16]  9/10 | Batch 111/124 | D  1.178356e-002 [↑ ] | Valid D  1.820183e-003 [↓ ] | Stag: 80 Ovfit: 0\n[12/11/2015 20:21:16]  9/10 | Batch 121/124 | D  2.061530e-003 [↓ ] | Valid D  1.846045e-003 [↑ ] | Stag: 90 Ovfit: 
0\n[12/11/2015 20:21:16] 10/10 | Batch   1/124 | D  1.689459e-003 [↓ ] | Valid D  1.846794e-003 [↑ ] | Stag:100 Ovfit: 0\n[12/11/2015 20:21:16] 10/10 | Batch  11/124 | D  1.560583e-003 [↓ ] | Valid D  1.847311e-003 [↑ ] | Stag:110 Ovfit: 0\n[12/11/2015 20:21:16] 10/10 | Batch  21/124 | D  4.588457e-003 [↑ ] | Valid D  1.792883e-003 [↓ ] | Stag:120 Ovfit: 0\n[12/11/2015 20:21:16] 10/10 | Batch  31/124 | D  3.001853e-003 [↓ ] | Valid D  1.750141e-003 [↓ ] | Stag:130 Ovfit: 0\n[12/11/2015 20:21:16] 10/10 | Batch  41/124 | D  6.195725e-003 [↑ ] | Valid D  1.805622e-003 [↑ ] | Stag:140 Ovfit: 0\n[12/11/2015 20:21:16] 10/10 | Batch  51/124 | D  1.524289e-003 [↓ ] | Valid D  1.829196e-003 [↑ ] | Stag:150 Ovfit: 0\n[12/11/2015 20:21:17] 10/10 | Batch  61/124 | D  1.029841e-002 [↑ ] | Valid D  1.834366e-003 [↑ ] | Stag:160 Ovfit: 0\n[12/11/2015 20:21:17] 10/10 | Batch  71/124 | D  2.667856e-003 [↓ ] | Valid D  1.913492e-003 [↑ ] | Stag:170 Ovfit: 0\n[12/11/2015 20:21:17] 10/10 | Batch  81/124 | D  1.467351e-003 [↓ ] | Valid D  1.837669e-003 [↓ ] | Stag:180 Ovfit: 0\n[12/11/2015 20:21:17] 10/10 | Batch  91/124 | D  3.261143e-003 [↑ ] | Valid D  1.793646e-003 [↓ ] | Stag:190 Ovfit: 0\n[12/11/2015 20:21:17] 10/10 | Batch 101/124 | D  2.153974e-003 [↓ ] | Valid D  1.842048e-003 [↑ ] | Stag:200 Ovfit: 0\n[12/11/2015 20:21:17] 10/10 | Batch 111/124 | D  1.176465e-002 [↑ ] | Valid D  1.810117e-003 [↓ ] | Stag:210 Ovfit: 0\n[12/11/2015 20:21:17] 10/10 | Batch 121/124 | D  2.082179e-003 [↓ ] | Valid D  1.834467e-003 [↑ ] | Stag:220 Ovfit: 0\n[12/11/2015 20:21:17] Duration       : 00:00:05.2093910\n[12/11/2015 20:21:17] Loss initial   : D  4.748471e-001\n[12/11/2015 20:21:17] Loss final     : D  1.395002e-003 (Best)\n[12/11/2015 20:21:17] Loss change    : D -4.734521e-001 (-99.71 %)\n[12/11/2015 20:21:17] Loss chg. 
/ s  : D -9.088434e-002\n[12/11/2015 20:21:17] Epochs / s     : 1.919610181\n[12/11/2015 20:21:17] Epochs / min   : 115.1766109\n[12/11/2015 20:21:17] --- Training finished\n\n</pre>\n\nAfter a 5-second training, we can see that the characteristics of the problem domain (distinguishing between the digits of 0 and 1) is captured in the model weights.\n\n*)\n\nlet l = (n.[0] :?> Linear)\nl.VisualizeWRowsAsImageGrid(28) |> printfn \"%s\"\n\n(**\n    [lang=cs]\n    Hype.Neural.Linear\n       784 -> 1\n       Learnable parameters: 785\n       Init: Standard\n       W's rows reshaped to (28 x 28), presented in a (1 x 1) grid:\n    DM : 28 x 28\n    ----------------------------\n    ----------------------------\n    ------------▴▴▴▴▴-----------\n    ---------▴--▴▴▴▴▴▴-▴--------\n    --------▴▴▪▴▪▪▪▴-▴▴▴▪▪▪▴----\n    ------▴-▴▴▴▴▪▪▴▴-·-▴▪▪▪▪▴---\n    ------▴--▴▴-▴▴--···▴▪▴▴▴▴---\n    ----▴---------▴▴-· ---------\n    ---------··---▴▪--·-·····---\n    -------······▴▪▪▪▴-······---\n    ------·····  ▴●●●▴·     ·---\n    -----·· ·    ▪♦■♦▪      ·---\n    -----· ·     ●■■♦▴      ·---\n    -----·      ·♦██♦·      ·---\n    -----·      ▴■██●       ·---\n    ----·       ▪██■▪       ·---\n    ----·      -●█■♦-       ·---\n    ----·      ▴♦█■●·     ···---\n    ----·     ·▴♦♦♦●·   ····----\n    ----·    ·▴▪●●●▪·· ····--▴--\n    ----······-▴▪▪▪▴--------▴---\n    -----▴▴----·--▴▴-▴▴-▴▴------\n    -----▴▪▪▴-· ··--▴▴▪▴▴▴▴-----\n    ----▴▪▪▪▪▴-· ·-▴▴▪▴▴▴▴------\n    -----▴▪▴▴▴▴·---▴▴▴▴▴--------\n    ------------▴▴▴▴------------\n    ----------------------------\n    ----------------------------\n\n       b:\n    DV : 1\n     \n### Classifier\n\nYou can create classifiers by instantiating types such as **LogisticClassifier** or **SoftmaxClassifier**, and passing a classification function of the form **DM->DM**in the constructor. Alternatively, you can directly pass the model we have just trained. 
\n\nPlease see the [API reference](reference/index.html) and the [source code](https://github.com/hypelib/Hype/blob/master/src/Hype/Classifier.fs) for a better understanding of how classifiers are implemented.\n\n*)\n\nlet cc = LogisticClassifier(n)\n\n(**\nLet's test the class predictions for 10 random elements from the MNIST test set, which, if you remember, we've filtered to have only 0s and 1s.\n*)\n\nlet pred = cc.Classify(MNISTtest01.X.[*,0..9]);;\nlet real = MNISTtest01.Y.[*, 0..9] |> DM.toDV |> DV.toArray |> Array.map (float32>>int)\n\n(**\n<pre>\nval pred : int [] = [|1; 0; 1; 0; 1; 0; 0; 1; 1; 1|]\nval real : int [] = [|1; 0; 1; 0; 1; 0; 0; 1; 1; 1|]\n</pre>\n\nThe classifier seems to be working well. We can compute the classification error for a given dataset.\n*)\n\nlet error = cc.ClassificationError(MNISTtest01);;\n\n(**\n<pre>\nval error : float32 = 0.000472813234f\n</pre>\n\nThe classification error is 0.047%.\n\nFinally, this is how you would classify single digits.\n*)\n\nlet cls = cc.Classify(MNISTtest01.X.[*,0]);;\nMNISTtest01.X.[*,0] |> DV.visualizeAsDM 28 |> printfn \"%s\"\n\n(**\n    [lang=cs]\n    val cls : int = 1\n\n    DM : 28 x 28\n                            \n                            \n                            \n                            \n                ♦               \n                ●♦              \n                 █              \n                 ■·             \n                ▪█-             \n                ▴█-             \n                 ■♦             \n                 ♦█·            \n                 -█▪            \n                  █▪            \n                  ●▪            \n                  ▪█            \n                  ▪█-           \n                  ▪█▴           \n                  ▪█■           \n                   █■           \n                   ██           \n                   ▪█           \n                    █▴          \n                    █●          \n                       
     \n                            \nAnd this is how you would classify many digits efficiently at the same time, by running them through the model together as the columns of an input matrix.\n*)\n\nlet clss = cc.Classify(MNISTtest01.X.[*,5..9]);;\nMNISTtest01.[5..9].VisualizeXColsAsImageGrid(28) |> printfn \"%s\"\n\n(**\n    [lang=cs]\n    val clss : int [] = [|0; 0; 1; 1; 1|]\n\n    Hype.Dataset\n       X: 784 x 5\n       Y: 1 x 5\n    X's columns reshaped to (28 x 28), presented in a (2 x 3) grid:\n    DM : 56 x 84\n                                                                                    \n                                                                                    \n                                                                                    \n                                                                                    \n                 ██·                                                                    \n                ●███♦-                        ·████♦·                   -█▴             \n                ██████■-                     ♦███████-                  ▪██·            \n               ●███████■                  ♦███████████▴                 ●██·            \n              ▪███● -███-                ♦█████████████■                ▴♦█·            \n              ♦██▪   -██■                ████♦ ●●████████                ▪█·            \n             ▪███    ·██■               ●█████·  ··██████                ▪█·            \n             ■██▪    ·██■              ▪██████·    ██████                ▪█·            \n            ·██■     ▪██■             -██████♦     ██████                ▪█·            \n            ♦██▪     ■██■            ▪███████     ·█████♦                ●█·            \n           ·███-     ■██■            ██████♦·    ♦██████·                ██·            \n           ♦██■     ·███■            ██████-   ▪███████-                 ██·            \n           ■██-     -███-            ██████   
●███████▴                  ██·            \n           ■██·     ■██♦             ██████·♦████████■                   ██·            \n           ■██·    ■███●             ████████████████                    ██             \n           ■██●    ████              ██████████████-                     ██             \n           ▴███· -■███-              ▴████████████▴                     ·██             \n           ·█████████♦                ■████████■●                        ██             \n            ▴███████♦                  ████████                         ·█♦·            \n              ▪███♦·                    ●●■●●-                          ·██▴            \n                                                                         █■             \n                                                                                    \n                                                                                    \n                                                                                    \n                                                                                    \n                                                                                    \n                                                                                    \n                                                                                    \n                      ·██                                                               \n                      ███                       -██■                                    \n                     ●██■                      ▪████-                                   \n                     ♦██                       ♦███■                                    \n                    ·███                      ·████■                                    \n                    ■██♦                      ▴███■                                     \n                   ▪██■                      -████●                                     \n    
               ███-                      ▴████▪                                     \n                  ▪██■                      ·████♦                                      \n                 -███▴                      █████·                                      \n                 ♦██●                       ████♦                                       \n                -███                       ▪████·                                       \n                ███▴                       ■███■                                        \n               -███                       ▴████·                                        \n               ███●                       ■███■                                         \n              ████                        ■███▪                                         \n             ●███-                       -███♦                                          \n            ▴███●                        ●███▪                                          \n            ●██♦                         ████▪                                          \n            ●██·                         ■███▴                                          \n                                          ■■-                                           \n                                                                                    \n                                                                                    \n                                                                                    \n                                                                                                                \n*)\n"
  },
  {
    "path": "docs/input/Training.fsx",
    "content": "﻿(*** hide ***)\n#r \"../../src/Hype/bin/Release/netstandard2.0/DiffSharp.dll\"\n#r \"../../src/Hype/bin/Release/netstandard2.0/Hype.dll\"\n#I \"../../packages/R.NET.Community/lib/net40/\"\n#I \"../../packages/R.NET.Community.FSharp/lib/net40/\"\n#I \"../../packages/RProvider\"\n#load \"RProvider.fsx\"\n//fsi.ShowDeclarationValues <- false\nSystem.Environment.CurrentDirectory <- __SOURCE_DIRECTORY__\n\n(**\nTraining\n========\n\nIn [optimization,](optimization.html) we've seen how nested AD and gradient-based optimization work together.\n\nTraining a model is the optimization of model parameters to minimize a loss function, or equivalently, to maximize the likelihood of a given set of data under the model parameters. In addition to the _optimization method_, _learning rate_, _momentum_, and _gradient clipping_ parameters we've seen, this introduces parameters for the _loss function_, _regularization_, _training batches_, and _validation and early stopping_. \n\nBut let's start with the **Dataset** type, which we will use for keeping the training, validation, and test data for the training procedure.\n\nDataset\n-------\n\nFor supervised training, data consists of pairs of input vectors $\\mathbf{x}_i \\in \\mathbb{R}^{d_x}$ and output vectors $\\mathbf{y}_i \\in \\mathbb{R}^{d_y}$. We represent data using the **Dataset** type, which is basically a pair of matrices \n\n$$$\n   \\begin{eqnarray*}\n   \\mathbf{X} &\\in& \\mathbb{R}^{d_x \\times n}\\\\\n   \\mathbf{Y} &\\in& \\mathbb{R}^{d_y \\times n}\\\\\n   \\end{eqnarray*}\n   \nholding these vectors, where $n$ is the number of input–output pairs, $d_x$ is the number of input features and $d_y$ is the number of output features. 
In other words, each of the $n$ columns of the matrix $\\mathbf{X}$ is an input vector of length $d_x$ and each of the $n$ columns of matrix $\\mathbf{Y}$ is the corresponding output vector of length $d_y$.\n\nKeeping data in matrix form is essential for harnessing high-performance linear algebra engines tailored for your CPU or GPU. Hype, by default, uses a high-performance CPU backend using OpenBLAS for BLAS/LAPACK operations, and parallel implementations of non-BLAS operations such as elementwise functions.\n*)\n\nopen Hype\nopen DiffSharp.AD.Float32\n\nlet x = toDM [[0; 0; 1; 1]\n              [0; 1; 0; 1]]\nlet y = toDM [[0; 1; 1; 0]]\n\nlet XORdata = Dataset(x, y)\n\n(**\n\nHype provides several utility functions for loading data into matrices from images, delimited text files (e.g., CSV), or commonly used dataset files such as the MNIST.\n\n*)\n\nlet MNIST = Dataset(Util.LoadMNISTPixels(\"train-images.idx3-ubyte\", 60000),\n                    Util.LoadMNISTLabels(\"train-labels.idx1-ubyte\", 60000) |> toDV |> DM.ofDV 1).NormalizeX()\n\nlet MNISTtest = Dataset(Util.LoadMNISTPixels(\"t10k-images.idx3-ubyte\", 10000),\n                        Util.LoadMNISTLabels(\"t10k-labels.idx1-ubyte\", 10000) |> toDV |> DM.ofDV 1).NormalizeX()\n\n(**\n\nYou can see the [API reference](reference/index.html) and the [source code](https://github.com/hypelib/Hype/blob/master/src/Hype/Hype.fs) for various ways of constructing Datasets.\n\nTraining parameters\n-------------------\n\nLet's load the housing prices dataset from the [Stanford UFLDL Tutorial](http://ufldl.stanford.edu/tutorial/supervised/LinearRegression/) and divide it into input and output pairs. 
We will later train a simple linear regression model, to demonstrate the use of training parameters.\n\n*)\n\nlet h = Util.LoadDelimited(\"housing.data\") |> DM.Transpose\nh.ToString() |> printfn \"%s\"\n\n(**\n<pre>\nDM : 14 x 506\n  0.00632    0.0273    0.0273    0.0324    0.0691    0.0299    0.0883     0.145     0.211      0.17     0.225     0.117    0.0938      0.63     0.638     0.627      1.05     0.784     0.803     0.726      1.25     0.852      1.23     0.988      0.75     0.841     0.672     0.956     0.773         1      1.13      1.35      1.39      1.15      1.61    0.0642    0.0974    0.0801     0.175    0.0276    0.0336     0.127     0.142     0.159     0.123     0.171     0.188     0.229     0.254      0.22    0.0887    0.0434    0.0536    0.0498    0.0136    0.0131    0.0206    0.0143     0.154     0.103     0.149     0.172      0.11     0.127    0.0195    0.0358    0.0438    0.0579     0.136     0.128    0.0883     0.159    0.0916     0.195     0.079    0.0951     0.102    0.0871    0.0565    0.0839    0.0411    0.0446    0.0366    0.0355    0.0506    0.0574    0.0519    0.0715    0.0566     0.053    0.0468    0.0393     0.042    0.0288    0.0429     0.122     0.115     0.121    0.0819    0.0686     0.149     0.114     0.229     0.212      0.14     0.133     0.171     0.131     0.128     0.264     0.108     0.101     0.123     0.222     0.142     0.171     0.132     0.151     0.131     0.145     0.069    0.0717     0.093      0.15    0.0985     0.169     0.387     0.259     0.325     0.881      0.34      1.19      0.59      0.33     0.976     0.558     0.323     0.352      0.25     0.545     0.291      1.63      3.32       4.1      2.78      2.38      2.16      2.37      2.33      2.73      1.66       1.5      1.13      2.15      1.41      3.54      2.45      1.22      1.34      1.43      1.27      1.46      1.83      1.52      2.24      2.92      2.01       1.8       2.3      2.45      1.21      2.31     0.139    0.0918    0.0845    0.0666    
0.0702    0.0543    0.0664    0.0578    0.0659    0.0689     0.091       0.1    0.0831    0.0605     0.056    0.0788     0.126    0.0837    0.0907    0.0691    0.0866    0.0219    0.0144    0.0138    0.0401    0.0467    0.0377    0.0315    0.0178    0.0345    0.0218    0.0351    0.0201     0.136      0.23     0.252     0.136     0.436     0.174     0.376     0.217     0.141      0.29     0.198    0.0456    0.0701     0.111     0.114     0.358     0.408     0.624     0.615     0.315     0.527     0.382     0.412     0.298     0.442     0.537     0.463     0.575     0.331     0.448      0.33     0.521     0.512    0.0824    0.0925     0.113     0.106     0.103     0.128     0.206     0.191      0.34     0.197     0.164     0.191      0.14     0.214    0.0822     0.369    0.0482    0.0355    0.0154     0.612     0.664     0.657      0.54     0.534      0.52     0.825      0.55     0.762     0.786     0.578     0.541    0.0907     0.299     0.162     0.115     0.222    0.0564     0.096     0.105    0.0613    0.0798      0.21    0.0358    0.0371    0.0613     0.015   0.00906     0.011    0.0197    0.0387    0.0459     0.043     0.035    0.0789    0.0362    0.0827     0.082     0.129    0.0537     0.141    0.0647    0.0556    0.0442    0.0354    0.0927       0.1    0.0552    0.0548     0.075    0.0493     0.493     0.349      2.64      0.79     0.262     0.269     0.369     0.254     0.318     0.245     0.402     0.475     0.168     0.182     0.351     0.284     0.341     0.192     0.303     0.241    0.0662    0.0672    0.0454    0.0502    0.0347    0.0508    0.0374    0.0396    0.0343    0.0304    0.0331     0.055    0.0615     0.013     0.025    0.0254    0.0305    0.0311    0.0616    0.0187     0.015     0.029    0.0621    0.0795    0.0724    0.0171     0.043     0.107      8.98      3.85       5.2      4.26      4.54      3.84      3.68      4.22      3.47      4.56       3.7      3.52       4.9      5.67      6.54      9.23      8.27      1.11       8.5      9.61    
  5.29      9.82      3.65      7.87      8.98      5.87      9.19      7.99    0.0849      6.81      4.39       2.6      4.33      8.15      6.96      5.29      1.58      8.64      3.36      8.72      5.87      7.67      8.35      9.92      5.05      4.24       9.6       4.8      1.53      7.92     0.716      1.95       7.4      4.44      1.14      4.05      8.81      8.66      5.75      8.08     0.834      5.94      3.53      1.81      1.09      7.02      2.05      7.05      8.79      5.86      2.25      7.66      7.37      9.34      8.49    0.0623      6.44      5.58      3.91      1.16      4.42      5.18      3.68      9.39      2.05      9.72      5.67      9.97       2.8     0.672      6.29      9.92      9.33      7.53      6.72      5.44      5.09      8.25      9.51      4.75      4.67       8.2      7.75       6.8      4.81      3.69      6.65      5.82      7.84      3.16      3.77      4.42      5.58      3.08      4.35      4.04      3.57      4.65      8.06      6.39      4.87      5.02     0.233      4.33      5.82      5.71      5.73      2.82      2.38      3.67      5.69      4.84     0.151     0.183     0.207     0.106     0.111     0.173      0.28     0.179      0.29     0.268     0.239     0.178     0.224    0.0626    0.0453    0.0608      0.11    0.0474 \n       18         0         0         0         0         0      12.5      12.5      12.5      12.5      12.5      12.5      12.5         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0        75        75         0         0         0         0         0         0         0         0         0        21        21        21        21        75        90        85       100        25        25        25        25        25        25      17.5        80        80      12.5      12.5      12.5  
       0         0         0         0         0         0         0         0         0         0        25        25        25        25         0         0         0         0         0         0         0         0        28        28        28         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0        45        45        45        45        45        45        60        60        80        80        80        80        95        95      82.5      82.5        95        95         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0        30        30        30        30        30        30        22        22        22        22        22        22        22        22        22        22        80        80        90        20        20        20        20        20        20        20        20        20        20        20        20        20  
      20        20        20        20        40        40        40        40        40        20        20        20        20        90        90        55        80      52.5      52.5      52.5        80        80        80         0         0         0         0         0        70        70        70        34        34        34        33        33        33        33         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0        35        35         0         0         0         0         0         0         0         0        35         0        55        55         0         0        85        80        40        40        60        60        90        80        80         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0  
       0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0 \n     2.31      7.07      7.07      2.18      2.18      2.18      7.87      7.87      7.87      7.87      7.87      7.87      7.87      8.14      8.14      8.14      8.14      8.14      8.14      8.14      8.14      8.14      8.14      8.14      8.14      8.14      8.14      8.14      8.14      8.14      8.14      8.14      8.14      8.14      8.14      5.96      5.96      5.96      5.96      2.95      2.95      6.91      6.91      6.91      6.91      6.91      6.91      6.91      6.91      6.91      5.64      5.64      5.64      5.64         4      1.22      0.74      1.32      5.13      5.13      5.13      5.13      5.13      5.13      1.38      3.37      3.37      6.07      6.07      6.07      10.8      10.8      10.8      10.8      12.8      12.8      12.8      12.8      12.8      12.8      4.86      4.86      4.86      4.86      4.49      4.49      4.49      4.49      3.41      3.41      3.41      3.41        15        15        15      2.89      2.89      2.89      2.89      2.89      8.56      8.56      8.56      8.56      8.56      8.56      8.56      8.56      8.56      8.56      8.56        10        10        10        10        10        10        10        10        10      25.7      25.7      25.7      25.7      25.7      25.7      25.7      21.9      21.9      21.9      21.9      21.9      21.9      21.9      21.9      21.9      21.9      21.9      21.9      21.9      21.9      21.9      19.6      19.6      19.6      19.6      19.6      19.6      19.6      19.6      19.6      19.6      19.6      19.6      19.6      19.6      19.6      19.6      19.6      19.6      19.6      19.6      19.6      
19.6      19.6      19.6      19.6      19.6      19.6      19.6      19.6      19.6      4.05      4.05      4.05      4.05      4.05      4.05      4.05      2.46      2.46      2.46      2.46      2.46      2.46      2.46      2.46      3.44      3.44      3.44      3.44      3.44      3.44      2.93      2.93      0.46      1.52      1.52      1.52      1.47      1.47      2.03      2.03      2.68      2.68      10.6      10.6      10.6      10.6      10.6      10.6      10.6      10.6      10.6      10.6      10.6      13.9      13.9      13.9      13.9       6.2       6.2       6.2       6.2       6.2       6.2       6.2       6.2       6.2       6.2       6.2       6.2       6.2       6.2       6.2       6.2       6.2       6.2      4.93      4.93      4.93      4.93      4.93      4.93      5.86      5.86      5.86      5.86      5.86      5.86      5.86      5.86      5.86      5.86      3.64      3.64      3.75      3.97      3.97      3.97      3.97      3.97      3.97      3.97      3.97      3.97      3.97      3.97      3.97      6.96      6.96      6.96      6.96      6.96      6.41      6.41      6.41      6.41      6.41      3.33      3.33      3.33      3.33      1.21      2.97      2.25      1.76      5.32      5.32      5.32      4.95      4.95      4.95      13.9      13.9      13.9      13.9      13.9      2.24      2.24      2.24      6.09      6.09      6.09      2.18      2.18      2.18      2.18       9.9       9.9       9.9       9.9       9.9       9.9       9.9       9.9       9.9       9.9       9.9       9.9      7.38      7.38      7.38      7.38      7.38      7.38      7.38      7.38      3.24      3.24      3.24      6.06      6.06      5.19      5.19      5.19      5.19      5.19      5.19      5.19      5.19      1.52      1.89      3.78      3.78      4.39      4.39      4.15      2.01      1.25      1.25      1.69      1.69      2.02      1.91      1.91      18.1      18.1      18.1      18.1      18.1      18.1      18.1      
18.1      18.1      18.1      18.1      18.1      18.1      18.1      18.1      18.1      18.1      18.1      18.1      18.1      18.1      18.1      18.1      18.1      18.1      18.1      18.1      18.1      18.1      18.1      18.1      18.1      18.1      18.1      18.1      18.1      18.1      18.1      18.1      18.1      18.1      18.1      18.1      18.1      18.1      18.1      18.1      18.1      18.1      18.1      18.1      18.1      18.1      18.1      18.1      18.1      18.1      18.1      18.1      18.1      18.1      18.1      18.1      18.1      18.1      18.1      18.1      18.1      18.1      18.1      18.1      18.1      18.1      18.1      18.1      18.1      18.1      18.1      18.1      18.1      18.1      18.1      18.1      18.1      18.1      18.1      18.1      18.1      18.1      18.1      18.1      18.1      18.1      18.1      18.1      18.1      18.1      18.1      18.1      18.1      18.1      18.1      18.1      18.1      18.1      18.1      18.1      18.1      18.1      18.1      18.1      18.1      18.1      18.1      18.1      18.1      18.1      18.1      18.1      18.1      18.1      18.1      18.1      18.1      18.1      18.1      18.1      18.1      18.1      18.1      18.1      18.1      27.7      27.7      27.7      27.7      27.7      9.69      9.69      9.69      9.69      9.69      9.69      9.69      9.69      11.9      11.9      11.9      11.9      11.9 \n        0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0    
     0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         1         0         0         0         0         0         0         0         0         0         1         0         1         1         0         0         0         0         1         0         1         1         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         1         1         1         1         1         0         0         0         1         0         1         1         1         1         1         0         0         0         0         0         0         0         0         0         0         0         1         0         1         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0    
     0         0         0         0         0         0         0         0         0         0         0         0         1         0         0         0         1         1         0         1         1         0         0         0         0         1         1         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         1         1         1         0         0         0         0         1         1         0         0         0         0         1         1         0         1         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0    
     0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0         0 \n    0.538     0.469     0.469     0.458     0.458     0.458     0.524     0.524     0.524     0.524     0.524     0.524     0.524     0.538     0.538     0.538     0.538     0.538     0.538     0.538     0.538     0.538     0.538     0.538     0.538     0.538     0.538     0.538     0.538     0.538     0.538     0.538     0.538     0.538     0.538     0.499     0.499     0.499     0.499     0.428     0.428     0.448     0.448     0.448     0.448     0.448     0.448     0.448     0.448     0.448     0.439     0.439     0.439     0.439      0.41     0.403      0.41     0.411     0.453     0.453     0.453     0.453     0.453     0.453     0.416     0.398     0.398     0.409     0.409     0.409     0.413     0.413     0.413     0.413     0.437     0.437     0.437     0.437     0.437     0.437     0.426     0.426     0.426     0.426     0.449     0.449     0.449     0.449     0.489     0.489     0.489     0.489     0.464     0.464     0.464     0.445     0.445     0.445     0.445     0.445      0.52      0.52      0.52      0.52      0.52      0.52      0.52      0.52      0.52      0.52      0.52     0.547     0.547     0.547     0.547     0.547     0.547     0.547     0.547     0.547     0.581     0.581     0.581     0.581     0.581     0.581     0.581     0.624     0.624     0.624     0.624     0.624     0.624     0.624     0.624     0.624     0.624     0.624     0.624     0.624     0.624     0.624     0.871     0.871     0.871     0.871     0.871     0.871     0.871     0.871     0.871  
   0.871     0.871     0.871     0.871     0.871     0.871     0.605     0.605     0.871     0.605     0.605     0.605     0.605     0.605     0.605     0.605     0.605     0.605     0.605     0.605     0.605      0.51      0.51      0.51      0.51      0.51      0.51      0.51     0.488     0.488     0.488     0.488     0.488     0.488     0.488     0.488     0.437     0.437     0.437     0.437     0.437     0.437     0.401     0.401     0.422     0.404     0.404     0.404     0.403     0.403     0.415     0.415     0.416     0.416     0.489     0.489     0.489     0.489     0.489     0.489     0.489     0.489     0.489     0.489     0.489      0.55      0.55      0.55      0.55     0.507     0.507     0.507     0.507     0.504     0.504     0.504     0.504     0.504     0.504     0.504     0.504     0.507     0.507     0.507     0.507     0.507     0.507     0.428     0.428     0.428     0.428     0.428     0.428     0.431     0.431     0.431     0.431     0.431     0.431     0.431     0.431     0.431     0.431     0.392     0.392     0.394     0.647     0.647     0.647     0.647     0.647     0.647     0.647     0.647     0.647     0.647     0.575     0.575     0.464     0.464     0.464     0.464     0.464     0.447     0.447     0.447     0.447     0.447     0.443     0.443     0.443     0.443     0.401       0.4     0.389     0.385     0.405     0.405     0.405     0.411     0.411     0.411     0.437     0.437     0.437     0.437     0.437       0.4       0.4       0.4     0.433     0.433     0.433     0.472     0.472     0.472     0.472     0.544     0.544     0.544     0.544     0.544     0.544     0.544     0.544     0.544     0.544     0.544     0.544     0.493     0.493     0.493     0.493     0.493     0.493     0.493     0.493      0.46      0.46      0.46     0.438     0.438     0.515     0.515     0.515     0.515     0.515     0.515     0.515     0.515     0.442     0.518     0.484     0.484     0.442     0.442     0.429     0.435     0.429     0.429  
   0.411     0.411      0.41     0.413     0.413      0.77      0.77      0.77      0.77      0.77      0.77      0.77      0.77     0.718     0.718     0.718     0.631     0.631     0.631     0.631     0.631     0.668     0.668     0.668     0.671     0.671     0.671     0.671     0.671     0.671     0.671       0.7       0.7       0.7       0.7       0.7       0.7       0.7       0.7       0.7       0.7       0.7     0.693     0.693     0.693     0.693     0.693     0.693     0.693     0.693     0.693     0.693     0.693     0.693     0.693     0.659     0.659     0.597     0.597     0.597     0.597     0.597     0.597     0.693     0.679     0.679     0.679     0.679     0.718     0.718     0.718     0.614     0.614     0.584     0.679     0.584     0.679     0.679     0.679     0.584     0.584     0.584     0.713     0.713      0.74      0.74      0.74      0.74      0.74      0.74      0.74      0.74      0.74      0.74      0.74      0.74      0.74     0.713     0.713     0.713     0.713     0.713     0.713     0.713     0.713     0.713     0.713     0.713     0.713     0.713     0.713     0.713     0.713     0.655     0.655     0.655     0.584      0.58      0.58      0.58     0.532      0.58     0.614     0.584     0.584     0.614     0.614     0.614     0.614     0.532     0.532     0.532     0.532     0.583     0.583     0.583     0.583     0.609     0.609     0.609     0.609     0.609     0.585     0.585     0.585     0.585     0.585     0.585     0.585     0.585     0.573     0.573     0.573     0.573     0.573 \n     6.58      6.42      7.19         7      7.15      6.43      6.01      6.17      5.63         6      6.38      6.01      5.89      5.95       6.1      5.83      5.94      5.99      5.46      5.73      5.57      5.97      6.14      5.81      5.92       5.6      5.81      6.05       6.5      6.67      5.71      6.07      5.95       5.7       6.1      5.93      5.84      5.85      5.97       6.6      7.02      6.77      6.17      6.21      
6.07      5.68      5.79      6.03       5.4       5.6      5.96      6.12      6.51         6      5.89      7.25      6.38      6.82      6.15      5.93      5.74      5.97      6.46      6.76       7.1      6.29      5.79      5.88      5.59      5.89      6.42      5.96      6.07      6.25      6.27      6.29      6.28      6.14      6.23      5.87      6.73      6.62       6.3      6.17      6.39      6.63      6.02      6.12      7.01      7.08      6.42      6.41      6.44      6.21      6.25      6.63      6.16      8.07      7.82      7.42      6.73      6.78      6.41      6.14      6.17      5.85      5.84      6.13      6.47      6.23       6.2      6.72      5.91      6.09      6.25      5.93      6.18      6.02      5.87      5.73      5.87         6      5.96      5.86      5.88      5.99      5.61      5.69      6.43      5.64      6.46      6.33      6.37      5.82      5.76      6.34      5.94      6.45      5.86      6.15      6.17      5.02       5.4      5.47       4.9      6.13      5.63      4.93      5.19       5.6      6.12       5.4      5.01      5.71      6.13      6.15      5.27      6.94      6.07      6.51      6.25      7.49       7.8      8.38      5.85       6.1      7.93      5.88      6.32       6.4      5.88      5.88      5.57      6.42      5.86      6.55      6.02      6.32      6.86      6.98      7.77      6.14      7.16      6.56       5.6      6.15      7.83      6.78      6.56      7.19      6.95      6.74      7.18       6.8       6.6      7.88      7.29      7.11      7.27      6.98      7.14      6.16      7.61      7.85      8.03      5.89      6.33      5.78      6.06      5.34      5.96       5.4      5.81      6.38      5.41      6.18      5.89      6.64      5.95      6.37      6.95      6.16      6.88      6.62      8.27      8.73      8.04      7.16      7.69      6.55      5.98      7.41      8.34      8.25      6.73      6.09      6.63      7.36      6.48      6.61       6.9       6.1      6.36      6.39      
5.59      5.61      6.11      6.23      6.43      6.72      6.49      6.44      6.96      8.26      6.11      5.88      7.45       8.7      7.33      6.84       7.2      7.52       8.4      7.33      7.21      5.56      7.01       8.3      7.47      5.92      5.86      6.24      6.54      7.69      6.76      6.85      7.27      6.83      6.48      6.81      7.82      6.97      7.65      7.92      7.09      6.45      6.23      6.21      6.32      6.57      6.86      7.15      6.63      6.13      6.01      6.68      6.55      5.79      6.35      7.04      6.87      6.59       6.5      6.98      7.24      6.62      7.42      6.85      6.64      5.97      4.97      6.12      6.02      6.27      6.57      5.71      5.91      5.78      6.38      6.11      6.43      6.38      6.04      5.71      6.42      6.43      6.31      6.08      5.87      6.33      6.14      5.71      6.03      6.32      6.31      6.04      5.87       5.9      6.06      5.99      5.97      7.24      6.54       6.7      6.87      6.01       5.9      6.52      6.64      6.94      6.49      6.58      5.88      6.73      5.66      5.94      6.21       6.4      6.13      6.11       6.4      6.25      5.36       5.8      8.78      3.56      4.96      3.86      4.97      6.68      7.02      6.22      5.88      4.91      4.14      7.31      6.65      6.79      6.38      6.22      6.97      6.55      5.54      5.52      4.37      5.28      4.65         5      4.88      5.39      5.71      6.05      5.04      6.19      5.89      6.47      6.41      5.75      5.45      5.85      5.99      6.34       6.4      5.35      5.53      5.68      4.14      5.61      5.62      6.85      5.76      6.66      4.63      5.16      4.52      6.43      6.78       5.3      5.96      6.82      6.41      6.01      5.65       6.1      5.57       5.9      5.84       6.2      6.19      6.38      6.35      6.83      6.43      6.44      6.21      6.63      6.46      6.15      5.94      5.63      5.82      6.41      6.22      6.49      
5.85      6.46      6.34      6.25      6.19      6.42      6.75      6.66       6.3      7.39      6.73      6.53      5.98      5.94       6.3      6.08       6.7      6.38      6.32      6.51      6.21      5.76      5.95         6      5.93      5.71      6.17      6.23      6.44      6.98      5.43      6.16      6.48       5.3      6.19      6.23      6.24      6.75      7.06      5.76      5.87      6.31      6.11      5.91      5.45      5.41      5.09      5.98      5.98      5.71      5.93      5.67      5.39      5.79      6.02      5.57      6.03      6.59      6.12      6.98      6.79      6.03 \n     65.2      78.9      61.1      45.8      54.2      58.7      66.6      96.1       100      85.9      94.3      82.9        39      61.8      84.5      56.5      29.3      81.7      36.6      69.5      98.1      89.2      91.7       100      94.1      85.7      90.3      88.8      94.4      87.3      94.1       100        82        95      96.9      68.2      61.4      41.5      30.2      21.8      15.8       2.9       6.6       6.5        40      33.8      33.3      85.5      95.3        62      45.7        63      21.1      21.4      47.6      21.9      35.7      40.5      29.2      47.2      66.2      93.4      67.8      43.4      59.5      17.8      31.1      21.4      36.8        33       6.6      17.5       7.8       6.2         6        45      74.5      45.8      53.7      36.6      33.5      70.4      32.2      46.7        48      56.1      45.1      56.8      86.3      63.1      66.1      73.9      53.6      28.9      77.3      57.8      69.6        76      36.9      62.5      79.9      71.3      85.4      87.4        90      96.7      91.9      85.2      97.1      91.2      54.4      81.6      92.9      95.4      84.2      88.2      72.5      82.6      73.1      65.2      69.7      84.1      92.9        97      95.8      88.4      95.6        96      98.8      94.7      98.9      97.7      97.9      95.4      98.4      98.2      93.5      98.4    
  98.2      97.9      93.6       100       100       100      97.8       100       100      95.7      93.8      94.9      97.3       100        88      98.5        96      82.6        94      97.4       100       100      92.6      90.8      98.2      93.9      91.8        93      96.2      79.2      96.1      95.2      94.6      97.3      88.5      84.1      68.7      33.1      47.2      73.4      74.4      58.4      83.3      62.2      92.2      95.6      89.8      68.8      53.6      41.1      29.1      38.9      21.5      30.8      26.3       9.9      18.8        32      34.1      36.6      38.3      15.3      13.9      38.4      15.7      33.2      31.9      22.3      52.5      72.7      59.1       100      92.1      88.6      53.8      32.3       9.8      42.4        56      85.1      93.8      92.4      88.5      91.3      77.7      80.8      78.3        83      86.5      79.9        17      21.4      68.1      76.9      73.3      70.4      66.5      61.5      76.5      71.6      18.5      42.2      54.3      65.1      52.9       7.8      76.5      70.2      34.9      79.2      49.1      17.5        13       8.9       6.8       8.4        32      19.1      34.2      86.9       100       100      81.8      89.4      91.5      94.5      91.6      62.8      84.6        67      52.6      61.5      42.1      16.3      58.7      51.8      32.9      42.8        49      27.6      32.1      32.2      64.5      37.2      49.7      24.8      20.8      31.9      31.5      31.3      45.6      22.9      27.9      27.7      23.4      18.4      42.3      31.1        51        58      20.1        10      47.4      40.4      18.4      17.7      41.1      58.1      71.9      70.3      82.5      76.7      37.8      52.8      90.4      82.8      87.3      77.7      83.2      71.7      67.2      58.8      52.3      54.3      49.9      74.3      40.1      14.7      28.9      43.7      25.8      17.2      32.2      28.4      23.3      38.1      38.5      34.5      46.3      59.6    
  37.3      45.4      58.5      49.3      59.7      56.4      28.1      48.5      52.3      27.7      29.7      34.5      44.4      35.9      18.5      36.1      21.9      19.5      97.4        91      83.4      81.3        88      91.1      96.2        89      82.9      87.9      91.4       100       100      96.8      97.5       100      89.6       100       100      97.9      93.3      98.8      96.2       100      91.9      99.1       100       100      91.2      98.1       100      89.5       100      98.9        97      82.5        97      92.6      94.7      98.8        96      98.9       100      77.8       100       100       100        96      85.4       100       100       100      97.9       100       100       100       100       100       100       100      90.8      89.1       100      76.5       100      95.3      87.6      85.1      70.6      95.4      59.7      78.7      78.1      95.6      86.1      94.3      74.8      87.9        95      94.6      93.3       100      87.9      93.9      92.4      97.2       100       100      96.6      94.8      96.4      96.6      98.7      98.3      92.6      98.2      91.8      99.3      94.1      86.5      87.9      80.3      83.7      84.4        90      88.4        83      89.9      65.4      48.2      84.7      94.5        71      56.7        84      90.7        75      67.6      95.4      97.4      93.6      97.3      96.7        88      64.7      74.9        77      40.3      41.9      51.9      79.8      53.2      92.7      98.3        98      98.8      83.5        54      42.6      28.8      72.9      70.6      65.3      73.5      79.7      69.1      76.7        91      89.3      80.8 \n     4.09      4.97      4.97      6.06      6.06      6.06      5.56      5.95      6.08      6.59      6.35      6.23      5.45      4.71      4.46       4.5       4.5      4.26       3.8       3.8       3.8      4.01      3.98       4.1       4.4      4.45      4.68      4.45      4.45      4.24      4.23      4.18  
    3.99      3.79      3.76      3.36      3.38      3.93      3.85       5.4       5.4      5.72      5.72      5.72      5.72       5.1       5.1      5.69      5.87      6.09      6.81      6.81      6.81      6.81      7.32       8.7      9.19      8.32      7.81      6.93      7.23      6.82      7.23      7.98      9.22      6.61      6.61       6.5       6.5       6.5      5.29      5.29      5.29      5.29      4.25       4.5      4.05      4.09      5.01       4.5       5.4       5.4       5.4       5.4      4.78      4.44      4.43      3.75      3.42      3.41      3.09      3.09      3.67      3.67      3.62       3.5       3.5       3.5       3.5       3.5      2.78      2.86      2.71      2.71      2.42      2.11      2.21      2.12      2.43      2.55      2.78      2.68      2.35      2.55      2.26      2.46      2.73      2.75      2.48      2.76      2.26       2.2      2.09      1.94      2.01      1.99      1.76      1.79      1.81      1.98      2.12      2.27      2.33      2.47      2.35      2.11      1.97      1.85      1.67      1.67      1.61      1.44      1.32      1.41      1.35      1.42      1.52      1.46      1.53      1.53      1.62      1.59      1.61      1.62      1.75      1.75      1.74      1.88      1.76      1.77       1.8      1.97      2.04      2.16      2.42      2.28      2.05      2.43       2.1      2.26      2.43      2.39       2.6      2.65       2.7      3.13      3.55      3.32      2.92      2.83      2.74       2.6       2.7      2.85      2.99      3.28       3.2      3.79      4.57      4.57      6.48      6.48      6.48      6.22      6.22      5.65      7.31      7.31      7.31      7.65      7.65      6.27      6.27      5.12      5.12      3.95      4.35      4.35      4.24      3.88      3.88      3.67      3.65      3.95      3.59      3.95      3.11      3.42      2.89      3.36      2.86      3.05      3.27      3.27      2.89      2.89      3.22      3.22      3.38      3.38      3.67      3.67  
    3.84      3.65      3.65      3.65      4.15      4.15      6.19      6.19      6.34      6.34      7.04      7.04      7.95      7.95      8.06      8.06      7.83      7.83       7.4       7.4      8.91      8.91      9.22      9.22      6.34       1.8      1.89      2.01      2.11      2.14      2.29      2.08      1.93      1.99      2.13      2.42      2.87      3.92      4.43      4.43      3.92      4.37      4.08      4.27      4.79      4.86      4.14       4.1      4.69      5.24      5.21      5.89      7.31      7.31      9.09      7.32      7.32      7.32      5.12      5.12      5.12       5.5       5.5      5.96      5.96      6.32      7.83      7.83      7.83      5.49      5.49      5.49      4.02      3.37       3.1      3.18      3.32       3.1      2.52      2.64      2.83      3.26       3.6      3.95         4      4.03      3.53         4      4.54      4.54      4.72      4.72      4.72      5.42      5.42      5.42      5.21      5.21      5.87      6.64      6.64      6.46      6.46      5.99      5.23      5.62      4.81      4.81      4.81      7.04      6.27      5.73      6.47      8.01      8.01      8.54      8.34      8.79      8.79      10.7      10.7      12.1      10.6      10.6      2.12      2.51      2.72      2.51      2.52       2.3       2.1       1.9       1.9      1.61      1.75      1.51      1.33      1.36       1.2      1.17      1.13      1.17      1.14      1.32      1.34      1.36      1.39      1.39      1.42      1.52      1.58      1.53      1.44      1.43      1.47      1.52      1.59      1.73      1.93      2.17      1.77      1.79      1.78      1.73      1.68      1.63      1.49       1.5      1.59      1.57      1.64       1.7      1.61      1.43      1.18      1.29      1.45      1.47      1.41      1.53      1.55      1.59      1.66      1.83      1.82      1.65       1.8      1.79      1.86      1.87      1.95      2.02      2.06      1.91         2      1.86      1.94      1.97      2.05      2.09  
     2.2      2.32      2.22      2.12         2      1.91      1.82      1.82      1.87      2.07         2      1.98       1.9      1.99      2.07       2.2      2.26      2.19      2.32      2.36      2.37      2.45       2.5      2.44      2.58      2.78      2.78      2.72       2.6      2.57      2.73       2.8      2.96      3.07      2.87      2.54      2.91      2.82      3.03       3.1       2.9      2.53      2.43      2.21      2.31       2.1      2.17      1.95      3.42      3.33      3.41       4.1      3.72      3.99      3.55      3.15      1.82      1.76      1.82      1.87      2.11      2.38      2.38       2.8       2.8      2.89      2.41       2.4       2.5      2.48      2.29      2.17      2.39      2.51 \n        1         2         2         3         3         3         5         5         5         5         5         5         5         4         4         4         4         4         4         4         4         4         4         4         4         4         4         4         4         4         4         4         4         4         4         5         5         5         5         3         3         3         3         3         3         3         3         3         3         3         4         4         4         4         3         5         2         5         8         8         8         8         8         8         3         4         4         4         4         4         4         4         4         4         5         5         5         5         5         5         4         4         4         4         3         3         3         3         2         2         2         2         4         4         4         2         2         2         2         2         5         5         5         5         5         5         5         5         5         5         5         6         6         6         6         6         6         6         6         6         2         2         2         2         2         
2         2         4         4         4         4         4         4         4         4         4         4         4         4         4         4         4         5         5         5         5         5         5         5         5         5         5         5         5         5         5         5         5         5         5         5         5         5         5         5         5         5         5         5         5         5         5         5         5         5         5         5         5         5         3         3         3         3         3         3         3         3         5         5         5         5         5         5         1         1         4         2         2         2         3         3         2         2         4         4         4         4         4         4         4         4         4         4         4         4         4         5         5         5         5         8         8         8         8         8         8         8         8         8         8         8         8         8         8         8         8         8         8         6         6         6         6         6         6         7         7         7         7         7         7         7         7         7         7         1         1         3         5         5         5         5         5         5         5         5         5         5         5         5         3         3         3         3         3         4         4         4         4         4         5         5         5         5         1         1         1         1         6         6         6         4         4         4         4         4         4         4         4         5         5         5         7         7         7         7         7         7         7         4         4         4         4         4         4         4         4         4         4         4         4         5         5         5         5         5         
5         5         5         4         4         4         1         1         5         5         5         5         5         5         5         5         1         1         5         5         3         3         4         4         1         1         4         4         5         4         4        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24         4         4         4         4         4         6         6         6         6         6         6         6         6         1         1         1         1         1 \n      296       242       242       222       222       222       311       311       311       311       311       311       311       307       307       307       307       307       307       
307       307       307       307       307       307       307       307       307       307       307       307       307       307       307       307       279       279       279       279       252       252       233       233       233       233       233       233       233       233       233       243       243       243       243       469       226       313       256       284       284       284       284       284       284       216       337       337       345       345       345       305       305       305       305       398       398       398       398       398       398       281       281       281       281       247       247       247       247       270       270       270       270       270       270       270       276       276       276       276       276       384       384       384       384       384       384       384       384       384       384       384       432       432       432       432       432       432       432       432       432       188       188       188       188       188       188       188       437       437       437       437       437       437       437       437       437       437       437       437       437       437       437       403       403       403       403       403       403       403       403       403       403       403       403       403       403       403       403       403       403       403       403       403       403       403       403       403       403       403       403       403       403       296       296       296       296       296       296       296       193       193       193       193       193       193       193       193       398       398       398       398       398       398       265       265       255       329       329       329       402       402       348       348       224       224       277       277       277       277       277       277       277       277       277       277       277       276       276       276       
276       307       307       307       307       307       307       307       307       307       307       307       307       307       307       307       307       307       307       300       300       300       300       300       300       330       330       330       330       330       330       330       330       330       330       315       315       244       264       264       264       264       264       264       264       264       264       264       264       264       223       223       223       223       223       254       254       254       254       254       216       216       216       216       198       285       300       241       293       293       293       245       245       245       289       289       289       289       289       358       358       358       329       329       329       222       222       222       222       304       304       304       304       304       304       304       304       304       304       304       304       287       287       287       287       287       287       287       287       430       430       430       304       304       224       224       224       224       224       224       224       224       284       422       370       370       352       352       351       280       335       335       411       411       187       334       334       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       
666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       711       711       711       711       711       391       391       391       391       391       391       391       391       273       273       273       273       273 \n     15.3      17.8      17.8      18.7      18.7      18.7      15.2      15.2      15.2      15.2      15.2      15.2      15.2        21        21        21        21        21        21        21        21        21        21        21        21        21        21        21        21        21        21        21        21        21        21      19.2      19.2      19.2      19.2      18.3      18.3      17.9      17.9      17.9      17.9      17.9      17.9      17.9      17.9      17.9      16.8      16.8      16.8      16.8      21.1      17.9      17.3      15.1      19.7      19.7      19.7      19.7      19.7      19.7      18.6      16.1      16.1      18.9      18.9      18.9      19.2      19.2      19.2      19.2      18.7      18.7      18.7      18.7      18.7      18.7        19        19        19        19      18.5      18.5      18.5      18.5      17.8      17.8      17.8      17.8      18.2      18.2      18.2        18        18        18        18        18      20.9      20.9      20.9      20.9      20.9      20.9      20.9      20.9      20.9      20.9      20.9      17.8      17.8     
 17.8      17.8      17.8      17.8      17.8      17.8      17.8      19.1      19.1      19.1      19.1      19.1      19.1      19.1      21.2      21.2      21.2      21.2      21.2      21.2      21.2      21.2      21.2      21.2      21.2      21.2      21.2      21.2      21.2      14.7      14.7      14.7      14.7      14.7      14.7      14.7      14.7      14.7      14.7      14.7      14.7      14.7      14.7      14.7      14.7      14.7      14.7      14.7      14.7      14.7      14.7      14.7      14.7      14.7      14.7      14.7      14.7      14.7      14.7      16.6      16.6      16.6      16.6      16.6      16.6      16.6      17.8      17.8      17.8      17.8      17.8      17.8      17.8      17.8      15.2      15.2      15.2      15.2      15.2      15.2      15.6      15.6      14.4      12.6      12.6      12.6        17        17      14.7      14.7      14.7      14.7      18.6      18.6      18.6      18.6      18.6      18.6      18.6      18.6      18.6      18.6      18.6      16.4      16.4      16.4      16.4      17.4      17.4      17.4      17.4      17.4      17.4      17.4      17.4      17.4      17.4      17.4      17.4      17.4      17.4      17.4      17.4      17.4      17.4      16.6      16.6      16.6      16.6      16.6      16.6      19.1      19.1      19.1      19.1      19.1      19.1      19.1      19.1      19.1      19.1      16.4      16.4      15.9        13        13        13        13        13        13        13        13        13        13        13        13      18.6      18.6      18.6      18.6      18.6      17.6      17.6      17.6      17.6      17.6      14.9      14.9      14.9      14.9      13.6      15.3      15.3      18.2      16.6      16.6      16.6      19.2      19.2      19.2        16        16        16        16        16      14.8      14.8      14.8      16.1      16.1      16.1      18.4      18.4      18.4      18.4      18.4      18.4      18.4      18.4      18.4     
 18.4      18.4      18.4      18.4      18.4      18.4      18.4      19.6      19.6      19.6      19.6      19.6      19.6      19.6      19.6      16.9      16.9      16.9      16.9      16.9      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      15.5      15.9      17.6      17.6      18.8      18.8      17.9        17      19.7      19.7      18.3      18.3        17        22        22      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.1      20.1      20.1      20.1      20.1      19.2      19.2      19.2      19.2      19.2      19.2      19.2      19.2        21        21        21        21        21 \n      397       397       393       395       397       394       396   
    397       387       387       393       397       391       397       380       396       387       387       289       391       377       393       397       395       394       303       377       306       388       380       360       377       233       359       248       397       378       397       393       396       396       385       383       394       389       397       397       393       397       397       396       394       397       397       397       396       397       393       391       397       395       378       397       396       393       397       397       396       397       397       384       377       391       377       395       383       374       387       386       396       397       396       397       391       397       392       396       395       397       396       392       394       395       396       397       358       392       397       394       397       395       396      70.8       394       393       394       396       388       395       391       393       396       395       397       389       345       393       395       339       392       389       378       378       370       379       385       359       392       397       397       395       397       386       389       263       395       378       394       392       397       388       397       397       397       397       173       169       392       357       352       373       342       343       262       321        88      88.6       363       354       364       339       374       390       388       395       240       369       228       297       330       292       348       397       396       393       391       393       396       391       397       396       397       394       397       391       387       393       394       383       397       378       390       390       393       377       394       397       354       392       397       384       394       395       393       391       397       395   
    389       381       397       393       395       391       386       349       394       393       393       397       394       392       395       390       397       385       382       387       372       378       380       378       376       386       379       360       377       388       390       379       384       391       395       373       375       372       389       390       376       375       394       396       377       386       397       393       395       386       390       383       392       393       388       387       393       388       392       384       385       390       391       389       397       395       391       397       397       389       393       397       397       387       392       377       396       395       395       342       397       397       372       397       397       397       397       397       397       393       397       368       372       391       396       384       390       394       393       397       397       397       396       350       397       396       393       396       396       391       397       395       396       397       397       397       391       397       394       397       397       382       375       369       394       362       390       389       397       397       395       396       397       397       395       390       397       388       386       365       392       391       390       397       371       392       384       383       376       378       391       395       391       375       351       381       353       355       355       316       131       376       375       392       366       348       397       397       397       363       397       397       394       397       397       397       397       286       397       397       397       373       397       394       378       397       397       397       392       397       393       397       338       397       397       376       397       329       385       370   
    332       315       179       2.6      35.1      28.8       211      88.3      27.3      21.6       127      16.5      48.5       319       320       292      2.52      3.65      7.68      24.7      18.8      96.7      60.7      83.5      81.3        98       100       101       110      27.5      9.32        69       397       391       386       396       387       241      43.1       318       389       397       304      0.32       355       385       376      6.68      50.9      10.5       3.5       272       397       255       391       397       394       397       334        22       331       369       397       397       395       393       375       353       303       396       349       380       383       397       393       395       393       371       389       393       388       395       344       318       390       397       397       397       393       397       397       397       396       397       392       397       397       393       397 \n     4.98      9.14      4.03      2.94      5.33      5.21      12.4      19.2      29.9      17.1      20.5      13.3      15.7      8.26      10.3      8.47      6.58      14.7      11.7      11.3        21      13.8      18.7      19.9      16.3      16.5      14.8      17.3      12.8        12      22.6        13      27.7      18.4      20.3      9.68      11.4      8.77      10.1      4.32      1.98      4.84      5.81      7.44      9.55      10.2      14.2      18.8      30.8      16.2      13.5      9.43      5.28      8.43      14.8      4.81      5.77      3.95      6.86      9.22      13.2      14.4      6.73       9.5      8.05      4.67      10.2       8.1      13.1      8.79      6.72      9.88      5.52      7.54      6.78      8.94        12      10.3      12.3       9.1      5.29      7.22      6.72      7.51      9.62      6.53      12.9      8.44       5.5       5.7      8.81       8.2      8.16      6.21      10.6      6.65      11.3      4.21      3.57      6.19      9.42 
     7.67      10.6      13.4      12.3      16.5      18.7      14.1      12.3      15.6        13      10.2      16.2      17.1      10.5      15.8        12      10.3      15.4      13.6      14.4      14.3      17.9      25.4      17.6      14.8      27.3      17.2      15.4      18.3      12.6      12.3      11.1        15      17.3        17      16.9      14.6      21.3      18.5      24.2      34.4      26.8      26.4      29.3      27.8      16.7      29.5      28.3      21.5      14.1      13.3      12.1      15.8      15.1        15      16.1      4.59      6.43      7.39       5.5      1.73      1.92      3.32      11.6      9.81       3.7      12.1      11.1      11.3      14.4        12      14.7      9.04      9.64      5.33      10.1      6.29      6.92      5.04      7.56      9.45      4.82      5.68        14      13.2      4.45      6.68      4.56      5.39       5.1      4.69      2.87      5.03      4.38      2.97      4.08      8.61      6.62      4.56      4.45      7.43      3.11      3.81      2.88      10.9        11      18.1      14.7      23.1      17.3        24        16      9.38      29.6      9.47      13.5      9.69      17.9      10.5      9.71      21.5      9.93       7.6      4.14      4.63      3.13      6.36      3.92      3.76      11.7      5.25      2.47      3.95      8.05      10.9      9.54      4.73      6.36      7.37      11.4      12.4      11.2      5.19      12.5      18.5      9.16      10.2      9.52      6.56       5.9      3.59      3.53      3.54      6.57      9.25      3.11      5.12      7.79       6.9      9.59      7.26      5.91      11.3       8.1      10.5      14.8      7.44      3.16      13.7        13      6.59      7.73      6.58      3.53      2.98      6.05      4.16      7.19      4.85      3.76      4.59      3.01      3.16      7.85      8.23      12.9      7.14       7.6      9.51      3.33      3.56       4.7      8.58      10.4      6.27      7.39      15.8      4.97      4.74      6.07 
      9.5      8.67      4.86      6.93      8.93      6.47      7.53      4.54      9.97      12.6      5.98      11.7       7.9      9.28      11.5      18.3      15.9      10.4      12.7       7.2      6.87       7.7      11.7      6.12      5.08      6.15      12.8      9.97      7.34      9.09      12.4      7.83      5.68      6.75      8.01       9.8      10.6      8.51      9.74      9.29      5.49      8.65      7.18      4.61      10.5      12.7      6.36      5.99      5.89      5.98      5.49      7.79       4.5      8.05      5.57      17.6      13.3      11.5      12.7      7.79      14.2      10.2      14.6      5.29      7.12        14      13.3      3.26      3.73      2.96      9.53      8.88      34.8        38      13.4      23.2      21.2      23.7      21.8      17.2      21.1      23.6      24.6      30.6      30.8      28.3        32      30.6      20.9      17.1      18.8      25.7      15.2      16.4      17.1      19.4      19.9      30.6        30      26.8      20.3      20.3      19.8      27.4        23      23.3      12.1      26.4      19.8      10.1      21.2      34.4      20.1        37      29.1      25.8      26.6      20.6      22.7        15      15.7      14.1      23.3      17.2      24.4      15.7      14.5      21.5      24.1      17.6      19.7        12      16.2      15.2      23.3      18.1      26.5        34      22.9      22.1      19.5      16.6      18.9      23.8        24      17.8      16.4      18.1      19.3      17.4      17.7      17.3      16.7      18.7      18.1        19      16.9      16.2      14.7      16.4      14.7        14      10.3      13.2      14.1      17.2      21.3      18.1      14.8      16.3      12.9      14.4      11.7      18.1      24.1      18.7      24.9        18      13.1      10.7      7.74      7.01      10.4      13.3      10.6        15      11.5      18.1        24      29.7      18.1      13.4        12      13.6      17.6      21.1      14.1      12.9      15.1      14.3 
     9.67      9.08      5.64      6.48      7.88 \n       24      21.6      34.7      33.4      36.2      28.7      22.9      27.1      16.5      18.9        15      18.9      21.7      20.4      18.2      19.9      23.1      17.5      20.2      18.2      13.6      19.6      15.2      14.5      15.6      13.9      16.6      14.8      18.4        21      12.7      14.5      13.2      13.1      13.5      18.9        20        21      24.7      30.8      34.9      26.6      25.3      24.7      21.2      19.3        20      16.6      14.4      19.4      19.7      20.5        25      23.4      18.9      35.4      24.7      31.6      23.3      19.6      18.7        16      22.2        25        33      23.5      19.4        22      17.4      20.9      24.2      21.7      22.8      23.4      24.1      21.4        20      20.8      21.2      20.3        28      23.9      24.8      22.9      23.9      26.6      22.5      22.2      23.6      28.7      22.6        22      22.9        25      20.6      28.4      21.4      38.7      43.8      33.2      27.5      26.5      18.6      19.3      20.1      19.5      19.5      20.4      19.8      19.4      21.7      22.8      18.8      18.7      18.5      18.3      21.2      19.2      20.4      19.3        22      20.3      20.5      17.3      18.8      21.4      15.7      16.2        18      14.3      19.2      19.6        23      18.4      15.6      18.1      17.4      17.1      13.3      17.8        14      14.4      13.4      15.6      11.8      13.8      15.6      14.6      17.8      15.4      21.5      19.6      15.3      19.4        17      15.6      13.1      41.3      24.3      23.3        27        50        50        50      22.7        25        50      23.8      23.8      22.3      17.4      19.1      23.1      23.6      22.6      29.4      23.2      24.6      29.9      37.2      39.8      36.2      37.9      32.5      26.4      29.6        50        32      29.8      34.9        37      30.5      36.4      31.1      
29.1        50      33.3      30.3      34.6      34.9      32.9      24.1      42.3      48.5        50      22.6      24.4      22.5      24.4        20      21.7      19.3      22.4      28.1      23.7        25      23.3      28.7      21.5        23      26.7      21.7      27.5      30.1      44.8        50      37.6      31.6      46.7      31.5      24.3      31.7      41.7      48.3        29        24      25.1      31.5      23.7      23.3        22      20.1      22.2      23.7      17.6      18.5      24.3      20.5      24.5      26.2      24.4      24.8      29.6      42.8      21.9      20.9        44        50        36      30.1      33.8      43.1      48.8        31      36.5      22.8      30.7        50      43.5      20.7      21.1      25.2      24.4      35.2      32.4        32      33.2      33.1      29.1      35.1      45.4      35.4        46        50      32.2        22      20.1      23.2      22.3      24.8      28.5      37.3      27.9      23.9      21.7      28.6      27.1      20.3      22.5        29      24.8        22      26.4      33.1      36.1      28.4      33.4      28.2      22.8      20.3      16.1      22.1      19.4      21.6      23.8      16.2      17.8      19.8      23.1        21      23.8      23.1      20.4      18.5        25      24.6        23      22.2      19.3      22.6      19.8      17.1      19.4      22.2      20.7      21.1      19.5      18.5      20.6        19      18.7      32.7      16.5      23.9      31.2      17.5      17.2      23.1      24.5      26.6      22.9      24.1      18.6      30.1      18.2      20.6      17.8      21.7      22.7      22.6        25      19.9      20.8      16.8      21.9      27.5      21.9      23.1        50        50        50        50        50      13.8      13.8        15      13.9      13.3      13.1      10.2      10.4      10.9      11.3      12.3       8.8       7.2      10.5       7.4      10.2      11.5      15.1      23.2       9.7      13.8      
12.7      13.1      12.5       8.5         5       6.3       5.6       7.2      12.1       8.3       8.5         5      11.9      27.9      17.2      27.5        15      17.2      17.9      16.3         7       7.2       7.5      10.4       8.8       8.4      16.7      14.2      20.8      13.4      11.7       8.3      10.2      10.9        11       9.5      14.5      14.1      16.1      14.3      11.7      13.4       9.6       8.7       8.4      12.8      10.5      17.1      18.4      15.4      10.8      11.8      14.9      12.6      14.1        13      13.4      15.2      16.1      17.8      14.9      14.1      12.7      13.5      14.9        20      16.4      17.7      19.5      20.2      21.4      19.9        19      19.1      19.1      20.1      19.9      19.6      23.2      29.8      13.8      13.3      16.7        12      14.6      21.4        23      23.7        25      21.8      20.6      21.2      19.1      20.6      15.2         7       8.1      13.6      20.1      21.8      24.5      23.1      19.7      18.3      21.2      17.5      16.8      22.4      20.6      23.9        22      11.9 \n</pre>\n\nThe data has 14 rows and 506 columns, where each column represents the 14 features of one house. The values in the last row represent the price of the property, and we will train a model to predict this value, given the remaining 13 features.\n\nWe also add a row of ones to the input matrix that will account for the bias (intercept) of our model and simplify the implementation.\n\n*)\n\nlet hx = h.[0..12, *]\nlet hy = h.[13..13, *]\n\nlet housing = Dataset(hx, hy).AppendBiasRowX()\n\n(**\n\nOur linear regression model is of the form\n\n$$$\n   h_{\\mathbf{w}} (\\mathbf{x}) = \\sum_j w_j x_j = \\mathbf{w}^{T} \\mathbf{x}\n\nwhich represents a family of linear functions parameterized by the vector $\\mathbf{w}$. 
\n*)\n\nlet model (w:DV) (x:DV) = w * x\n\n(**\n\nFor training the model, we minimize a loss function\n\n$$$\n   J(\\mathbf{w}) = \\frac{1}{2} \\sum_{i=1}^{n} \\left(h_{\\mathbf{w}} (\\mathbf{x}^{(i)}) - y^{(i)} \\right)^2 = \\frac{1}{2} \\sum_{i=1}^{n} \\left( \\mathbf{w}^{T} \\mathbf{x}^{(i)} - y^{(i)} \\right)^2\n\nwhere $\\mathbf{x}^{(i)}$ are vectors holding the 13 input features plus the bias input (the constant 1) and $y^{(i)}$ are the target values (which are here scalar).\n\n*)\n\nlet wopt, lopt, whist, lhist = Optimize.Train(model, Rnd.UniformDV(14), housing, \n                                    {Params.Default with Epochs = 1000\n                                                         Loss = Loss.Quadratic})\n\nlet trainedmodel = model wopt\n\n\n(*** hide ***)\nopen RProvider\nopen RProvider.graphics\nopen RProvider.grDevices\n\nlet px, py = housing.Y.[0,*] |> DV.toArray |> Array.mapi (fun i v -> float i, (v |> float32 |> float)) |> Array.unzip\nlet ppx, ppy = housing.X |> DM.mapCols (fun v -> toDV [trainedmodel v]) |> DM.toDV |> DV.toArray |> Array.mapi (fun i v -> float i, (v |> float32 |> float)) |> Array.unzip\n\nlet ll = lhist |> Array.map (float32>>float)\n\nnamedParams[\n    \"x\", box px\n    \"y\", box py\n    \"pch\", box 19\n    \"col\", box \"darkblue\"\n    \"type\", box \"p\"\n    \"xlab\", box \"House number\"\n    \"ylab\", box \"Price\"\n    \"width\", box 700\n    \"height\", box 500\n    ]\n|> R.plot|> ignore\n\nnamedParams[\n    \"x\", box ppx\n    \"y\", box ppy\n    \"pch\", box 19\n    \"col\", box \"red\"\n    \"type\", box \"p\"\n    \"width\", box 700\n    \"height\", box 500\n    ]\n|> R.points|> ignore\n\n(**\n\n<pre>\n[12/11/2015 14:41:04] --- Training started\n[12/11/2015 14:41:04] Parameters     : 14\n[12/11/2015 14:41:04] Iterations     : 1000\n[12/11/2015 14:41:04] Epochs         : 1000\n[12/11/2015 14:41:04] Batches        : Full (1 per epoch)\n[12/11/2015 14:41:04] Training data  : 506\n[12/11/2015 14:41:04] 
Validation data: None\n[12/11/2015 14:41:04] Valid. interval: 10\n[12/11/2015 14:41:04] Method         : Gradient descent\n[12/11/2015 14:41:04] Learning rate  : RMSProp a0 = D 0.00100000005f, k = D 0.899999976f\n[12/11/2015 14:41:04] Momentum       : None\n[12/11/2015 14:41:04] Loss           : L2 norm\n[12/11/2015 14:41:04] Regularizer    : L2 lambda = D 9.99999975e-05f\n[12/11/2015 14:41:04] Gradient clip. : None\n[12/11/2015 14:41:04] Early stopping : None\n[12/11/2015 14:41:04] Improv. thresh.: D 0.995000005f\n[12/11/2015 14:41:04] Return best    : true\n[12/11/2015 14:41:04]    1/1000 | Batch 1/1 | D  5.281104e+002 [- ]\n[12/11/2015 14:41:04]    2/1000 | Batch 1/1 | D  5.252324e+002 [↓▼]\n[12/11/2015 14:41:04]    3/1000 | Batch 1/1 | D  5.231447e+002 [↓ ]\n[12/11/2015 14:41:04]    4/1000 | Batch 1/1 | D  5.213967e+002 [↓▼]\n[12/11/2015 14:41:04]    5/1000 | Batch 1/1 | D  5.198447e+002 [↓ ]\n[12/11/2015 14:41:04]    6/1000 | Batch 1/1 | D  5.184225e+002 [↓▼]\n[12/11/2015 14:41:04]    7/1000 | Batch 1/1 | D  5.170928e+002 [↓ ]\n...\n[12/11/2015 14:41:27]  994/1000 | Batch 1/1 | D  6.404338e+000 [↓ ]\n[12/11/2015 14:41:27]  995/1000 | Batch 1/1 | D  6.392090e+000 [↓ ]\n[12/11/2015 14:41:27]  996/1000 | Batch 1/1 | D  6.377205e+000 [↓▼]\n[12/11/2015 14:41:28]  997/1000 | Batch 1/1 | D  6.363370e+000 [↓ ]\n[12/11/2015 14:41:28]  998/1000 | Batch 1/1 | D  6.351198e+000 [↓ ]\n[12/11/2015 14:41:28]  999/1000 | Batch 1/1 | D  6.344284e+000 [↓▼]\n[12/11/2015 14:41:28] 1000/1000 | Batch 1/1 | D  6.334455e+000 [↓ ]\n[12/11/2015 14:41:28] Duration       : 00:00:23.3076639\n[12/11/2015 14:41:28] Loss initial   : D  5.281104e+002\n[12/11/2015 14:41:28] Loss final     : D  6.344284e+000 (Best)\n[12/11/2015 14:41:28] Loss change    : D -5.217661e+002 (-98.80 %)\n[12/11/2015 14:41:28] Loss chg. 
/ s  : D -2.238603e+001\n[12/11/2015 14:41:28] Epochs / s     : 42.90434272\n[12/11/2015 14:41:28] Epochs / min   : 2574.260563\n[12/11/2015 14:41:28] --- Training finished\n\nval trainedmodel : (DV -> D)\n</pre>\n\nThe following is a plot of the prices in the dataset where the blue points represent the real price, and the red points are the values predicted by the trained linear model.\n\n<div class=\"row\">\n    <div class=\"span6 text-center\">\n        <img src=\"img/Training-1.png\" alt=\"Chart\" style=\"width:500px;\"/>\n    </div>\n</div><br/>\n\n### Loss function\n*)\n\ntype Loss =\n    | L1Loss    // L1 norm, least absolute deviations\n    | L2Loss    // L2 norm\n    | Quadratic // L2 norm squared, least squares\n    | CrossEntropyOnLinear  // Cross entropy after linear layer\n    | CrossEntropyOnSoftmax // Cross entropy after softmax layer\n\n(**\n### Regularization\n*)\n\ntype Regularization =\n    | L1Reg of D // L1 regularization\n    | L2Reg of D // L2 regularization\n    | NoReg\n    static member DefaultL1Reg = L1Reg (D 0.0001f)\n    static member DefaultL2Reg = L2Reg (D 0.0001f)\n\n(**\n\n### Batch\n*)\n\ntype Batch =\n    | Full\n    | Minibatch of int // Minibatch of given size\n    | Stochastic       // Minibatch with size 1, SGD\n\n(**\n### Validation and early stopping\n*)\n\ntype EarlyStopping =\n    | Early of int * int // Stagnation patience, overfitting patience\n    | NoEarly\n    static member DefaultEarly = Early (750, 10)\n\n(**\n\nTraining proceeds by minimizing the loss function by adjusting model parameters. Continuing this optimization for longer than necessary causes overfitting, where the model strives to precisely approximate the training data. 
Overfitting reduces the model's generalization ability and its performance with new data in the field.\n\nTo prevent overfitting, data is divided into training and validation sets, and while the model is being optimized by computing the loss function using the training data, the model's performance with the validation data is also monitored. Generally, at the initial stages of training the loss for both the training and validation data will decrease. Eventually, the validation loss will asymptotically approach a minimum, and beyond a certain stage, it will start to increase even when the training loss keeps decreasing. This signifies a good time to stop the training, for preventing overfitting the model to the training data.\n\nHype does this via the **EarlyStopping** parameter, where you can specify a stagnation \"patience\" for the number of acceptable iterations for non-decreasing training loss and an overfitting patience for the number of acceptable iterations where the training loss decreases without an accompanying decrease in the validation loss.\n\nLet's divide the housing dataset into training and validation sets and train the model using early stopping.\n\n*)\n\nlet housingtrain = housing.[..399] // The first 400 data points\nlet housingvalid = housing.[400..] 
// The remaining 106 data points\n\n(**\n<pre>\nval housingtrain : Dataset = Hype.Dataset\n   X: 14 x 400\n   Y: 1 x 400\nval housingvalid : Dataset = Hype.Dataset\n   X: 14 x 106\n   Y: 1 x 106\n</pre>\n*)\n\nlet wopt, lopt, whist, lhist = Optimize.Train(model, Rnd.UniformDV(14), housingtrain, housingvalid,\n                                 {Params.Default with \n                                    Epochs = 1000;\n                                    EarlyStopping = Hype.EarlyStopping.Early(750, 10)})\n\n(**\n<pre>\n[12/11/2015 15:09:15] --- Training started\n[12/11/2015 15:09:15] Parameters     : 14\n[12/11/2015 15:09:15] Iterations     : 1000\n[12/11/2015 15:09:15] Epochs         : 1000\n[12/11/2015 15:09:15] Batches        : Full (1 per epoch)\n[12/11/2015 15:09:15] Training data  : 400\n[12/11/2015 15:09:15] Validation data: 106\n[12/11/2015 15:09:15] Valid. interval: 10\n[12/11/2015 15:09:15] Method         : Gradient descent\n[12/11/2015 15:09:15] Learning rate  : RMSProp a0 = D 0.00100000005f, k = D 0.899999976f\n[12/11/2015 15:09:15] Momentum       : None\n[12/11/2015 15:09:15] Loss           : L2 norm\n[12/11/2015 15:09:15] Regularizer    : L2 lambda = D 9.99999975e-05f\n[12/11/2015 15:09:15] Gradient clip. : None\n[12/11/2015 15:09:15] Early stopping : Stagnation thresh. = 750, overfit. thresh. = 10\n[12/11/2015 15:09:15] Improv. 
thresh.: D 0.995000005f\n[12/11/2015 15:09:15] Return best    : true\n[12/11/2015 15:09:15]    1/1000 | Batch 1/1 | D  3.221269e+002 [- ] | Valid D  3.322605e+002 [- ] | Stag:  0 Ovfit: 0\n[12/11/2015 15:09:15]    2/1000 | Batch 1/1 | D  3.193867e+002 [↓▼] | Valid D  3.288632e+002 [↓▼] | Stag:  0 Ovfit: 0\n[12/11/2015 15:09:15]    3/1000 | Batch 1/1 | D  3.173987e+002 [↓▼] | Valid D  3.263986e+002 [↓▼] | Stag:  0 Ovfit: 0\n[12/11/2015 15:09:15]    4/1000 | Batch 1/1 | D  3.157341e+002 [↓▼] | Valid D  3.243348e+002 [↓▼] | Stag:  0 Ovfit: 0\n[12/11/2015 15:09:15]    5/1000 | Batch 1/1 | D  3.142565e+002 [↓ ] | Valid D  3.225029e+002 [↓▼] | Stag:  0 Ovfit: 0\n[12/11/2015 15:09:15]    6/1000 | Batch 1/1 | D  3.129025e+002 [↓▼] | Valid D  3.208241e+002 [↓▼] | Stag:  0 Ovfit: 0\n[12/11/2015 15:09:15]    7/1000 | Batch 1/1 | D  3.116365e+002 [↓ ] | Valid D  3.192545e+002 [↓ ] | Stag: 10 Ovfit: 0\n[12/11/2015 15:09:15]    8/1000 | Batch 1/1 | D  3.104370e+002 [↓▼] | Valid D  3.177671e+002 [↓▼] | Stag:  0 Ovfit: 0\n[12/11/2015 15:09:15]    9/1000 | Batch 1/1 | D  3.092885e+002 [↓ ] | Valid D  3.163436e+002 [↓ ] | Stag: 10 Ovfit: 0\n[12/11/2015 15:09:15]   10/1000 | Batch 1/1 | D  3.081814e+002 [↓▼] | Valid D  3.149709e+002 [↓▼] | Stag:  0 Ovfit: 0\n[12/11/2015 15:09:15]   11/1000 | Batch 1/1 | D  3.071076e+002 [↓ ] | Valid D  3.136398e+002 [↓ ] | Stag: 10 Ovfit: 0\n[12/11/2015 15:09:15]   12/1000 | Batch 1/1 | D  3.060618e+002 [↓▼] | Valid D  3.123428e+002 [↓▼] | Stag:  0 Ovfit: 0\n[12/11/2015 15:09:15]   13/1000 | Batch 1/1 | D  3.050388e+002 [↓ ] | Valid D  3.110746e+002 [↓ ] | Stag: 10 Ovfit: 0\n...\n[12/11/2015 15:09:21]  318/1000 | Batch 1/1 | D  4.250416e+001 [↓▼] | Valid D  3.382476e+001 [↓▼] | Stag:  0 Ovfit: 0\n[12/11/2015 15:09:21]  319/1000 | Batch 1/1 | D  4.178834e+001 [↓▼] | Valid D  3.371201e+001 [↓ ] | Stag: 10 Ovfit: 0\n[12/11/2015 15:09:21]  320/1000 | Batch 1/1 | D  4.109373e+001 [↓▼] | Valid D  3.361367e+001 [↓▼] | Stag:  0 Ovfit: 0\n[12/11/2015 
15:09:21]  321/1000 | Batch 1/1 | D  4.040976e+001 [↓▼] | Valid D  3.362166e+001 [↑ ] | Stag: 10 Ovfit: 0\n[12/11/2015 15:09:21]  322/1000 | Batch 1/1 | D  3.973472e+001 [↓▼] | Valid D  3.368684e+001 [↑ ] | Stag: 20 Ovfit: 1\n[12/11/2015 15:09:21]  323/1000 | Batch 1/1 | D  3.907929e+001 [↓▼] | Valid D  3.382304e+001 [↑ ] | Stag: 30 Ovfit: 2\n[12/11/2015 15:09:21]  324/1000 | Batch 1/1 | D  3.845267e+001 [↓▼] | Valid D  3.398524e+001 [↑ ] | Stag: 40 Ovfit: 3\n[12/11/2015 15:09:21]  325/1000 | Batch 1/1 | D  3.783842e+001 [↓▼] | Valid D  3.418199e+001 [↑ ] | Stag: 50 Ovfit: 4\n[12/11/2015 15:09:21]  326/1000 | Batch 1/1 | D  3.721857e+001 [↓▼] | Valid D  3.450164e+001 [↑ ] | Stag: 60 Ovfit: 5\n[12/11/2015 15:09:21]  327/1000 | Batch 1/1 | D  3.659464e+001 [↓▼] | Valid D  3.499456e+001 [↑ ] | Stag: 70 Ovfit: 6\n[12/11/2015 15:09:21]  328/1000 | Batch 1/1 | D  3.598552e+001 [↓▼] | Valid D  3.556280e+001 [↑ ] | Stag: 80 Ovfit: 7\n[12/11/2015 15:09:21]  329/1000 | Batch 1/1 | D  3.538885e+001 [↓▼] | Valid D  3.616002e+001 [↑ ] | Stag: 90 Ovfit: 8\n[12/11/2015 15:09:21]  330/1000 | Batch 1/1 | D  3.481464e+001 [↓▼] | Valid D  3.678414e+001 [↑ ] | Stag:100 Ovfit: 9\n[12/11/2015 15:09:21] *** EARLY STOPPING TRIGGERED: Overfitting ***\n[12/11/2015 15:09:21]  331/1000 | Batch 1/1 | D  3.426452e+001 [↓▼] | Valid D  3.741238e+001 [↑ ] | Stag:110 Ovfit:10\n[12/11/2015 15:09:21] Duration       : 00:00:05.9617220\n[12/11/2015 15:09:21] Loss initial   : D  3.221269e+002\n[12/11/2015 15:09:21] Loss final     : D  3.373809e+001 (Best)\n[12/11/2015 15:09:21] Loss change    : D -2.883888e+002 (-89.53 %)\n[12/11/2015 15:09:21] Loss chg. / s  : D -4.837340e+001\n[12/11/2015 15:09:21] Epochs / s     : 55.52087132\n[12/11/2015 15:09:21] Epochs / min   : 3331.252279\n[12/11/2015 15:09:21] --- Training finished\n</pre>\n*)\n"
  },
  {
    "path": "docs/input/download.fsx",
    "content": "﻿(**\nDownload\n========\n\nHype is tested on Linux and Windows.\n\nYou can download the source code or the binaries of the [latest release on GitHub](https://github.com/hypelib/Hype/releases).\n\nYou can also install the library as a package through [NuGet](https://www.nuget.org/packages/Hype), by running <pre>Install-Package Hype</pre> in the package manager console.\n\nHype only supports the 64 bit platform, so please make sure that you set \"x64\" as the platform target. For detailed instructions, please see the installation instructions of [DiffSharp](http://diffsharp.github.io/DiffSharp/), on which Hype depends.\n*)"
  },
  {
    "path": "docs/input/files/misc/style.css",
    "content": "@import url(https://fonts.googleapis.com/css?family=Droid+Sans|Droid+Sans+Mono|Open+Sans:400,600,700);\n\n/*-------------------------------------------------------------------------- \n  Formatting for F# code snippets \n/*--------------------------------------------------------------------------*/\n\n/* strings --- and stlyes for other string related formats */\nspan.s { color:#E0E268; }\n/* printf formatters */\nspan.pf { color:#E0C57F; }\n/* escaped chars */\nspan.e { color:#EA8675; }\n\n/* identifiers --- and styles for more specific identifier types */\nspan.i { color:#d1d1d1; }\n/* type or module */\nspan.t { color:#43AEC6; }\n/* function */\nspan.f { color:#e1e1e1; }\n/* DU case or active pattern */\nspan.p { color:#4ec9b0; }\n\n/* keywords */\nspan.k { color:#FAB11D; }\n/* comment */\nspan.c { color:#808080; }\n/* operators */\nspan.o { color:#af75c1; }\n/* numbers */\nspan.n { color:#96C71D; }\n/* line number */\nspan.l { color:#80b0b0; }\n/* mutable var or ref cell */\nspan.v { color:#d1d1d1; font-weight: bold; }\n/* inactive code */\nspan.inactive { color:#808080; }\n/* preprocessor */\nspan.prep { color:#af75c1; }\n/* fsi output */\nspan.fsi { color:#808080; }\n\n/* omitted */\nspan.omitted { \n\tbackground:#3c4e52;\n  border-radius:5px;\n\tcolor:#808080;\n\tpadding:0px 0px 1px 0px;\n}\n/* tool tip */\ndiv.tip {\n\tbackground:#475b5f;\n  border-radius:4px;\n  font:11pt 'Droid Sans', arial, sans-serif;\n\tpadding:6px 8px 6px 8px;\n\tdisplay:none;\n  color:#d1d1d1;\n}\ntable.pre pre {\n  padding:0px;\n  margin:0px;\n  border:none;\n} \ntable.pre, pre.fssnip, pre {\n  line-height:13pt;\n  border:1px solid #191919;\n  border-collapse:separate;\n  white-space:pre;\n  font: 9pt 'Droid Sans Mono',consolas,monospace;\n  width:90%;\n  margin:10px 20px 20px 20px;\n  background-color:#212d30;\n  padding:10px;\n  border-radius:5px;\n  color:#d1d1d1;  \n}\ntable.pre pre {\n  padding:0px;\n  margin:0px;\n  border-radius:0px;\n  width: 
100%;\n}\ntable.pre td {\n  padding:0px;\n  white-space:normal;\n  margin:0px;\n}\ntable.pre td.lines {\n  width:30px;\n}\n\n/*-------------------------------------------------------------------------- \n  Formatting for page & standard document content\n/*--------------------------------------------------------------------------*/\n\nbody {\n  font-family: 'Open Sans', 'Segoe UI', serif;\n  padding-top: 0px;\n  padding-bottom: 40px;\n  color: #dbdbdb;\n  background-color: #191919;\n}\n\npre {\n    word-wrap: inherit;\n}\n\n.nav-pills > li > a {\n  background-color: #191919;\n  font-size: 11pt;\n  height: 32px;\n  padding: 7px 15px;\n}\n\n.nav-pills > li > a:hover {\n  color : #000000;\n  background-color: #3781c3;\n  font-size: 11pt;\n}\n\n.alert-info {\n  color: #ebebeb;\n  background-color: #1f1f1f;\n  border-color: #191919;\n}\n\n.hype {\n  color: #fff;\n}\n\n.nav-label {\n  font-size: 11pt;\n  height: 34px;\n  padding: 7px 15px;\n  margin: 20px 0px 0px 0px;\n  color: #dbdbdb;\n}\n\na:link {\n  color: #3781c3;\n}\n\na:visited {\n  color: #3275b0;\n}\n\na:hover {\n  color: #7fb0db;\n}\n\na:active {\n  color: #35ad67;\n}\n\nh1 {\n  margin-bottom: 30px;\n}\n\n/* Make table headings and td.title bold */\ntd.title, thead {\n  font-weight:bold;\n}\n\n\n/*-------------------------------------------------------------------------- \n  Formatting for API reference\n/*--------------------------------------------------------------------------*/\n\n.type-list .type-name, .module-list .module-name {\n  width:25%;\n  font-weight:bold;\n}\n.member-list .member-name {\n  width:35%;\n}\n#main .xmldoc h2 {\n  font-size:14pt;\n  margin:10px 0px 0px 0px;\n}\n#main .xmldoc h3 {\n  font-size:12pt;\n  margin:10px 0px 0px 0px;\n}\n.github-link {\n  float:right;\n  text-decoration:none;\n}\n.github-link img {\n  border-style:none;\n  margin-left:10px;\n}\n.github-link .hover { display:none; }\n.github-link:hover .hover { display:block; }\n.github-link .normal { display: block; 
}\n.github-link:hover .normal { display: none; }\n\ncode {\n    color: white;\n    background-color: #2f2f2f;\n    font-size: 100%;\n}\n\n/*-------------------------------------------------------------------------- \n  Links \n/*--------------------------------------------------------------------------*/\n\nh1 a, h1 a:hover, h1 a:focus,\nh2 a, h2 a:hover, h2 a:focus,\nh3 a, h3 a:hover, h3 a:focus,\nh4 a, h4 a:hover, h4 a:focus,\nh5 a, h5 a:hover, h5 a:focus,\nh6 a, h6 a:hover, h6 a:focus { color : inherit; text-decoration : inherit; outline:none }\n\n/*-------------------------------------------------------------------------- \n  Additional formatting for the homepage \n/*--------------------------------------------------------------------------*/\n\n#nuget { \n  margin-top:20px;\n  font-size: 11pt; \n  padding:20px;\n  background: #e0e0e0;\n  color: #3c3c3c;\n}\n\n#nuget pre {\n  font-size:11pt;\n  -moz-border-radius: 0px;\n  -webkit-border-radius: 0px;\n  border-radius: 0px;\n  background: #3c3c3c;\n  border-style:none;\n  color: #e0e0e0;\n  margin-top:15px;\n}"
  },
  {
    "path": "docs/input/files/misc/style_light.css",
    "content": "@import url(https://fonts.googleapis.com/css?family=Droid+Sans|Droid+Sans+Mono|Open+Sans:400,600,700);\n\n/*-------------------------------------------------------------------------- \n  Formatting for F# code snippets \n/*--------------------------------------------------------------------------*/\n\n/* identifier */\nspan.i { color:#000000; }\n/* string */\nspan.s { color:#a31515; }\n/* keywords */\nspan.k { color:#0000ff; }\n/* comment */\nspan.c { color:#008000; }\n/* operators */\nspan.o { color:#000000; }\n/* numbers */\nspan.n { color:#000000; }\n/* line number */\nspan.l { color:#96c2cd; }\n/* type or module */\nspan.t { color:#2b91af; }\n/* function */\nspan.f { color:#0000a0; }\n/* DU case or active pattern */\nspan.p { color:#800080; }\n/* mutable var or ref cell */\nspan.v { color:#000000; font-weight: bold; }\n/* printf formatters */\nspan.pf { color:#2b91af; }\n/* escaped chars */\nspan.e { color:#ff0080; }\n/* mutable var or ref cell */\n\n\n/* inactive code */\nspan.inactive { color:#808080; }\n/* preprocessor */\nspan.prep { color:#0000ff; }\n/* fsi output */\nspan.fsi { color:#808080; }\n\n/* omitted */\nspan.omitted { \n\tbackground:#3c4e52;\n  border-radius:5px;\n\tcolor:#808080;\n\tpadding:0px 0px 1px 0px;\n}\n/* tool tip */\ndiv.tip {\n\tbackground:#e5e5e5;\n  border-radius:4px;\n  font:9pt 'Droid Sans', arial, sans-serif;\n\tpadding:6px 8px 6px 8px;\n\tdisplay:none;\n  color:#000000;\n}\ntable.pre pre {\n  padding:0px;\n  margin:0px;\n  border:none;\n} \ntable.pre, pre.fssnip, pre {\n  line-height:13pt;\n  border:1px solid #d8d8d8;\n  border-collapse:separate;\n  white-space:pre;\n  font: 10pt consolas,monospace;\n  width:90%;\n  margin:10px 20px 20px 20px;\n  background-color:#fdfdfd;\n  padding:10px;\n  border-radius:5px;\n  color:#000000;  \n}\ntable.pre pre {\n  padding:0px;\n  margin:0px;\n  border-radius:0px;\n  width: 100%;\n}\ntable.pre td {\n  padding:0px;\n  white-space:normal;\n  margin:0px;\n}\ntable.pre td.lines 
{\n  width:30px;\n}\n\n/*-------------------------------------------------------------------------- \n  Formatting for page & standard document content\n/*--------------------------------------------------------------------------*/\n\nbody {\n  font-family: 'Open Sans', serif;\n  padding-top: 0px;\n  padding-bottom: 40px;\n}\n\npre {\n    word-wrap: inherit;\n}\n\n/* Format the heading - nicer spacing etc. */\n.masthead {\n  overflow: hidden;\n}\n.masthead .muted a {\n  text-decoration:none;\n  color:#999999;\n}\n.masthead ul, .masthead li {\n  margin-bottom:0px;\n}\n.masthead .nav li {\n  margin-top: 15px;\n  font-size:110%;\n}\n.masthead h3 {\n  margin-bottom:5px;\n  font-size:170%;\n}\nhr {\n  margin:0px 0px 20px 0px;\n}\n\n/* Make table headings and td.title bold */\ntd.title, thead {\n  font-weight:bold;\n}\n\n/* Format the right-side menu */\n#menu {\n  margin-top:50px;\n  font-size:11pt;\n  padding-left:20px;\n}\n\n#menu .nav-header {\n  font-size:12pt;\n  color:#606060;\n  margin-top:20px;\n}\n\n#menu li {\n  line-height:25px;\n}\n\n/* Change font sizes for headings etc. 
*/\n#main h1 { font-size: 26pt; margin:10px 0px 15px 0px; font-weight:400; }\n#main h2 { font-size: 20pt; margin:20px 0px 0px 0px; font-weight:400; }\n#main h3 { font-size: 14pt; margin:15px 0px 0px 0px; font-weight:600; }\n#main p  { font-size: 11pt; margin:5px 0px 15px 0px; }\n#main ul { font-size: 11pt; margin-top:10px; }\n#main li { font-size: 11pt; margin: 5px 0px 5px 0px; }\n#main strong { font-weight:700; }\n\n/*-------------------------------------------------------------------------- \n  Formatting for API reference\n/*--------------------------------------------------------------------------*/\n\n.type-list .type-name, .module-list .module-name {\n  width:25%;\n  font-weight:bold;\n}\n.member-list .member-name {\n  width:35%;\n}\n#main .xmldoc h2 {\n  font-size:14pt;\n  margin:10px 0px 0px 0px;\n}\n#main .xmldoc h3 {\n  font-size:12pt;\n  margin:10px 0px 0px 0px;\n}\n.github-link {\n  float:right;\n  text-decoration:none;\n}\n.github-link img {\n  border-style:none;\n  margin-left:10px;\n}\n.github-link .hover { display:none; }\n.github-link:hover .hover { display:block; }\n.github-link .normal { display: block; }\n.github-link:hover .normal { display: none; }\n\n/*-------------------------------------------------------------------------- \n  Links \n/*--------------------------------------------------------------------------*/\n\nh1 a, h1 a:hover, h1 a:focus,\nh2 a, h2 a:hover, h2 a:focus,\nh3 a, h3 a:hover, h3 a:focus,\nh4 a, h4 a:hover, h4 a:focus,\nh5 a, h5 a:hover, h5 a:focus,\nh6 a, h6 a:hover, h6 a:focus { color : inherit; text-decoration : inherit; outline:none }\n\n/*-------------------------------------------------------------------------- \n  Additional formatting for the homepage \n/*--------------------------------------------------------------------------*/\n\n#nuget { \n  margin-top:20px;\n  font-size: 11pt; \n  padding:20px; \n}\n\n#nuget pre {\n  font-size:11pt;\n  -moz-border-radius: 0px;\n  -webkit-border-radius: 0px;\n  border-radius: 
0px;\n  background: #404040;\n  border-style:none;\n  color: #e0e0e0;\n  margin-top:15px;\n}"
  },
  {
    "path": "docs/input/files/misc/tips.js",
    "content": "var currentTip = null;\nvar currentTipElement = null;\n\nfunction hideTip(evt, name, unique) {\n    var el = document.getElementById(name);\n    el.style.display = \"none\";\n    currentTip = null;\n}\n\nfunction findPos(obj) {\n    // no idea why, but it behaves differently in webbrowser component\n    if (window.location.search == \"?inapp\")\n        return [obj.offsetLeft + 10, obj.offsetTop + 30];\n\n    var curleft = 0;\n    var curtop = obj.offsetHeight;\n    while (obj) {\n        curleft += obj.offsetLeft;\n        curtop += obj.offsetTop;\n        obj = obj.offsetParent;\n    };\n    return [curleft, curtop];\n}\n\nfunction hideUsingEsc(e) {\n    if (!e) { e = event; }\n    hideTip(e, currentTipElement, currentTip);\n}\n\nfunction showTip(evt, name, unique, owner) {\n    document.onkeydown = hideUsingEsc;\n    if (currentTip == unique) return;\n    currentTip = unique;\n    currentTipElement = name;\n\n    var pos = findPos(owner ? owner : (evt.srcElement ? evt.srcElement : evt.target));\n    var posx = pos[0];\n    var posy = pos[1];\n\n    var el = document.getElementById(name);\n    var parent = (document.documentElement == null) ? document.body : document.documentElement;\n    el.style.position = \"absolute\";\n    el.style.left = posx + \"px\";\n    el.style.top = posy + \"px\";\n    el.style.display = \"block\";\n}"
  },
  {
    "path": "docs/input/housing.data",
    "content": "0.00632\t18.00\t2.310\t0\t0.5380\t6.5750\t65.20\t4.0900\t1\t296.0\t15.30\t396.90\t4.98\t24.00\n0.02731\t0.00\t7.070\t0\t0.4690\t6.4210\t78.90\t4.9671\t2\t242.0\t17.80\t396.90\t9.14\t21.60\n0.02729\t0.00\t7.070\t0\t0.4690\t7.1850\t61.10\t4.9671\t2\t242.0\t17.80\t392.83\t4.03\t34.70\n0.03237\t0.00\t2.180\t0\t0.4580\t6.9980\t45.80\t6.0622\t3\t222.0\t18.70\t394.63\t2.94\t33.40\n0.06905\t0.00\t2.180\t0\t0.4580\t7.1470\t54.20\t6.0622\t3\t222.0\t18.70\t396.90\t5.33\t36.20\n0.02985\t0.00\t2.180\t0\t0.4580\t6.4300\t58.70\t6.0622\t3\t222.0\t18.70\t394.12\t5.21\t28.70\n0.08829\t12.50\t7.870\t0\t0.5240\t6.0120\t66.60\t5.5605\t5\t311.0\t15.20\t395.60\t12.43\t22.90\n0.14455\t12.50\t7.870\t0\t0.5240\t6.1720\t96.10\t5.9505\t5\t311.0\t15.20\t396.90\t19.15\t27.10\n0.21124\t12.50\t7.870\t0\t0.5240\t5.6310\t100.00\t6.0821\t5\t311.0\t15.20\t386.63\t29.93\t16.50\n0.17004\t12.50\t7.870\t0\t0.5240\t6.0040\t85.90\t6.5921\t5\t311.0\t15.20\t386.71\t17.10\t18.90\n0.22489\t12.50\t7.870\t0\t0.5240\t6.3770\t94.30\t6.3467\t5\t311.0\t15.20\t392.52\t20.45\t15.00\n0.11747\t12.50\t7.870\t0\t0.5240\t6.0090\t82.90\t6.2267\t5\t311.0\t15.20\t396.90\t13.27\t18.90\n0.09378\t12.50\t7.870\t0\t0.5240\t5.8890\t39.00\t5.4509\t5\t311.0\t15.20\t390.50\t15.71\t21.70\n0.62976\t0.00\t8.140\t0\t0.5380\t5.9490\t61.80\t4.7075\t4\t307.0\t21.00\t396.90\t8.26\t20.40\n0.63796\t0.00\t8.140\t0\t0.5380\t6.0960\t84.50\t4.4619\t4\t307.0\t21.00\t380.02\t10.26\t18.20\n0.62739\t0.00\t8.140\t0\t0.5380\t5.8340\t56.50\t4.4986\t4\t307.0\t21.00\t395.62\t8.47\t19.90\n1.05393\t0.00\t8.140\t0\t0.5380\t5.9350\t29.30\t4.4986\t4\t307.0\t21.00\t386.85\t6.58\t23.10\n0.78420\t0.00\t8.140\t0\t0.5380\t5.9900\t81.70\t4.2579\t4\t307.0\t21.00\t386.75\t14.67\t17.50\n0.80271\t0.00\t8.140\t0\t0.5380\t5.4560\t36.60\t3.7965\t4\t307.0\t21.00\t288.99\t11.69\t20.20\n0.72580\t0.00\t8.140\t0\t0.5380\t5.7270\t69.50\t3.7965\t4\t307.0\t21.00\t390.95\t11.28\t18.20\n1.25179\t0.00\t8.140\t0\t0.5380\t5.5700\t98.10\t3.7979\t4\t307.0\t21.00\t376.57\t21.
02\t13.60\n0.85204\t0.00\t8.140\t0\t0.5380\t5.9650\t89.20\t4.0123\t4\t307.0\t21.00\t392.53\t13.83\t19.60\n1.23247\t0.00\t8.140\t0\t0.5380\t6.1420\t91.70\t3.9769\t4\t307.0\t21.00\t396.90\t18.72\t15.20\n0.98843\t0.00\t8.140\t0\t0.5380\t5.8130\t100.00\t4.0952\t4\t307.0\t21.00\t394.54\t19.88\t14.50\n0.75026\t0.00\t8.140\t0\t0.5380\t5.9240\t94.10\t4.3996\t4\t307.0\t21.00\t394.33\t16.30\t15.60\n0.84054\t0.00\t8.140\t0\t0.5380\t5.5990\t85.70\t4.4546\t4\t307.0\t21.00\t303.42\t16.51\t13.90\n0.67191\t0.00\t8.140\t0\t0.5380\t5.8130\t90.30\t4.6820\t4\t307.0\t21.00\t376.88\t14.81\t16.60\n0.95577\t0.00\t8.140\t0\t0.5380\t6.0470\t88.80\t4.4534\t4\t307.0\t21.00\t306.38\t17.28\t14.80\n0.77299\t0.00\t8.140\t0\t0.5380\t6.4950\t94.40\t4.4547\t4\t307.0\t21.00\t387.94\t12.80\t18.40\n1.00245\t0.00\t8.140\t0\t0.5380\t6.6740\t87.30\t4.2390\t4\t307.0\t21.00\t380.23\t11.98\t21.00\n1.13081\t0.00\t8.140\t0\t0.5380\t5.7130\t94.10\t4.2330\t4\t307.0\t21.00\t360.17\t22.60\t12.70\n1.35472\t0.00\t8.140\t0\t0.5380\t6.0720\t100.00\t4.1750\t4\t307.0\t21.00\t376.73\t13.04\t14.50\n1.38799\t0.00\t8.140\t0\t0.5380\t5.9500\t82.00\t3.9900\t4\t307.0\t21.00\t232.60\t27.71\t13.20\n1.15172\t0.00\t8.140\t0\t0.5380\t5.7010\t95.00\t3.7872\t4\t307.0\t21.00\t358.77\t18.35\t13.10\n1.61282\t0.00\t8.140\t0\t0.5380\t6.0960\t96.90\t3.7598\t4\t307.0\t21.00\t248.31\t20.34\t13.50\n0.06417\t0.00\t5.960\t0\t0.4990\t5.9330\t68.20\t3.3603\t5\t279.0\t19.20\t396.90\t9.68\t18.90\n0.09744\t0.00\t5.960\t0\t0.4990\t5.8410\t61.40\t3.3779\t5\t279.0\t19.20\t377.56\t11.41\t20.00\n0.08014\t0.00\t5.960\t0\t0.4990\t5.8500\t41.50\t3.9342\t5\t279.0\t19.20\t396.90\t8.77\t21.00\n0.17505\t0.00\t5.960\t0\t0.4990\t5.9660\t30.20\t3.8473\t5\t279.0\t19.20\t393.43\t10.13\t24.70\n0.02763\t75.00\t2.950\t0\t0.4280\t6.5950\t21.80\t5.4011\t3\t252.0\t18.30\t395.63\t4.32\t30.80\n0.03359\t75.00\t2.950\t0\t0.4280\t7.0240\t15.80\t5.4011\t3\t252.0\t18.30\t395.62\t1.98\t34.90\n0.12744\t0.00\t6.910\t0\t0.4480\t6.7700\t2.90\t5.7209\t3\t233.0\t17.90\t385.41\t4.84\t26.
60\n0.14150\t0.00\t6.910\t0\t0.4480\t6.1690\t6.60\t5.7209\t3\t233.0\t17.90\t383.37\t5.81\t25.30\n0.15936\t0.00\t6.910\t0\t0.4480\t6.2110\t6.50\t5.7209\t3\t233.0\t17.90\t394.46\t7.44\t24.70\n0.12269\t0.00\t6.910\t0\t0.4480\t6.0690\t40.00\t5.7209\t3\t233.0\t17.90\t389.39\t9.55\t21.20\n0.17142\t0.00\t6.910\t0\t0.4480\t5.6820\t33.80\t5.1004\t3\t233.0\t17.90\t396.90\t10.21\t19.30\n0.18836\t0.00\t6.910\t0\t0.4480\t5.7860\t33.30\t5.1004\t3\t233.0\t17.90\t396.90\t14.15\t20.00\n0.22927\t0.00\t6.910\t0\t0.4480\t6.0300\t85.50\t5.6894\t3\t233.0\t17.90\t392.74\t18.80\t16.60\n0.25387\t0.00\t6.910\t0\t0.4480\t5.3990\t95.30\t5.8700\t3\t233.0\t17.90\t396.90\t30.81\t14.40\n0.21977\t0.00\t6.910\t0\t0.4480\t5.6020\t62.00\t6.0877\t3\t233.0\t17.90\t396.90\t16.20\t19.40\n0.08873\t21.00\t5.640\t0\t0.4390\t5.9630\t45.70\t6.8147\t4\t243.0\t16.80\t395.56\t13.45\t19.70\n0.04337\t21.00\t5.640\t0\t0.4390\t6.1150\t63.00\t6.8147\t4\t243.0\t16.80\t393.97\t9.43\t20.50\n0.05360\t21.00\t5.640\t0\t0.4390\t6.5110\t21.10\t6.8147\t4\t243.0\t16.80\t396.90\t5.28\t25.00\n0.04981\t21.00\t5.640\t0\t0.4390\t5.9980\t21.40\t6.8147\t4\t243.0\t16.80\t396.90\t8.43\t23.40\n0.01360\t75.00\t4.000\t0\t0.4100\t5.8880\t47.60\t7.3197\t3\t469.0\t21.10\t396.90\t14.80\t18.90\n0.01311\t90.00\t1.220\t0\t0.4030\t7.2490\t21.90\t8.6966\t5\t226.0\t17.90\t395.93\t4.81\t35.40\n0.02055\t85.00\t0.740\t0\t0.4100\t6.3830\t35.70\t9.1876\t2\t313.0\t17.30\t396.90\t5.77\t24.70\n0.01432\t100.00\t1.320\t0\t0.4110\t6.8160\t40.50\t8.3248\t5\t256.0\t15.10\t392.90\t3.95\t31.60\n0.15445\t25.00\t5.130\t0\t0.4530\t6.1450\t29.20\t7.8148\t8\t284.0\t19.70\t390.68\t6.86\t23.30\n0.10328\t25.00\t5.130\t0\t0.4530\t5.9270\t47.20\t6.9320\t8\t284.0\t19.70\t396.90\t9.22\t19.60\n0.14932\t25.00\t5.130\t0\t0.4530\t5.7410\t66.20\t7.2254\t8\t284.0\t19.70\t395.11\t13.15\t18.70\n0.17171\t25.00\t5.130\t0\t0.4530\t5.9660\t93.40\t6.8185\t8\t284.0\t19.70\t378.08\t14.44\t16.00\n0.11027\t25.00\t5.130\t0\t0.4530\t6.4560\t67.80\t7.2255\t8\t284.0\t19.70\t396.90\t6.73\t22.20\n0
.12650\t25.00\t5.130\t0\t0.4530\t6.7620\t43.40\t7.9809\t8\t284.0\t19.70\t395.58\t9.50\t25.00\n0.01951\t17.50\t1.380\t0\t0.4161\t7.1040\t59.50\t9.2229\t3\t216.0\t18.60\t393.24\t8.05\t33.00\n0.03584\t80.00\t3.370\t0\t0.3980\t6.2900\t17.80\t6.6115\t4\t337.0\t16.10\t396.90\t4.67\t23.50\n0.04379\t80.00\t3.370\t0\t0.3980\t5.7870\t31.10\t6.6115\t4\t337.0\t16.10\t396.90\t10.24\t19.40\n0.05789\t12.50\t6.070\t0\t0.4090\t5.8780\t21.40\t6.4980\t4\t345.0\t18.90\t396.21\t8.10\t22.00\n0.13554\t12.50\t6.070\t0\t0.4090\t5.5940\t36.80\t6.4980\t4\t345.0\t18.90\t396.90\t13.09\t17.40\n0.12816\t12.50\t6.070\t0\t0.4090\t5.8850\t33.00\t6.4980\t4\t345.0\t18.90\t396.90\t8.79\t20.90\n0.08826\t0.00\t10.810\t0\t0.4130\t6.4170\t6.60\t5.2873\t4\t305.0\t19.20\t383.73\t6.72\t24.20\n0.15876\t0.00\t10.810\t0\t0.4130\t5.9610\t17.50\t5.2873\t4\t305.0\t19.20\t376.94\t9.88\t21.70\n0.09164\t0.00\t10.810\t0\t0.4130\t6.0650\t7.80\t5.2873\t4\t305.0\t19.20\t390.91\t5.52\t22.80\n0.19539\t0.00\t10.810\t0\t0.4130\t6.2450\t6.20\t5.2873\t4\t305.0\t19.20\t377.17\t7.54\t23.40\n0.07896\t0.00\t12.830\t0\t0.4370\t6.2730\t6.00\t4.2515\t5\t398.0\t18.70\t394.92\t6.78\t24.10\n0.09512\t0.00\t12.830\t0\t0.4370\t6.2860\t45.00\t4.5026\t5\t398.0\t18.70\t383.23\t8.94\t21.40\n0.10153\t0.00\t12.830\t0\t0.4370\t6.2790\t74.50\t4.0522\t5\t398.0\t18.70\t373.66\t11.97\t20.00\n0.08707\t0.00\t12.830\t0\t0.4370\t6.1400\t45.80\t4.0905\t5\t398.0\t18.70\t386.96\t10.27\t20.80\n0.05646\t0.00\t12.830\t0\t0.4370\t6.2320\t53.70\t5.0141\t5\t398.0\t18.70\t386.40\t12.34\t21.20\n0.08387\t0.00\t12.830\t0\t0.4370\t5.8740\t36.60\t4.5026\t5\t398.0\t18.70\t396.06\t9.10\t20.30\n0.04113\t25.00\t4.860\t0\t0.4260\t6.7270\t33.50\t5.4007\t4\t281.0\t19.00\t396.90\t5.29\t28.00\n0.04462\t25.00\t4.860\t0\t0.4260\t6.6190\t70.40\t5.4007\t4\t281.0\t19.00\t395.63\t7.22\t23.90\n0.03659\t25.00\t4.860\t0\t0.4260\t6.3020\t32.20\t5.4007\t4\t281.0\t19.00\t396.90\t6.72\t24.80\n0.03551\t25.00\t4.860\t0\t0.4260\t6.1670\t46.70\t5.4007\t4\t281.0\t19.00\t390.64\t7.51\t22.90\n0.050
59\t0.00\t4.490\t0\t0.4490\t6.3890\t48.00\t4.7794\t3\t247.0\t18.50\t396.90\t9.62\t23.90\n0.05735\t0.00\t4.490\t0\t0.4490\t6.6300\t56.10\t4.4377\t3\t247.0\t18.50\t392.30\t6.53\t26.60\n0.05188\t0.00\t4.490\t0\t0.4490\t6.0150\t45.10\t4.4272\t3\t247.0\t18.50\t395.99\t12.86\t22.50\n0.07151\t0.00\t4.490\t0\t0.4490\t6.1210\t56.80\t3.7476\t3\t247.0\t18.50\t395.15\t8.44\t22.20\n0.05660\t0.00\t3.410\t0\t0.4890\t7.0070\t86.30\t3.4217\t2\t270.0\t17.80\t396.90\t5.50\t23.60\n0.05302\t0.00\t3.410\t0\t0.4890\t7.0790\t63.10\t3.4145\t2\t270.0\t17.80\t396.06\t5.70\t28.70\n0.04684\t0.00\t3.410\t0\t0.4890\t6.4170\t66.10\t3.0923\t2\t270.0\t17.80\t392.18\t8.81\t22.60\n0.03932\t0.00\t3.410\t0\t0.4890\t6.4050\t73.90\t3.0921\t2\t270.0\t17.80\t393.55\t8.20\t22.00\n0.04203\t28.00\t15.040\t0\t0.4640\t6.4420\t53.60\t3.6659\t4\t270.0\t18.20\t395.01\t8.16\t22.90\n0.02875\t28.00\t15.040\t0\t0.4640\t6.2110\t28.90\t3.6659\t4\t270.0\t18.20\t396.33\t6.21\t25.00\n0.04294\t28.00\t15.040\t0\t0.4640\t6.2490\t77.30\t3.6150\t4\t270.0\t18.20\t396.90\t10.59\t20.60\n0.12204\t0.00\t2.890\t0\t0.4450\t6.6250\t57.80\t3.4952\t2\t276.0\t18.00\t357.98\t6.65\t28.40\n0.11504\t0.00\t2.890\t0\t0.4450\t6.1630\t69.60\t3.4952\t2\t276.0\t18.00\t391.83\t11.34\t21.40\n0.12083\t0.00\t2.890\t0\t0.4450\t8.0690\t76.00\t3.4952\t2\t276.0\t18.00\t396.90\t4.21\t38.70\n0.08187\t0.00\t2.890\t0\t0.4450\t7.8200\t36.90\t3.4952\t2\t276.0\t18.00\t393.53\t3.57\t43.80\n0.06860\t0.00\t2.890\t0\t0.4450\t7.4160\t62.50\t3.4952\t2\t276.0\t18.00\t396.90\t6.19\t33.20\n0.14866\t0.00\t8.560\t0\t0.5200\t6.7270\t79.90\t2.7778\t5\t384.0\t20.90\t394.76\t9.42\t27.50\n0.11432\t0.00\t8.560\t0\t0.5200\t6.7810\t71.30\t2.8561\t5\t384.0\t20.90\t395.58\t7.67\t26.50\n0.22876\t0.00\t8.560\t0\t0.5200\t6.4050\t85.40\t2.7147\t5\t384.0\t20.90\t70.80\t10.63\t18.60\n0.21161\t0.00\t8.560\t0\t0.5200\t6.1370\t87.40\t2.7147\t5\t384.0\t20.90\t394.47\t13.44\t19.30\n0.13960\t0.00\t8.560\t0\t0.5200\t6.1670\t90.00\t2.4210\t5\t384.0\t20.90\t392.69\t12.33\t20.10\n0.13262\t0.00\t8.560
\t0\t0.5200\t5.8510\t96.70\t2.1069\t5\t384.0\t20.90\t394.05\t16.47\t19.50\n0.17120\t0.00\t8.560\t0\t0.5200\t5.8360\t91.90\t2.2110\t5\t384.0\t20.90\t395.67\t18.66\t19.50\n0.13117\t0.00\t8.560\t0\t0.5200\t6.1270\t85.20\t2.1224\t5\t384.0\t20.90\t387.69\t14.09\t20.40\n0.12802\t0.00\t8.560\t0\t0.5200\t6.4740\t97.10\t2.4329\t5\t384.0\t20.90\t395.24\t12.27\t19.80\n0.26363\t0.00\t8.560\t0\t0.5200\t6.2290\t91.20\t2.5451\t5\t384.0\t20.90\t391.23\t15.55\t19.40\n0.10793\t0.00\t8.560\t0\t0.5200\t6.1950\t54.40\t2.7778\t5\t384.0\t20.90\t393.49\t13.00\t21.70\n0.10084\t0.00\t10.010\t0\t0.5470\t6.7150\t81.60\t2.6775\t6\t432.0\t17.80\t395.59\t10.16\t22.80\n0.12329\t0.00\t10.010\t0\t0.5470\t5.9130\t92.90\t2.3534\t6\t432.0\t17.80\t394.95\t16.21\t18.80\n0.22212\t0.00\t10.010\t0\t0.5470\t6.0920\t95.40\t2.5480\t6\t432.0\t17.80\t396.90\t17.09\t18.70\n0.14231\t0.00\t10.010\t0\t0.5470\t6.2540\t84.20\t2.2565\t6\t432.0\t17.80\t388.74\t10.45\t18.50\n0.17134\t0.00\t10.010\t0\t0.5470\t5.9280\t88.20\t2.4631\t6\t432.0\t17.80\t344.91\t15.76\t18.30\n0.13158\t0.00\t10.010\t0\t0.5470\t6.1760\t72.50\t2.7301\t6\t432.0\t17.80\t393.30\t12.04\t21.20\n0.15098\t0.00\t10.010\t0\t0.5470\t6.0210\t82.60\t2.7474\t6\t432.0\t17.80\t394.51\t10.30\t19.20\n0.13058\t0.00\t10.010\t0\t0.5470\t5.8720\t73.10\t2.4775\t6\t432.0\t17.80\t338.63\t15.37\t20.40\n0.14476\t0.00\t10.010\t0\t0.5470\t5.7310\t65.20\t2.7592\t6\t432.0\t17.80\t391.50\t13.61\t19.30\n0.06899\t0.00\t25.650\t0\t0.5810\t5.8700\t69.70\t2.2577\t2\t188.0\t19.10\t389.15\t14.37\t22.00\n0.07165\t0.00\t25.650\t0\t0.5810\t6.0040\t84.10\t2.1974\t2\t188.0\t19.10\t377.67\t14.27\t20.30\n0.09299\t0.00\t25.650\t0\t0.5810\t5.9610\t92.90\t2.0869\t2\t188.0\t19.10\t378.09\t17.93\t20.50\n0.15038\t0.00\t25.650\t0\t0.5810\t5.8560\t97.00\t1.9444\t2\t188.0\t19.10\t370.31\t25.41\t17.30\n0.09849\t0.00\t25.650\t0\t0.5810\t5.8790\t95.80\t2.0063\t2\t188.0\t19.10\t379.38\t17.58\t18.80\n0.16902\t0.00\t25.650\t0\t0.5810\t5.9860\t88.40\t1.9929\t2\t188.0\t19.10\t385.02\t14.81\t21.40\n0.38735\t0
.00\t25.650\t0\t0.5810\t5.6130\t95.60\t1.7572\t2\t188.0\t19.10\t359.29\t27.26\t15.70\n0.25915\t0.00\t21.890\t0\t0.6240\t5.6930\t96.00\t1.7883\t4\t437.0\t21.20\t392.11\t17.19\t16.20\n0.32543\t0.00\t21.890\t0\t0.6240\t6.4310\t98.80\t1.8125\t4\t437.0\t21.20\t396.90\t15.39\t18.00\n0.88125\t0.00\t21.890\t0\t0.6240\t5.6370\t94.70\t1.9799\t4\t437.0\t21.20\t396.90\t18.34\t14.30\n0.34006\t0.00\t21.890\t0\t0.6240\t6.4580\t98.90\t2.1185\t4\t437.0\t21.20\t395.04\t12.60\t19.20\n1.19294\t0.00\t21.890\t0\t0.6240\t6.3260\t97.70\t2.2710\t4\t437.0\t21.20\t396.90\t12.26\t19.60\n0.59005\t0.00\t21.890\t0\t0.6240\t6.3720\t97.90\t2.3274\t4\t437.0\t21.20\t385.76\t11.12\t23.00\n0.32982\t0.00\t21.890\t0\t0.6240\t5.8220\t95.40\t2.4699\t4\t437.0\t21.20\t388.69\t15.03\t18.40\n0.97617\t0.00\t21.890\t0\t0.6240\t5.7570\t98.40\t2.3460\t4\t437.0\t21.20\t262.76\t17.31\t15.60\n0.55778\t0.00\t21.890\t0\t0.6240\t6.3350\t98.20\t2.1107\t4\t437.0\t21.20\t394.67\t16.96\t18.10\n0.32264\t0.00\t21.890\t0\t0.6240\t5.9420\t93.50\t1.9669\t4\t437.0\t21.20\t378.25\t16.90\t17.40\n0.35233\t0.00\t21.890\t0\t0.6240\t6.4540\t98.40\t1.8498\t4\t437.0\t21.20\t394.08\t14.59\t17.10\n0.24980\t0.00\t21.890\t0\t0.6240\t5.8570\t98.20\t1.6686\t4\t437.0\t21.20\t392.04\t21.32\t13.30\n0.54452\t0.00\t21.890\t0\t0.6240\t6.1510\t97.90\t1.6687\t4\t437.0\t21.20\t396.90\t18.46\t17.80\n0.29090\t0.00\t21.890\t0\t0.6240\t6.1740\t93.60\t1.6119\t4\t437.0\t21.20\t388.08\t24.16\t14.00\n1.62864\t0.00\t21.890\t0\t0.6240\t5.0190\t100.00\t1.4394\t4\t437.0\t21.20\t396.90\t34.41\t14.40\n3.32105\t0.00\t19.580\t1\t0.8710\t5.4030\t100.00\t1.3216\t5\t403.0\t14.70\t396.90\t26.82\t13.40\n4.09740\t0.00\t19.580\t0\t0.8710\t5.4680\t100.00\t1.4118\t5\t403.0\t14.70\t396.90\t26.42\t15.60\n2.77974\t0.00\t19.580\t0\t0.8710\t4.9030\t97.80\t1.3459\t5\t403.0\t14.70\t396.90\t29.29\t11.80\n2.37934\t0.00\t19.580\t0\t0.8710\t6.1300\t100.00\t1.4191\t5\t403.0\t14.70\t172.91\t27.80\t13.80\n2.15505\t0.00\t19.580\t0\t0.8710\t5.6280\t100.00\t1.5166\t5\t403.0\t14.70\t169.27\t16.
65\t15.60\n2.36862\t0.00\t19.580\t0\t0.8710\t4.9260\t95.70\t1.4608\t5\t403.0\t14.70\t391.71\t29.53\t14.60\n2.33099\t0.00\t19.580\t0\t0.8710\t5.1860\t93.80\t1.5296\t5\t403.0\t14.70\t356.99\t28.32\t17.80\n2.73397\t0.00\t19.580\t0\t0.8710\t5.5970\t94.90\t1.5257\t5\t403.0\t14.70\t351.85\t21.45\t15.40\n1.65660\t0.00\t19.580\t0\t0.8710\t6.1220\t97.30\t1.6180\t5\t403.0\t14.70\t372.80\t14.10\t21.50\n1.49632\t0.00\t19.580\t0\t0.8710\t5.4040\t100.00\t1.5916\t5\t403.0\t14.70\t341.60\t13.28\t19.60\n1.12658\t0.00\t19.580\t1\t0.8710\t5.0120\t88.00\t1.6102\t5\t403.0\t14.70\t343.28\t12.12\t15.30\n2.14918\t0.00\t19.580\t0\t0.8710\t5.7090\t98.50\t1.6232\t5\t403.0\t14.70\t261.95\t15.79\t19.40\n1.41385\t0.00\t19.580\t1\t0.8710\t6.1290\t96.00\t1.7494\t5\t403.0\t14.70\t321.02\t15.12\t17.00\n3.53501\t0.00\t19.580\t1\t0.8710\t6.1520\t82.60\t1.7455\t5\t403.0\t14.70\t88.01\t15.02\t15.60\n2.44668\t0.00\t19.580\t0\t0.8710\t5.2720\t94.00\t1.7364\t5\t403.0\t14.70\t88.63\t16.14\t13.10\n1.22358\t0.00\t19.580\t0\t0.6050\t6.9430\t97.40\t1.8773\t5\t403.0\t14.70\t363.43\t4.59\t41.30\n1.34284\t0.00\t19.580\t0\t0.6050\t6.0660\t100.00\t1.7573\t5\t403.0\t14.70\t353.89\t6.43\t24.30\n1.42502\t0.00\t19.580\t0\t0.8710\t6.5100\t100.00\t1.7659\t5\t403.0\t14.70\t364.31\t7.39\t23.30\n1.27346\t0.00\t19.580\t1\t0.6050\t6.2500\t92.60\t1.7984\t5\t403.0\t14.70\t338.92\t5.50\t27.00\n1.46336\t0.00\t19.580\t0\t0.6050\t7.4890\t90.80\t1.9709\t5\t403.0\t14.70\t374.43\t1.73\t50.00\n1.83377\t0.00\t19.580\t1\t0.6050\t7.8020\t98.20\t2.0407\t5\t403.0\t14.70\t389.61\t1.92\t50.00\n1.51902\t0.00\t19.580\t1\t0.6050\t8.3750\t93.90\t2.1620\t5\t403.0\t14.70\t388.45\t3.32\t50.00\n2.24236\t0.00\t19.580\t0\t0.6050\t5.8540\t91.80\t2.4220\t5\t403.0\t14.70\t395.11\t11.64\t22.70\n2.92400\t0.00\t19.580\t0\t0.6050\t6.1010\t93.00\t2.2834\t5\t403.0\t14.70\t240.16\t9.81\t25.00\n2.01019\t0.00\t19.580\t0\t0.6050\t7.9290\t96.20\t2.0459\t5\t403.0\t14.70\t369.30\t3.70\t50.00\n1.80028\t0.00\t19.580\t0\t0.6050\t5.8770\t79.20\t2.4259\t5\t403.0\t14.70\t227
.61\t12.14\t23.80\n2.30040\t0.00\t19.580\t0\t0.6050\t6.3190\t96.10\t2.1000\t5\t403.0\t14.70\t297.09\t11.10\t23.80\n2.44953\t0.00\t19.580\t0\t0.6050\t6.4020\t95.20\t2.2625\t5\t403.0\t14.70\t330.04\t11.32\t22.30\n1.20742\t0.00\t19.580\t0\t0.6050\t5.8750\t94.60\t2.4259\t5\t403.0\t14.70\t292.29\t14.43\t17.40\n2.31390\t0.00\t19.580\t0\t0.6050\t5.8800\t97.30\t2.3887\t5\t403.0\t14.70\t348.13\t12.03\t19.10\n0.13914\t0.00\t4.050\t0\t0.5100\t5.5720\t88.50\t2.5961\t5\t296.0\t16.60\t396.90\t14.69\t23.10\n0.09178\t0.00\t4.050\t0\t0.5100\t6.4160\t84.10\t2.6463\t5\t296.0\t16.60\t395.50\t9.04\t23.60\n0.08447\t0.00\t4.050\t0\t0.5100\t5.8590\t68.70\t2.7019\t5\t296.0\t16.60\t393.23\t9.64\t22.60\n0.06664\t0.00\t4.050\t0\t0.5100\t6.5460\t33.10\t3.1323\t5\t296.0\t16.60\t390.96\t5.33\t29.40\n0.07022\t0.00\t4.050\t0\t0.5100\t6.0200\t47.20\t3.5549\t5\t296.0\t16.60\t393.23\t10.11\t23.20\n0.05425\t0.00\t4.050\t0\t0.5100\t6.3150\t73.40\t3.3175\t5\t296.0\t16.60\t395.60\t6.29\t24.60\n0.06642\t0.00\t4.050\t0\t0.5100\t6.8600\t74.40\t2.9153\t5\t296.0\t16.60\t391.27\t6.92\t29.90\n0.05780\t0.00\t2.460\t0\t0.4880\t6.9800\t58.40\t2.8290\t3\t193.0\t17.80\t396.90\t5.04\t37.20\n0.06588\t0.00\t2.460\t0\t0.4880\t7.7650\t83.30\t2.7410\t3\t193.0\t17.80\t395.56\t7.56\t39.80\n0.06888\t0.00\t2.460\t0\t0.4880\t6.1440\t62.20\t2.5979\t3\t193.0\t17.80\t396.90\t9.45\t36.20\n0.09103\t0.00\t2.460\t0\t0.4880\t7.1550\t92.20\t2.7006\t3\t193.0\t17.80\t394.12\t4.82\t37.90\n0.10008\t0.00\t2.460\t0\t0.4880\t6.5630\t95.60\t2.8470\t3\t193.0\t17.80\t396.90\t5.68\t32.50\n0.08308\t0.00\t2.460\t0\t0.4880\t5.6040\t89.80\t2.9879\t3\t193.0\t17.80\t391.00\t13.98\t26.40\n0.06047\t0.00\t2.460\t0\t0.4880\t6.1530\t68.80\t3.2797\t3\t193.0\t17.80\t387.11\t13.15\t29.60\n0.05602\t0.00\t2.460\t0\t0.4880\t7.8310\t53.60\t3.1992\t3\t193.0\t17.80\t392.63\t4.45\t50.00\n0.07875\t45.00\t3.440\t0\t0.4370\t6.7820\t41.10\t3.7886\t5\t398.0\t15.20\t393.87\t6.68\t32.00\n0.12579\t45.00\t3.440\t0\t0.4370\t6.5560\t29.10\t4.5667\t5\t398.0\t15.20\t382.84\t4.56\t
29.80\n0.08370\t45.00\t3.440\t0\t0.4370\t7.1850\t38.90\t4.5667\t5\t398.0\t15.20\t396.90\t5.39\t34.90\n0.09068\t45.00\t3.440\t0\t0.4370\t6.9510\t21.50\t6.4798\t5\t398.0\t15.20\t377.68\t5.10\t37.00\n0.06911\t45.00\t3.440\t0\t0.4370\t6.7390\t30.80\t6.4798\t5\t398.0\t15.20\t389.71\t4.69\t30.50\n0.08664\t45.00\t3.440\t0\t0.4370\t7.1780\t26.30\t6.4798\t5\t398.0\t15.20\t390.49\t2.87\t36.40\n0.02187\t60.00\t2.930\t0\t0.4010\t6.8000\t9.90\t6.2196\t1\t265.0\t15.60\t393.37\t5.03\t31.10\n0.01439\t60.00\t2.930\t0\t0.4010\t6.6040\t18.80\t6.2196\t1\t265.0\t15.60\t376.70\t4.38\t29.10\n0.01381\t80.00\t0.460\t0\t0.4220\t7.8750\t32.00\t5.6484\t4\t255.0\t14.40\t394.23\t2.97\t50.00\n0.04011\t80.00\t1.520\t0\t0.4040\t7.2870\t34.10\t7.3090\t2\t329.0\t12.60\t396.90\t4.08\t33.30\n0.04666\t80.00\t1.520\t0\t0.4040\t7.1070\t36.60\t7.3090\t2\t329.0\t12.60\t354.31\t8.61\t30.30\n0.03768\t80.00\t1.520\t0\t0.4040\t7.2740\t38.30\t7.3090\t2\t329.0\t12.60\t392.20\t6.62\t34.60\n0.03150\t95.00\t1.470\t0\t0.4030\t6.9750\t15.30\t7.6534\t3\t402.0\t17.00\t396.90\t4.56\t34.90\n0.01778\t95.00\t1.470\t0\t0.4030\t7.1350\t13.90\t7.6534\t3\t402.0\t17.00\t384.30\t4.45\t32.90\n0.03445\t82.50\t2.030\t0\t0.4150\t6.1620\t38.40\t6.2700\t2\t348.0\t14.70\t393.77\t7.43\t24.10\n0.02177\t82.50\t2.030\t0\t0.4150\t7.6100\t15.70\t6.2700\t2\t348.0\t14.70\t395.38\t3.11\t42.30\n0.03510\t95.00\t2.680\t0\t0.4161\t7.8530\t33.20\t5.1180\t4\t224.0\t14.70\t392.78\t3.81\t48.50\n0.02009\t95.00\t2.680\t0\t0.4161\t8.0340\t31.90\t5.1180\t4\t224.0\t14.70\t390.55\t2.88\t50.00\n0.13642\t0.00\t10.590\t0\t0.4890\t5.8910\t22.30\t3.9454\t4\t277.0\t18.60\t396.90\t10.87\t22.60\n0.22969\t0.00\t10.590\t0\t0.4890\t6.3260\t52.50\t4.3549\t4\t277.0\t18.60\t394.87\t10.97\t24.40\n0.25199\t0.00\t10.590\t0\t0.4890\t5.7830\t72.70\t4.3549\t4\t277.0\t18.60\t389.43\t18.06\t22.50\n0.13587\t0.00\t10.590\t1\t0.4890\t6.0640\t59.10\t4.2392\t4\t277.0\t18.60\t381.32\t14.66\t24.40\n0.43571\t0.00\t10.590\t1\t0.4890\t5.3440\t100.00\t3.8750\t4\t277.0\t18.60\t396.90\t23.09\t
20.00\n0.17446\t0.00\t10.590\t1\t0.4890\t5.9600\t92.10\t3.8771\t4\t277.0\t18.60\t393.25\t17.27\t21.70\n0.37578\t0.00\t10.590\t1\t0.4890\t5.4040\t88.60\t3.6650\t4\t277.0\t18.60\t395.24\t23.98\t19.30\n0.21719\t0.00\t10.590\t1\t0.4890\t5.8070\t53.80\t3.6526\t4\t277.0\t18.60\t390.94\t16.03\t22.40\n0.14052\t0.00\t10.590\t0\t0.4890\t6.3750\t32.30\t3.9454\t4\t277.0\t18.60\t385.81\t9.38\t28.10\n0.28955\t0.00\t10.590\t0\t0.4890\t5.4120\t9.80\t3.5875\t4\t277.0\t18.60\t348.93\t29.55\t23.70\n0.19802\t0.00\t10.590\t0\t0.4890\t6.1820\t42.40\t3.9454\t4\t277.0\t18.60\t393.63\t9.47\t25.00\n0.04560\t0.00\t13.890\t1\t0.5500\t5.8880\t56.00\t3.1121\t5\t276.0\t16.40\t392.80\t13.51\t23.30\n0.07013\t0.00\t13.890\t0\t0.5500\t6.6420\t85.10\t3.4211\t5\t276.0\t16.40\t392.78\t9.69\t28.70\n0.11069\t0.00\t13.890\t1\t0.5500\t5.9510\t93.80\t2.8893\t5\t276.0\t16.40\t396.90\t17.92\t21.50\n0.11425\t0.00\t13.890\t1\t0.5500\t6.3730\t92.40\t3.3633\t5\t276.0\t16.40\t393.74\t10.50\t23.00\n0.35809\t0.00\t6.200\t1\t0.5070\t6.9510\t88.50\t2.8617\t8\t307.0\t17.40\t391.70\t9.71\t26.70\n0.40771\t0.00\t6.200\t1\t0.5070\t6.1640\t91.30\t3.0480\t8\t307.0\t17.40\t395.24\t21.46\t21.70\n0.62356\t0.00\t6.200\t1\t0.5070\t6.8790\t77.70\t3.2721\t8\t307.0\t17.40\t390.39\t9.93\t27.50\n0.61470\t0.00\t6.200\t0\t0.5070\t6.6180\t80.80\t3.2721\t8\t307.0\t17.40\t396.90\t7.60\t30.10\n0.31533\t0.00\t6.200\t0\t0.5040\t8.2660\t78.30\t2.8944\t8\t307.0\t17.40\t385.05\t4.14\t44.80\n0.52693\t0.00\t6.200\t0\t0.5040\t8.7250\t83.00\t2.8944\t8\t307.0\t17.40\t382.00\t4.63\t50.00\n0.38214\t0.00\t6.200\t0\t0.5040\t8.0400\t86.50\t3.2157\t8\t307.0\t17.40\t387.38\t3.13\t37.60\n0.41238\t0.00\t6.200\t0\t0.5040\t7.1630\t79.90\t3.2157\t8\t307.0\t17.40\t372.08\t6.36\t31.60\n0.29819\t0.00\t6.200\t0\t0.5040\t7.6860\t17.00\t3.3751\t8\t307.0\t17.40\t377.51\t3.92\t46.70\n0.44178\t0.00\t6.200\t0\t0.5040\t6.5520\t21.40\t3.3751\t8\t307.0\t17.40\t380.34\t3.76\t31.50\n0.53700\t0.00\t6.200\t0\t0.5040\t5.9810\t68.10\t3.6715\t8\t307.0\t17.40\t378.35\t11.65\t24.30\n0
.46296\t0.00\t6.200\t0\t0.5040\t7.4120\t76.90\t3.6715\t8\t307.0\t17.40\t376.14\t5.25\t31.70\n0.57529\t0.00\t6.200\t0\t0.5070\t8.3370\t73.30\t3.8384\t8\t307.0\t17.40\t385.91\t2.47\t41.70\n0.33147\t0.00\t6.200\t0\t0.5070\t8.2470\t70.40\t3.6519\t8\t307.0\t17.40\t378.95\t3.95\t48.30\n0.44791\t0.00\t6.200\t1\t0.5070\t6.7260\t66.50\t3.6519\t8\t307.0\t17.40\t360.20\t8.05\t29.00\n0.33045\t0.00\t6.200\t0\t0.5070\t6.0860\t61.50\t3.6519\t8\t307.0\t17.40\t376.75\t10.88\t24.00\n0.52058\t0.00\t6.200\t1\t0.5070\t6.6310\t76.50\t4.1480\t8\t307.0\t17.40\t388.45\t9.54\t25.10\n0.51183\t0.00\t6.200\t0\t0.5070\t7.3580\t71.60\t4.1480\t8\t307.0\t17.40\t390.07\t4.73\t31.50\n0.08244\t30.00\t4.930\t0\t0.4280\t6.4810\t18.50\t6.1899\t6\t300.0\t16.60\t379.41\t6.36\t23.70\n0.09252\t30.00\t4.930\t0\t0.4280\t6.6060\t42.20\t6.1899\t6\t300.0\t16.60\t383.78\t7.37\t23.30\n0.11329\t30.00\t4.930\t0\t0.4280\t6.8970\t54.30\t6.3361\t6\t300.0\t16.60\t391.25\t11.38\t22.00\n0.10612\t30.00\t4.930\t0\t0.4280\t6.0950\t65.10\t6.3361\t6\t300.0\t16.60\t394.62\t12.40\t20.10\n0.10290\t30.00\t4.930\t0\t0.4280\t6.3580\t52.90\t7.0355\t6\t300.0\t16.60\t372.75\t11.22\t22.20\n0.12757\t30.00\t4.930\t0\t0.4280\t6.3930\t7.80\t7.0355\t6\t300.0\t16.60\t374.71\t5.19\t23.70\n0.20608\t22.00\t5.860\t0\t0.4310\t5.5930\t76.50\t7.9549\t7\t330.0\t19.10\t372.49\t12.50\t17.60\n0.19133\t22.00\t5.860\t0\t0.4310\t5.6050\t70.20\t7.9549\t7\t330.0\t19.10\t389.13\t18.46\t18.50\n0.33983\t22.00\t5.860\t0\t0.4310\t6.1080\t34.90\t8.0555\t7\t330.0\t19.10\t390.18\t9.16\t24.30\n0.19657\t22.00\t5.860\t0\t0.4310\t6.2260\t79.20\t8.0555\t7\t330.0\t19.10\t376.14\t10.15\t20.50\n0.16439\t22.00\t5.860\t0\t0.4310\t6.4330\t49.10\t7.8265\t7\t330.0\t19.10\t374.71\t9.52\t24.50\n0.19073\t22.00\t5.860\t0\t0.4310\t6.7180\t17.50\t7.8265\t7\t330.0\t19.10\t393.74\t6.56\t26.20\n0.14030\t22.00\t5.860\t0\t0.4310\t6.4870\t13.00\t7.3967\t7\t330.0\t19.10\t396.28\t5.90\t24.40\n0.21409\t22.00\t5.860\t0\t0.4310\t6.4380\t8.90\t7.3967\t7\t330.0\t19.10\t377.07\t3.59\t24.80\n0.08221\
t22.00\t5.860\t0\t0.4310\t6.9570\t6.80\t8.9067\t7\t330.0\t19.10\t386.09\t3.53\t29.60\n0.36894\t22.00\t5.860\t0\t0.4310\t8.2590\t8.40\t8.9067\t7\t330.0\t19.10\t396.90\t3.54\t42.80\n0.04819\t80.00\t3.640\t0\t0.3920\t6.1080\t32.00\t9.2203\t1\t315.0\t16.40\t392.89\t6.57\t21.90\n0.03548\t80.00\t3.640\t0\t0.3920\t5.8760\t19.10\t9.2203\t1\t315.0\t16.40\t395.18\t9.25\t20.90\n0.01538\t90.00\t3.750\t0\t0.3940\t7.4540\t34.20\t6.3361\t3\t244.0\t15.90\t386.34\t3.11\t44.00\n0.61154\t20.00\t3.970\t0\t0.6470\t8.7040\t86.90\t1.8010\t5\t264.0\t13.00\t389.70\t5.12\t50.00\n0.66351\t20.00\t3.970\t0\t0.6470\t7.3330\t100.00\t1.8946\t5\t264.0\t13.00\t383.29\t7.79\t36.00\n0.65665\t20.00\t3.970\t0\t0.6470\t6.8420\t100.00\t2.0107\t5\t264.0\t13.00\t391.93\t6.90\t30.10\n0.54011\t20.00\t3.970\t0\t0.6470\t7.2030\t81.80\t2.1121\t5\t264.0\t13.00\t392.80\t9.59\t33.80\n0.53412\t20.00\t3.970\t0\t0.6470\t7.5200\t89.40\t2.1398\t5\t264.0\t13.00\t388.37\t7.26\t43.10\n0.52014\t20.00\t3.970\t0\t0.6470\t8.3980\t91.50\t2.2885\t5\t264.0\t13.00\t386.86\t5.91\t48.80\n0.82526\t20.00\t3.970\t0\t0.6470\t7.3270\t94.50\t2.0788\t5\t264.0\t13.00\t393.42\t11.25\t31.00\n0.55007\t20.00\t3.970\t0\t0.6470\t7.2060\t91.60\t1.9301\t5\t264.0\t13.00\t387.89\t8.10\t36.50\n0.76162\t20.00\t3.970\t0\t0.6470\t5.5600\t62.80\t1.9865\t5\t264.0\t13.00\t392.40\t10.45\t22.80\n0.78570\t20.00\t3.970\t0\t0.6470\t7.0140\t84.60\t2.1329\t5\t264.0\t13.00\t384.07\t14.79\t30.70\n0.57834\t20.00\t3.970\t0\t0.5750\t8.2970\t67.00\t2.4216\t5\t264.0\t13.00\t384.54\t7.44\t50.00\n0.54050\t20.00\t3.970\t0\t0.5750\t7.4700\t52.60\t2.8720\t5\t264.0\t13.00\t390.30\t3.16\t43.50\n0.09065\t20.00\t6.960\t1\t0.4640\t5.9200\t61.50\t3.9175\t3\t223.0\t18.60\t391.34\t13.65\t20.70\n0.29916\t20.00\t6.960\t0\t0.4640\t5.8560\t42.10\t4.4290\t3\t223.0\t18.60\t388.65\t13.00\t21.10\n0.16211\t20.00\t6.960\t0\t0.4640\t6.2400\t16.30\t4.4290\t3\t223.0\t18.60\t396.90\t6.59\t25.20\n0.11460\t20.00\t6.960\t0\t0.4640\t6.5380\t58.70\t3.9175\t3\t223.0\t18.60\t394.96\t7.73\t24.40\n0.22188\
t20.00\t6.960\t1\t0.4640\t7.6910\t51.80\t4.3665\t3\t223.0\t18.60\t390.77\t6.58\t35.20\n0.05644\t40.00\t6.410\t1\t0.4470\t6.7580\t32.90\t4.0776\t4\t254.0\t17.60\t396.90\t3.53\t32.40\n0.09604\t40.00\t6.410\t0\t0.4470\t6.8540\t42.80\t4.2673\t4\t254.0\t17.60\t396.90\t2.98\t32.00\n0.10469\t40.00\t6.410\t1\t0.4470\t7.2670\t49.00\t4.7872\t4\t254.0\t17.60\t389.25\t6.05\t33.20\n0.06127\t40.00\t6.410\t1\t0.4470\t6.8260\t27.60\t4.8628\t4\t254.0\t17.60\t393.45\t4.16\t33.10\n0.07978\t40.00\t6.410\t0\t0.4470\t6.4820\t32.10\t4.1403\t4\t254.0\t17.60\t396.90\t7.19\t29.10\n0.21038\t20.00\t3.330\t0\t0.4429\t6.8120\t32.20\t4.1007\t5\t216.0\t14.90\t396.90\t4.85\t35.10\n0.03578\t20.00\t3.330\t0\t0.4429\t7.8200\t64.50\t4.6947\t5\t216.0\t14.90\t387.31\t3.76\t45.40\n0.03705\t20.00\t3.330\t0\t0.4429\t6.9680\t37.20\t5.2447\t5\t216.0\t14.90\t392.23\t4.59\t35.40\n0.06129\t20.00\t3.330\t1\t0.4429\t7.6450\t49.70\t5.2119\t5\t216.0\t14.90\t377.07\t3.01\t46.00\n0.01501\t90.00\t1.210\t1\t0.4010\t7.9230\t24.80\t5.8850\t1\t198.0\t13.60\t395.52\t3.16\t50.00\n0.00906\t90.00\t2.970\t0\t0.4000\t7.0880\t20.80\t7.3073\t1\t285.0\t15.30\t394.72\t7.85\t32.20\n0.01096\t55.00\t2.250\t0\t0.3890\t6.4530\t31.90\t7.3073\t1\t300.0\t15.30\t394.72\t8.23\t22.00\n0.01965\t80.00\t1.760\t0\t0.3850\t6.2300\t31.50\t9.0892\t1\t241.0\t18.20\t341.60\t12.93\t20.10\n0.03871\t52.50\t5.320\t0\t0.4050\t6.2090\t31.30\t7.3172\t6\t293.0\t16.60\t396.90\t7.14\t23.20\n0.04590\t52.50\t5.320\t0\t0.4050\t6.3150\t45.60\t7.3172\t6\t293.0\t16.60\t396.90\t7.60\t22.30\n0.04297\t52.50\t5.320\t0\t0.4050\t6.5650\t22.90\t7.3172\t6\t293.0\t16.60\t371.72\t9.51\t24.80\n0.03502\t80.00\t4.950\t0\t0.4110\t6.8610\t27.90\t5.1167\t4\t245.0\t19.20\t396.90\t3.33\t28.50\n0.07886\t80.00\t4.950\t0\t0.4110\t7.1480\t27.70\t5.1167\t4\t245.0\t19.20\t396.90\t3.56\t37.30\n0.03615\t80.00\t4.950\t0\t0.4110\t6.6300\t23.40\t5.1167\t4\t245.0\t19.20\t396.90\t4.70\t27.90\n0.08265\t0.00\t13.920\t0\t0.4370\t6.1270\t18.40\t5.5027\t4\t289.0\t16.00\t396.90\t8.58\t23.90\n0.08199\t0.0
0\t13.920\t0\t0.4370\t6.0090\t42.30\t5.5027\t4\t289.0\t16.00\t396.90\t10.40\t21.70\n0.12932\t0.00\t13.920\t0\t0.4370\t6.6780\t31.10\t5.9604\t4\t289.0\t16.00\t396.90\t6.27\t28.60\n0.05372\t0.00\t13.920\t0\t0.4370\t6.5490\t51.00\t5.9604\t4\t289.0\t16.00\t392.85\t7.39\t27.10\n0.14103\t0.00\t13.920\t0\t0.4370\t5.7900\t58.00\t6.3200\t4\t289.0\t16.00\t396.90\t15.84\t20.30\n0.06466\t70.00\t2.240\t0\t0.4000\t6.3450\t20.10\t7.8278\t5\t358.0\t14.80\t368.24\t4.97\t22.50\n0.05561\t70.00\t2.240\t0\t0.4000\t7.0410\t10.00\t7.8278\t5\t358.0\t14.80\t371.58\t4.74\t29.00\n0.04417\t70.00\t2.240\t0\t0.4000\t6.8710\t47.40\t7.8278\t5\t358.0\t14.80\t390.86\t6.07\t24.80\n0.03537\t34.00\t6.090\t0\t0.4330\t6.5900\t40.40\t5.4917\t7\t329.0\t16.10\t395.75\t9.50\t22.00\n0.09266\t34.00\t6.090\t0\t0.4330\t6.4950\t18.40\t5.4917\t7\t329.0\t16.10\t383.61\t8.67\t26.40\n0.10000\t34.00\t6.090\t0\t0.4330\t6.9820\t17.70\t5.4917\t7\t329.0\t16.10\t390.43\t4.86\t33.10\n0.05515\t33.00\t2.180\t0\t0.4720\t7.2360\t41.10\t4.0220\t7\t222.0\t18.40\t393.68\t6.93\t36.10\n0.05479\t33.00\t2.180\t0\t0.4720\t6.6160\t58.10\t3.3700\t7\t222.0\t18.40\t393.36\t8.93\t28.40\n0.07503\t33.00\t2.180\t0\t0.4720\t7.4200\t71.90\t3.0992\t7\t222.0\t18.40\t396.90\t6.47\t33.40\n0.04932\t33.00\t2.180\t0\t0.4720\t6.8490\t70.30\t3.1827\t7\t222.0\t18.40\t396.90\t7.53\t28.20\n0.49298\t0.00\t9.900\t0\t0.5440\t6.6350\t82.50\t3.3175\t4\t304.0\t18.40\t396.90\t4.54\t22.80\n0.34940\t0.00\t9.900\t0\t0.5440\t5.9720\t76.70\t3.1025\t4\t304.0\t18.40\t396.24\t9.97\t20.30\n2.63548\t0.00\t9.900\t0\t0.5440\t4.9730\t37.80\t2.5194\t4\t304.0\t18.40\t350.45\t12.64\t16.10\n0.79041\t0.00\t9.900\t0\t0.5440\t6.1220\t52.80\t2.6403\t4\t304.0\t18.40\t396.90\t5.98\t22.10\n0.26169\t0.00\t9.900\t0\t0.5440\t6.0230\t90.40\t2.8340\t4\t304.0\t18.40\t396.30\t11.72\t19.40\n0.26938\t0.00\t9.900\t0\t0.5440\t6.2660\t82.80\t3.2628\t4\t304.0\t18.40\t393.39\t7.90\t21.60\n0.36920\t0.00\t9.900\t0\t0.5440\t6.5670\t87.30\t3.6023\t4\t304.0\t18.40\t395.69\t9.28\t23.80\n0.25356\t0.00\t9.900
\t0\t0.5440\t5.7050\t77.70\t3.9450\t4\t304.0\t18.40\t396.42\t11.50\t16.20\n0.31827\t0.00\t9.900\t0\t0.5440\t5.9140\t83.20\t3.9986\t4\t304.0\t18.40\t390.70\t18.33\t17.80\n0.24522\t0.00\t9.900\t0\t0.5440\t5.7820\t71.70\t4.0317\t4\t304.0\t18.40\t396.90\t15.94\t19.80\n0.40202\t0.00\t9.900\t0\t0.5440\t6.3820\t67.20\t3.5325\t4\t304.0\t18.40\t395.21\t10.36\t23.10\n0.47547\t0.00\t9.900\t0\t0.5440\t6.1130\t58.80\t4.0019\t4\t304.0\t18.40\t396.23\t12.73\t21.00\n0.16760\t0.00\t7.380\t0\t0.4930\t6.4260\t52.30\t4.5404\t5\t287.0\t19.60\t396.90\t7.20\t23.80\n0.18159\t0.00\t7.380\t0\t0.4930\t6.3760\t54.30\t4.5404\t5\t287.0\t19.60\t396.90\t6.87\t23.10\n0.35114\t0.00\t7.380\t0\t0.4930\t6.0410\t49.90\t4.7211\t5\t287.0\t19.60\t396.90\t7.70\t20.40\n0.28392\t0.00\t7.380\t0\t0.4930\t5.7080\t74.30\t4.7211\t5\t287.0\t19.60\t391.13\t11.74\t18.50\n0.34109\t0.00\t7.380\t0\t0.4930\t6.4150\t40.10\t4.7211\t5\t287.0\t19.60\t396.90\t6.12\t25.00\n0.19186\t0.00\t7.380\t0\t0.4930\t6.4310\t14.70\t5.4159\t5\t287.0\t19.60\t393.68\t5.08\t24.60\n0.30347\t0.00\t7.380\t0\t0.4930\t6.3120\t28.90\t5.4159\t5\t287.0\t19.60\t396.90\t6.15\t23.00\n0.24103\t0.00\t7.380\t0\t0.4930\t6.0830\t43.70\t5.4159\t5\t287.0\t19.60\t396.90\t12.79\t22.20\n0.06617\t0.00\t3.240\t0\t0.4600\t5.8680\t25.80\t5.2146\t4\t430.0\t16.90\t382.44\t9.97\t19.30\n0.06724\t0.00\t3.240\t0\t0.4600\t6.3330\t17.20\t5.2146\t4\t430.0\t16.90\t375.21\t7.34\t22.60\n0.04544\t0.00\t3.240\t0\t0.4600\t6.1440\t32.20\t5.8736\t4\t430.0\t16.90\t368.57\t9.09\t19.80\n0.05023\t35.00\t6.060\t0\t0.4379\t5.7060\t28.40\t6.6407\t1\t304.0\t16.90\t394.02\t12.43\t17.10\n0.03466\t35.00\t6.060\t0\t0.4379\t6.0310\t23.30\t6.6407\t1\t304.0\t16.90\t362.25\t7.83\t19.40\n0.05083\t0.00\t5.190\t0\t0.5150\t6.3160\t38.10\t6.4584\t5\t224.0\t20.20\t389.71\t5.68\t22.20\n0.03738\t0.00\t5.190\t0\t0.5150\t6.3100\t38.50\t6.4584\t5\t224.0\t20.20\t389.40\t6.75\t20.70\n0.03961\t0.00\t5.190\t0\t0.5150\t6.0370\t34.50\t5.9853\t5\t224.0\t20.20\t396.90\t8.01\t21.10\n0.03427\t0.00\t5.190\t0\t0.5150\t5.8
690\t46.30\t5.2311\t5\t224.0\t20.20\t396.90\t9.80\t19.50\n0.03041\t0.00\t5.190\t0\t0.5150\t5.8950\t59.60\t5.6150\t5\t224.0\t20.20\t394.81\t10.56\t18.50\n0.03306\t0.00\t5.190\t0\t0.5150\t6.0590\t37.30\t4.8122\t5\t224.0\t20.20\t396.14\t8.51\t20.60\n0.05497\t0.00\t5.190\t0\t0.5150\t5.9850\t45.40\t4.8122\t5\t224.0\t20.20\t396.90\t9.74\t19.00\n0.06151\t0.00\t5.190\t0\t0.5150\t5.9680\t58.50\t4.8122\t5\t224.0\t20.20\t396.90\t9.29\t18.70\n0.01301\t35.00\t1.520\t0\t0.4420\t7.2410\t49.30\t7.0379\t1\t284.0\t15.50\t394.74\t5.49\t32.70\n0.02498\t0.00\t1.890\t0\t0.5180\t6.5400\t59.70\t6.2669\t1\t422.0\t15.90\t389.96\t8.65\t16.50\n0.02543\t55.00\t3.780\t0\t0.4840\t6.6960\t56.40\t5.7321\t5\t370.0\t17.60\t396.90\t7.18\t23.90\n0.03049\t55.00\t3.780\t0\t0.4840\t6.8740\t28.10\t6.4654\t5\t370.0\t17.60\t387.97\t4.61\t31.20\n0.03113\t0.00\t4.390\t0\t0.4420\t6.0140\t48.50\t8.0136\t3\t352.0\t18.80\t385.64\t10.53\t17.50\n0.06162\t0.00\t4.390\t0\t0.4420\t5.8980\t52.30\t8.0136\t3\t352.0\t18.80\t364.61\t12.67\t17.20\n0.01870\t85.00\t4.150\t0\t0.4290\t6.5160\t27.70\t8.5353\t4\t351.0\t17.90\t392.43\t6.36\t23.10\n0.01501\t80.00\t2.010\t0\t0.4350\t6.6350\t29.70\t8.3440\t4\t280.0\t17.00\t390.94\t5.99\t24.50\n0.02899\t40.00\t1.250\t0\t0.4290\t6.9390\t34.50\t8.7921\t1\t335.0\t19.70\t389.85\t5.89\t26.60\n0.06211\t40.00\t1.250\t0\t0.4290\t6.4900\t44.40\t8.7921\t1\t335.0\t19.70\t396.90\t5.98\t22.90\n0.07950\t60.00\t1.690\t0\t0.4110\t6.5790\t35.90\t10.7103\t4\t411.0\t18.30\t370.78\t5.49\t24.10\n0.07244\t60.00\t1.690\t0\t0.4110\t5.8840\t18.50\t10.7103\t4\t411.0\t18.30\t392.33\t7.79\t18.60\n0.01709\t90.00\t2.020\t0\t0.4100\t6.7280\t36.10\t12.1265\t5\t187.0\t17.00\t384.46\t4.50\t30.10\n0.04301\t80.00\t1.910\t0\t0.4130\t5.6630\t21.90\t10.5857\t4\t334.0\t22.00\t382.80\t8.05\t18.20\n0.10659\t80.00\t1.910\t0\t0.4130\t5.9360\t19.50\t10.5857\t4\t334.0\t22.00\t376.04\t5.57\t20.60\n8.98296\t0.00\t18.100\t1\t0.7700\t6.2120\t97.40\t2.1222\t24\t666.0\t20.20\t377.73\t17.60\t17.80\n3.84970\t0.00\t18.100\t1\t0.7700\t6.395
0\t91.00\t2.5052\t24\t666.0\t20.20\t391.34\t13.27\t21.70\n5.20177\t0.00\t18.100\t1\t0.7700\t6.1270\t83.40\t2.7227\t24\t666.0\t20.20\t395.43\t11.48\t22.70\n4.26131\t0.00\t18.100\t0\t0.7700\t6.1120\t81.30\t2.5091\t24\t666.0\t20.20\t390.74\t12.67\t22.60\n4.54192\t0.00\t18.100\t0\t0.7700\t6.3980\t88.00\t2.5182\t24\t666.0\t20.20\t374.56\t7.79\t25.00\n3.83684\t0.00\t18.100\t0\t0.7700\t6.2510\t91.10\t2.2955\t24\t666.0\t20.20\t350.65\t14.19\t19.90\n3.67822\t0.00\t18.100\t0\t0.7700\t5.3620\t96.20\t2.1036\t24\t666.0\t20.20\t380.79\t10.19\t20.80\n4.22239\t0.00\t18.100\t1\t0.7700\t5.8030\t89.00\t1.9047\t24\t666.0\t20.20\t353.04\t14.64\t16.80\n3.47428\t0.00\t18.100\t1\t0.7180\t8.7800\t82.90\t1.9047\t24\t666.0\t20.20\t354.55\t5.29\t21.90\n4.55587\t0.00\t18.100\t0\t0.7180\t3.5610\t87.90\t1.6132\t24\t666.0\t20.20\t354.70\t7.12\t27.50\n3.69695\t0.00\t18.100\t0\t0.7180\t4.9630\t91.40\t1.7523\t24\t666.0\t20.20\t316.03\t14.00\t21.90\n3.52220\t0.00\t18.100\t0\t0.6310\t3.8630\t100.00\t1.5106\t24\t666.0\t20.20\t131.42\t13.33\t23.10\n4.89822\t0.00\t18.100\t0\t0.6310\t4.9700\t100.00\t1.3325\t24\t666.0\t20.20\t375.52\t3.26\t50.00\n5.66998\t0.00\t18.100\t1\t0.6310\t6.6830\t96.80\t1.3567\t24\t666.0\t20.20\t375.33\t3.73\t50.00\n6.53876\t0.00\t18.100\t1\t0.6310\t7.0160\t97.50\t1.2024\t24\t666.0\t20.20\t392.05\t2.96\t50.00\n9.23230\t0.00\t18.100\t0\t0.6310\t6.2160\t100.00\t1.1691\t24\t666.0\t20.20\t366.15\t9.53\t50.00\n8.26725\t0.00\t18.100\t1\t0.6680\t5.8750\t89.60\t1.1296\t24\t666.0\t20.20\t347.88\t8.88\t50.00\n1.10810\t0.00\t18.100\t0\t0.6680\t4.9060\t100.00\t1.1742\t24\t666.0\t20.20\t396.90\t34.77\t13.80\n8.49820\t0.00\t18.100\t0\t0.6680\t4.1380\t100.00\t1.1370\t24\t666.0\t20.20\t396.90\t37.97\t13.80\n9.60910\t0.00\t18.100\t0\t0.6710\t7.3130\t97.90\t1.3163\t24\t666.0\t20.20\t396.90\t13.44\t15.00\n5.28800\t0.00\t18.100\t0\t0.6710\t6.6490\t93.30\t1.3449\t24\t666.0\t20.20\t363.02\t23.24\t13.90\n9.82349\t0.00\t18.100\t0\t0.6710\t6.7940\t98.80\t1.3580\t24\t666.0\t20.20\t396.90\t21.24\t13.30\n3.648
20\t0.00\t18.100\t0\t0.6710\t6.3800\t96.20\t1.3861\t24\t666.0\t20.20\t396.90\t23.69\t13.10\n7.86670\t0.00\t18.100\t0\t0.6710\t6.2230\t100.00\t1.3861\t24\t666.0\t20.20\t393.74\t21.78\t10.20\n8.97620\t0.00\t18.100\t0\t0.6710\t6.9680\t91.90\t1.4165\t24\t666.0\t20.20\t396.90\t17.21\t10.40\n5.87440\t0.00\t18.100\t0\t0.6710\t6.5450\t99.10\t1.5192\t24\t666.0\t20.20\t396.90\t21.08\t10.90\n9.18702\t0.00\t18.100\t0\t0.7000\t5.5360\t100.00\t1.5804\t24\t666.0\t20.20\t396.90\t23.60\t11.30\n7.99248\t0.00\t18.100\t0\t0.7000\t5.5200\t100.00\t1.5331\t24\t666.0\t20.20\t396.90\t24.56\t12.30\n0.08490\t0.00\t18.100\t0\t0.7000\t4.3680\t91.20\t1.4395\t24\t666.0\t20.20\t285.83\t30.63\t8.80\n6.81180\t0.00\t18.100\t0\t0.7000\t5.2770\t98.10\t1.4261\t24\t666.0\t20.20\t396.90\t30.81\t7.20\n4.39380\t0.00\t18.100\t0\t0.7000\t4.6520\t100.00\t1.4672\t24\t666.0\t20.20\t396.90\t28.28\t10.50\n2.59710\t0.00\t18.100\t0\t0.7000\t5.0000\t89.50\t1.5184\t24\t666.0\t20.20\t396.90\t31.99\t7.40\n4.33370\t0.00\t18.100\t0\t0.7000\t4.8800\t100.00\t1.5895\t24\t666.0\t20.20\t372.92\t30.62\t10.20\n8.15174\t0.00\t18.100\t0\t0.7000\t5.3900\t98.90\t1.7281\t24\t666.0\t20.20\t396.90\t20.85\t11.50\n6.96215\t0.00\t18.100\t0\t0.7000\t5.7130\t97.00\t1.9265\t24\t666.0\t20.20\t394.43\t17.11\t15.10\n5.29305\t0.00\t18.100\t0\t0.7000\t6.0510\t82.50\t2.1678\t24\t666.0\t20.20\t378.38\t18.76\t23.20\n1.57790\t0.00\t18.100\t0\t0.7000\t5.0360\t97.00\t1.7700\t24\t666.0\t20.20\t396.90\t25.68\t9.70\n8.64476\t0.00\t18.100\t0\t0.6930\t6.1930\t92.60\t1.7912\t24\t666.0\t20.20\t396.90\t15.17\t13.80\n3.35980\t0.00\t18.100\t0\t0.6930\t5.8870\t94.70\t1.7821\t24\t666.0\t20.20\t396.90\t16.35\t12.70\n8.71675\t0.00\t18.100\t0\t0.6930\t6.4710\t98.80\t1.7257\t24\t666.0\t20.20\t391.98\t17.12\t13.10\n5.87205\t0.00\t18.100\t0\t0.6930\t6.4050\t96.00\t1.6768\t24\t666.0\t20.20\t396.90\t19.37\t12.50\n7.67202\t0.00\t18.100\t0\t0.6930\t5.7470\t98.90\t1.6334\t24\t666.0\t20.20\t393.10\t19.92\t8.50\n8.35180\t0.00\t18.100\t0\t0.6930\t5.4530\t100.00\t1.4896\t24\t666
.0\t20.20\t396.90\t30.59\t5.00\n9.91655\t0.00\t18.100\t0\t0.6930\t5.8520\t77.80\t1.5004\t24\t666.0\t20.20\t338.16\t29.97\t6.30\n5.04610\t0.00\t18.100\t0\t0.6930\t5.9870\t100.00\t1.5888\t24\t666.0\t20.20\t396.90\t26.77\t5.60\n4.23620\t0.00\t18.100\t0\t0.6930\t6.3430\t100.00\t1.5741\t24\t666.0\t20.20\t396.90\t20.32\t7.20\n9.59571\t0.00\t18.100\t0\t0.6930\t6.4040\t100.00\t1.6390\t24\t666.0\t20.20\t376.11\t20.31\t12.10\n4.80170\t0.00\t18.100\t0\t0.6930\t5.3490\t96.00\t1.7028\t24\t666.0\t20.20\t396.90\t19.77\t8.30\n1.52920\t0.00\t18.100\t0\t0.6930\t5.5310\t85.40\t1.6074\t24\t666.0\t20.20\t329.46\t27.38\t8.50\n7.92080\t0.00\t18.100\t0\t0.6930\t5.6830\t100.00\t1.4254\t24\t666.0\t20.20\t384.97\t22.98\t5.00\n0.71620\t0.00\t18.100\t0\t0.6590\t4.1380\t100.00\t1.1781\t24\t666.0\t20.20\t370.22\t23.34\t11.90\n1.95110\t0.00\t18.100\t0\t0.6590\t5.6080\t100.00\t1.2852\t24\t666.0\t20.20\t332.09\t12.13\t27.90\n7.40389\t0.00\t18.100\t0\t0.5970\t5.6170\t97.90\t1.4547\t24\t666.0\t20.20\t314.64\t26.40\t17.20\n4.43830\t0.00\t18.100\t0\t0.5970\t6.8520\t100.00\t1.4655\t24\t666.0\t20.20\t179.36\t19.78\t27.50\n1.13580\t0.00\t18.100\t0\t0.5970\t5.7570\t100.00\t1.4130\t24\t666.0\t20.20\t2.60\t10.11\t15.00\n4.05070\t0.00\t18.100\t0\t0.5970\t6.6570\t100.00\t1.5275\t24\t666.0\t20.20\t35.05\t21.22\t17.20\n8.81100\t0.00\t18.100\t0\t0.5970\t4.6280\t100.00\t1.5539\t24\t666.0\t20.20\t28.79\t34.37\t17.90\n8.65580\t0.00\t18.100\t0\t0.5970\t5.1550\t100.00\t1.5894\t24\t666.0\t20.20\t210.97\t20.08\t16.30\n5.74610\t0.00\t18.100\t0\t0.6930\t4.5190\t100.00\t1.6582\t24\t666.0\t20.20\t88.27\t36.98\t7.00\n8.08460\t0.00\t18.100\t0\t0.6790\t6.4340\t100.00\t1.8347\t24\t666.0\t20.20\t27.25\t29.05\t7.20\n0.83420\t0.00\t18.100\t0\t0.6790\t6.7820\t90.80\t1.8195\t24\t666.0\t20.20\t21.57\t25.79\t7.50\n5.94060\t0.00\t18.100\t0\t0.6790\t5.3040\t89.10\t1.6475\t24\t666.0\t20.20\t127.36\t26.64\t10.40\n3.53410\t0.00\t18.100\t0\t0.6790\t5.9570\t100.00\t1.8026\t24\t666.0\t20.20\t16.45\t20.62\t8.80\n1.81230\t0.00\t18.100\t0\t0.7180
\t6.8240\t76.50\t1.7940\t24\t666.0\t20.20\t48.45\t22.74\t8.40\n1.08740\t0.00\t18.100\t0\t0.7180\t6.4110\t100.00\t1.8589\t24\t666.0\t20.20\t318.75\t15.02\t16.70\n7.02259\t0.00\t18.100\t0\t0.7180\t6.0060\t95.30\t1.8746\t24\t666.0\t20.20\t319.98\t15.70\t14.20\n2.04820\t0.00\t18.100\t0\t0.6140\t5.6480\t87.60\t1.9512\t24\t666.0\t20.20\t291.55\t14.10\t20.80\n7.05042\t0.00\t18.100\t0\t0.6140\t6.1030\t85.10\t2.0218\t24\t666.0\t20.20\t2.52\t23.29\t13.40\n8.79212\t0.00\t18.100\t0\t0.5840\t5.5650\t70.60\t2.0635\t24\t666.0\t20.20\t3.65\t17.16\t11.70\n5.86030\t0.00\t18.100\t0\t0.6790\t5.8960\t95.40\t1.9096\t24\t666.0\t20.20\t7.68\t24.39\t8.30\n2.24720\t0.00\t18.100\t0\t0.5840\t5.8370\t59.70\t1.9976\t24\t666.0\t20.20\t24.65\t15.69\t10.20\n7.66190\t0.00\t18.100\t0\t0.6790\t6.2020\t78.70\t1.8629\t24\t666.0\t20.20\t18.82\t14.52\t10.90\n7.36711\t0.00\t18.100\t0\t0.6790\t6.1930\t78.10\t1.9356\t24\t666.0\t20.20\t96.73\t21.52\t11.00\n9.33889\t0.00\t18.100\t0\t0.6790\t6.3800\t95.60\t1.9682\t24\t666.0\t20.20\t60.72\t24.08\t9.50\n8.49213\t0.00\t18.100\t0\t0.5840\t6.3480\t86.10\t2.0527\t24\t666.0\t20.20\t83.45\t17.64\t14.50\n0.06230\t0.00\t18.100\t0\t0.5840\t6.8330\t94.30\t2.0882\t24\t666.0\t20.20\t81.33\t19.69\t14.10\n6.44405\t0.00\t18.100\t0\t0.5840\t6.4250\t74.80\t2.2004\t24\t666.0\t20.20\t97.95\t12.03\t16.10\n5.58107\t0.00\t18.100\t0\t0.7130\t6.4360\t87.90\t2.3158\t24\t666.0\t20.20\t100.19\t16.22\t14.30\n3.91340\t0.00\t18.100\t0\t0.7130\t6.2080\t95.00\t2.2222\t24\t666.0\t20.20\t100.63\t15.17\t11.70\n1.16040\t0.00\t18.100\t0\t0.7400\t6.6290\t94.60\t2.1247\t24\t666.0\t20.20\t109.85\t23.27\t13.40\n4.42080\t0.00\t18.100\t0\t0.7400\t6.4610\t93.30\t2.0026\t24\t666.0\t20.20\t27.49\t18.05\t9.60\n5.17720\t0.00\t18.100\t0\t0.7400\t6.1520\t100.00\t1.9142\t24\t666.0\t20.20\t9.32\t26.45\t8.70\n3.67810\t0.00\t18.100\t0\t0.7400\t5.9350\t87.90\t1.8206\t24\t666.0\t20.20\t68.95\t34.02\t8.40\n9.39063\t0.00\t18.100\t0\t0.7400\t5.6270\t93.90\t1.8172\t24\t666.0\t20.20\t396.90\t22.88\t12.80\n2.05110\t0.00\t18
.100\t0\t0.7400\t5.8180\t92.40\t1.8662\t24\t666.0\t20.20\t391.45\t22.11\t10.50\n9.72418\t0.00\t18.100\t0\t0.7400\t6.4060\t97.20\t2.0651\t24\t666.0\t20.20\t385.96\t19.52\t17.10\n5.66637\t0.00\t18.100\t0\t0.7400\t6.2190\t100.00\t2.0048\t24\t666.0\t20.20\t395.69\t16.59\t18.40\n9.96654\t0.00\t18.100\t0\t0.7400\t6.4850\t100.00\t1.9784\t24\t666.0\t20.20\t386.73\t18.85\t15.40\n2.80230\t0.00\t18.100\t0\t0.7400\t5.8540\t96.60\t1.8956\t24\t666.0\t20.20\t240.52\t23.79\t10.80\n0.67180\t0.00\t18.100\t0\t0.7400\t6.4590\t94.80\t1.9879\t24\t666.0\t20.20\t43.06\t23.98\t11.80\n6.28807\t0.00\t18.100\t0\t0.7400\t6.3410\t96.40\t2.0720\t24\t666.0\t20.20\t318.01\t17.79\t14.90\n9.92485\t0.00\t18.100\t0\t0.7400\t6.2510\t96.60\t2.1980\t24\t666.0\t20.20\t388.52\t16.44\t12.60\n9.32909\t0.00\t18.100\t0\t0.7130\t6.1850\t98.70\t2.2616\t24\t666.0\t20.20\t396.90\t18.13\t14.10\n7.52601\t0.00\t18.100\t0\t0.7130\t6.4170\t98.30\t2.1850\t24\t666.0\t20.20\t304.21\t19.31\t13.00\n6.71772\t0.00\t18.100\t0\t0.7130\t6.7490\t92.60\t2.3236\t24\t666.0\t20.20\t0.32\t17.44\t13.40\n5.44114\t0.00\t18.100\t0\t0.7130\t6.6550\t98.20\t2.3552\t24\t666.0\t20.20\t355.29\t17.73\t15.20\n5.09017\t0.00\t18.100\t0\t0.7130\t6.2970\t91.80\t2.3682\t24\t666.0\t20.20\t385.09\t17.27\t16.10\n8.24809\t0.00\t18.100\t0\t0.7130\t7.3930\t99.30\t2.4527\t24\t666.0\t20.20\t375.87\t16.74\t17.80\n9.51363\t0.00\t18.100\t0\t0.7130\t6.7280\t94.10\t2.4961\t24\t666.0\t20.20\t6.68\t18.71\t14.90\n4.75237\t0.00\t18.100\t0\t0.7130\t6.5250\t86.50\t2.4358\t24\t666.0\t20.20\t50.92\t18.13\t14.10\n4.66883\t0.00\t18.100\t0\t0.7130\t5.9760\t87.90\t2.5806\t24\t666.0\t20.20\t10.48\t19.01\t12.70\n8.20058\t0.00\t18.100\t0\t0.7130\t5.9360\t80.30\t2.7792\t24\t666.0\t20.20\t3.50\t16.94\t13.50\n7.75223\t0.00\t18.100\t0\t0.7130\t6.3010\t83.70\t2.7831\t24\t666.0\t20.20\t272.21\t16.23\t14.90\n6.80117\t0.00\t18.100\t0\t0.7130\t6.0810\t84.40\t2.7175\t24\t666.0\t20.20\t396.90\t14.70\t20.00\n4.81213\t0.00\t18.100\t0\t0.7130\t6.7010\t90.00\t2.5975\t24\t666.0\t20.20\t255.23\t1
6.42\t16.40\n3.69311\t0.00\t18.100\t0\t0.7130\t6.3760\t88.40\t2.5671\t24\t666.0\t20.20\t391.43\t14.65\t17.70\n6.65492\t0.00\t18.100\t0\t0.7130\t6.3170\t83.00\t2.7344\t24\t666.0\t20.20\t396.90\t13.99\t19.50\n5.82115\t0.00\t18.100\t0\t0.7130\t6.5130\t89.90\t2.8016\t24\t666.0\t20.20\t393.82\t10.29\t20.20\n7.83932\t0.00\t18.100\t0\t0.6550\t6.2090\t65.40\t2.9634\t24\t666.0\t20.20\t396.90\t13.22\t21.40\n3.16360\t0.00\t18.100\t0\t0.6550\t5.7590\t48.20\t3.0665\t24\t666.0\t20.20\t334.40\t14.13\t19.90\n3.77498\t0.00\t18.100\t0\t0.6550\t5.9520\t84.70\t2.8715\t24\t666.0\t20.20\t22.01\t17.15\t19.00\n4.42228\t0.00\t18.100\t0\t0.5840\t6.0030\t94.50\t2.5403\t24\t666.0\t20.20\t331.29\t21.32\t19.10\n5.57570\t0.00\t18.100\t0\t0.5800\t5.9260\t71.00\t2.9084\t24\t666.0\t20.20\t368.74\t18.13\t19.10\n3.07510\t0.00\t18.100\t0\t0.5800\t5.7130\t56.70\t2.8237\t24\t666.0\t20.20\t396.90\t14.76\t20.10\n4.34879\t0.00\t18.100\t0\t0.5800\t6.1670\t84.00\t3.0334\t24\t666.0\t20.20\t396.90\t16.29\t19.90\n4.03841\t0.00\t18.100\t0\t0.5320\t6.2290\t90.70\t3.0993\t24\t666.0\t20.20\t395.33\t12.87\t19.60\n3.56868\t0.00\t18.100\t0\t0.5800\t6.4370\t75.00\t2.8965\t24\t666.0\t20.20\t393.37\t14.36\t23.20\n4.64689\t0.00\t18.100\t0\t0.6140\t6.9800\t67.60\t2.5329\t24\t666.0\t20.20\t374.68\t11.66\t29.80\n8.05579\t0.00\t18.100\t0\t0.5840\t5.4270\t95.40\t2.4298\t24\t666.0\t20.20\t352.58\t18.14\t13.80\n6.39312\t0.00\t18.100\t0\t0.5840\t6.1620\t97.40\t2.2060\t24\t666.0\t20.20\t302.76\t24.10\t13.30\n4.87141\t0.00\t18.100\t0\t0.6140\t6.4840\t93.60\t2.3053\t24\t666.0\t20.20\t396.21\t18.68\t16.70\n5.02340\t0.00\t18.100\t0\t0.6140\t5.3040\t97.30\t2.1007\t24\t666.0\t20.20\t349.48\t24.91\t12.00\n0.23300\t0.00\t18.100\t0\t0.6140\t6.1850\t96.70\t2.1705\t24\t666.0\t20.20\t379.70\t18.03\t14.60\n4.33370\t0.00\t18.100\t0\t0.6140\t6.2290\t88.00\t1.9512\t24\t666.0\t20.20\t383.32\t13.11\t21.40\n5.82401\t0.00\t18.100\t0\t0.5320\t6.2420\t64.70\t3.4242\t24\t666.0\t20.20\t396.90\t10.74\t23.00\n5.70818\t0.00\t18.100\t0\t0.5320\t6.7500\t74.90\
t3.3317\t24\t666.0\t20.20\t393.07\t7.74\t23.70\n5.73116\t0.00\t18.100\t0\t0.5320\t7.0610\t77.00\t3.4106\t24\t666.0\t20.20\t395.28\t7.01\t25.00\n2.81838\t0.00\t18.100\t0\t0.5320\t5.7620\t40.30\t4.0983\t24\t666.0\t20.20\t392.92\t10.42\t21.80\n2.37857\t0.00\t18.100\t0\t0.5830\t5.8710\t41.90\t3.7240\t24\t666.0\t20.20\t370.73\t13.34\t20.60\n3.67367\t0.00\t18.100\t0\t0.5830\t6.3120\t51.90\t3.9917\t24\t666.0\t20.20\t388.62\t10.58\t21.20\n5.69175\t0.00\t18.100\t0\t0.5830\t6.1140\t79.80\t3.5459\t24\t666.0\t20.20\t392.68\t14.98\t19.10\n4.83567\t0.00\t18.100\t0\t0.5830\t5.9050\t53.20\t3.1523\t24\t666.0\t20.20\t388.22\t11.45\t20.60\n0.15086\t0.00\t27.740\t0\t0.6090\t5.4540\t92.70\t1.8209\t4\t711.0\t20.10\t395.09\t18.06\t15.20\n0.18337\t0.00\t27.740\t0\t0.6090\t5.4140\t98.30\t1.7554\t4\t711.0\t20.10\t344.05\t23.97\t7.00\n0.20746\t0.00\t27.740\t0\t0.6090\t5.0930\t98.00\t1.8226\t4\t711.0\t20.10\t318.43\t29.68\t8.10\n0.10574\t0.00\t27.740\t0\t0.6090\t5.9830\t98.80\t1.8681\t4\t711.0\t20.10\t390.11\t18.07\t13.60\n0.11132\t0.00\t27.740\t0\t0.6090\t5.9830\t83.50\t2.1099\t4\t711.0\t20.10\t396.90\t13.35\t20.10\n0.17331\t0.00\t9.690\t0\t0.5850\t5.7070\t54.00\t2.3817\t6\t391.0\t19.20\t396.90\t12.01\t21.80\n0.27957\t0.00\t9.690\t0\t0.5850\t5.9260\t42.60\t2.3817\t6\t391.0\t19.20\t396.90\t13.59\t24.50\n0.17899\t0.00\t9.690\t0\t0.5850\t5.6700\t28.80\t2.7986\t6\t391.0\t19.20\t393.29\t17.60\t23.10\n0.28960\t0.00\t9.690\t0\t0.5850\t5.3900\t72.90\t2.7986\t6\t391.0\t19.20\t396.90\t21.14\t19.70\n0.26838\t0.00\t9.690\t0\t0.5850\t5.7940\t70.60\t2.8927\t6\t391.0\t19.20\t396.90\t14.10\t18.30\n0.23912\t0.00\t9.690\t0\t0.5850\t6.0190\t65.30\t2.4091\t6\t391.0\t19.20\t396.90\t12.92\t21.20\n0.17783\t0.00\t9.690\t0\t0.5850\t5.5690\t73.50\t2.3999\t6\t391.0\t19.20\t395.77\t15.10\t17.50\n0.22438\t0.00\t9.690\t0\t0.5850\t6.0270\t79.70\t2.4982\t6\t391.0\t19.20\t396.90\t14.33\t16.80\n0.06263\t0.00\t11.930\t0\t0.5730\t6.5930\t69.10\t2.4786\t1\t273.0\t21.00\t391.99\t9.67\t22.40\n0.04527\t0.00\t11.930\t0\t0.5730\t6.12
00\t76.70\t2.2875\t1\t273.0\t21.00\t396.90\t9.08\t20.60\n0.06076\t0.00\t11.930\t0\t0.5730\t6.9760\t91.00\t2.1675\t1\t273.0\t21.00\t396.90\t5.64\t23.90\n0.10959\t0.00\t11.930\t0\t0.5730\t6.7940\t89.30\t2.3889\t1\t273.0\t21.00\t393.45\t6.48\t22.00\n0.04741\t0.00\t11.930\t0\t0.5730\t6.0300\t80.80\t2.5050\t1\t273.0\t21.00\t396.90\t7.88\t11.90"
  },
  {
    "path": "docs/input/index.fsx",
    "content": "﻿(*** hide ***)\n#r \"../../src/Hype/bin/Debug/DiffSharp.dll\"\n#r \"../../src/Hype/bin/Debug/Hype.dll\"\nopen DiffSharp.AD.Float32\n\n(**\nHype: Compositional Machine Learning and Hyperparameter Optimization\n====================================================================\n\nHype is a proof-of-concept deep learning library, where you can perform optimization on [compositional](http://mathworld.wolfram.com/Composition.html) machine learning systems of many components, even when such components themselves internally perform optimization. \n\nThis is enabled by nested automatic differentiation (AD) giving you access to the automatic exact derivative of any floating-point value in your code with respect to any other. Underlying computations are run by a BLAS/LAPACK backend (OpenBLAS by default).\n\n### Automatic derivatives\n\nYou do not need to worry about supplying gradients (or Hessians) of your models, which are computed exactly and efficiently by AD. The underlying AD functionality is provided by [DiffSharp](http://diffsharp.github.io/DiffSharp/index.html). \n\n\"Reverse mode\" AD is a generalized form of \"backpropagation\" and is distinct from numerical or symbolic differentiation.\n\nIn addition to reverse AD, Hype makes use of forward AD and nested combinations of forward and reverse AD. The core [differentiation API](http://diffsharp.github.io/DiffSharp/api-overview.html) provides gradients, Hessians, Jacobians, directional derivatives, and matrix-free exact Hessian- and Jacobian-vector products.\n\n### Hypergradients\n\nYou can get exact gradients of the training or validation loss with respect to hyperparameters. These __hypergradients__ allow you to do gradient-based optimization of gradient-based optimization, meaning that you can do things like optimizing learning rate and momentum schedules, weight initialization parameters, or step sizes and mass matrices in Hamiltonian Monte Carlo models. 
(A recent article doing this with Python: _Maclaurin, Dougal, David Duvenaud, and Ryan P. Adams. \"Gradient-based Hyperparameter Optimization through Reversible Learning.\" arXiv preprint arXiv:1502.03492 (2015)._)\n\n*)\n\nopen Hype\nopen Hype.Neural\n\n// Train a network with stochastic gradient descent and a learning rate schedule\nlet train (x:DV) = \n    let n = FeedForward()\n    n.Add(Linear(784, 300))\n    n.Add(tanh)\n    n.Add(Linear(300, 10))\n    let loss, _ = Layer.Train(n, data, {Params.Default with \n                                        LearningRate = Schedule x\n                                        Momentum = Momentum.DefaultNesterov\n                                        Batch = Minibatch 100\n                                        Loss = CrossEntropyOnLinear})\n    loss // Return the loss at the end of training\n\n// Train the training, i.e., optimize the learning schedule vector by using its hypergradient\nlet hypertrain = \n    Optimize.Minimize(train, DV.create 200 (D 1.f), {Params.Default with Epochs = 50})\n\n(**\n\nYou can also take derivatives with respect to training data, to analyze training sensitivities.\n\n### Compositionality\n\nNested AD handles higher-order derivatives up to any level, including in complex cases such as \n\n$$$\n    \\mathbf{min} \\left(x \\; \\mapsto \\; f(x) + \\mathbf{min} \\left( y \\; \\mapsto \\; g(x,\\,y) \\right) \\right)\\, ,\n\nwhere $\\mathbf{min}$ uses gradient-based optimization. (Note that the inner function has a reference to the argument of the outer function.) This allows you to create complex systems where many components may internally perform optimization.\n\nFor example, you can optimize the rules of a multi-player game where the players themselves optimize their own strategy using a simple model of the opponent which they optimize according to their opponent's observed behaviour. 
\n\nOr you can perform optimization of procedures that are internally using differentiation for purposes other than optimization, such as adaptive control or simulations.\n\n### Complex objective functions\n\nYou can use derivatives in the definition of objective functions for training your models. For example, your objective function can take input sensitivities into account, for training models that are invariant under a set of input transformations.\n\nRoadmap\n-------\n\n<div class=\"row\">\n<div class=\"col-sm-6\">\n<div class=\"alert alert-info\">\n  <strong>In the current release</strong> \n\n* OpenBLAS backend by default\n* Regression, feedforward neural networks\n* Recurrent neural networks, LSTMs, GRUs\n* Hamiltonian Monte Carlo\n</div>\n</div>\n\n<div class=\"col-sm-6\">\n<div class=\"alert alert-info\">\n  <strong>Upcoming features</strong> \n\n* GPU/CUDA backend\n* Probabilistic inference\n* Convolutional neural networks\n</div>\n</div>\n</div>\n\nAbout\n-----\n\nHype is developed by [Atılım Güneş Baydin](http://www.cs.nuim.ie/~gunes/) and [Barak A. Pearlmutter](http://bcl.hamilton.ie/~barak/) at the [Brain and Computation Lab](http://www.bcl.hamilton.ie/), Hamilton Institute, National University of Ireland Maynooth.\n\nLicense\n-------\n\nHype is released under the MIT license.\n*)\n\n"
  },
  {
    "path": "docs/input/templates/docpage.cshtml",
    "content": "﻿@{\n  Layout = \"template\";\n  Title = Properties[\"page-title\"];\n  Description = Properties[\"project-summary\"];\n}\n@Properties[\"document\"]\n@Properties[\"tooltips\"]"
  },
  {
    "path": "docs/input/templates/reference/module.cshtml",
    "content": "@using FSharp.MetadataFormat\n@{\n  Layout = \"template\";\n  Title = Model.Module.Name + \" - \" + Properties[\"project-name\"];\n}\n\n@{\n  // Get all the members & comment for the type\n  var members = (IEnumerable<Member>)Model.Module.AllMembers;\n  var comment = (Comment)Model.Module.Comment;\n\n  // Group all members by their category which is an inline annotation\n  // that can be added to members using special XML comment:\n  //\n  //     /// [category:Something]\n  //\n  // ...and can be used to categorize members in large modules or types\n  // (but if this is not used, then all members end up in just one category)\n  var byCategory = members\n    .GroupBy(m => m.Category)\n    .OrderBy(g => String.IsNullOrEmpty(g.Key) ? \"ZZZ\" : g.Key)\n    .Select((g, n) => new { \n      Index = n, \n      GroupKey = g.Key, \n      Members = g.OrderBy(m => m.Name), \n      Name = String.IsNullOrEmpty(g.Key) ? \"Other module members\" : g.Key\n    });\n\n  // Get nested modules and nested types as statically typed collections\n  var nestModules = (IEnumerable<Module>)Model.Module.NestedModules;\n  var nestTypes = (IEnumerable<FSharp.MetadataFormat.Type>)Model.Module.NestedTypes;\n}\n\n<h1>@Model.Module.Name</h1>\n<div class=\"xmldoc\">\n  @foreach (var sec in comment.Sections) {\n    // XML comment for the type has multiple sections that can be labelled\n    // with categories (to give comment for an individual category). 
Here, \n    // we print only those that belong to the <default> section.\n    if (!byCategory.Any(g => g.GroupKey == sec.Key))\n    {\n      if (sec.Key != \"<default>\")        {\n        <h2>@sec.Key</h2>\n      }\n      @sec.Value  \n    }\n  }\n</div>\n@if (byCategory.Count() > 1)\n{\n  <!-- If there is more than 1 category in the type, generate TOC -->\n  <h2>Table of contents</h2>\n  <ul>\n    @foreach (var g in byCategory)\n    {\n      <li><a href=\"@(\"#section\" + g.Index.ToString())\">@g.Name</a></li>            \n    }\n  </ul>\n}\n\n<!-- Render nested types and modules, if there are any -->\n@if (nestTypes.Count() + nestModules.Count() > 0)\n{\n  <h2>Nested types and modules</h2>\n  <div>\n    @RenderPart(\"part-nested\", new {\n      Types = nestTypes,\n      Modules = nestModules\n    })\n  </div>\n}\n\n@foreach (var g in byCategory)\n{\n  // Iterate over all the categories and print members. If there are more than one\n  // categories, print the category heading (as <h2>) and add XML comment from the type\n  // that is related to this specific category.\n  if (byCategory.Count() > 1)\n  {\n    <h2>@g.Name<a name=\"@(\"section\" + g.Index.ToString())\">&#160;</a></h2>    \n    var info = comment.Sections.FirstOrDefault(kvp => kvp.Key == g.GroupKey);\n    if (info.Key != null)\n    {\n      <div class=\"xmldoc\">\n        @info.Value\n      </div>\n    }\n  }\n\n  @RenderPart(\"part-members\", new { \n      Header = \"Functions and values\",\n      TableHeader = \"Function or value\",\n      Members = g.Members.Where(m => m.Kind == MemberKind.ValueOrFunction)\n  })\n\n  @RenderPart(\"part-members\", new { \n      Header = \"Type extensions\",\n      TableHeader = \"Type extension\",\n      Members = g.Members.Where(m => m.Kind == MemberKind.TypeExtension)\n  })\n\n  @RenderPart(\"part-members\", new { \n      Header = \"Active patterns\",\n      TableHeader = \"Active pattern\",\n      Members = g.Members.Where(m => m.Kind == 
MemberKind.ActivePattern)\n  })\n}"
  },
  {
    "path": "docs/input/templates/reference/namespaces.cshtml",
    "content": "@using FSharp.MetadataFormat\n@{\n  Layout = \"template\";\n  Title = \"Namespaces - \" + Properties[\"project-name\"];\n}\n\n<h1>@Model.Name</h1>\n\n@{ var nsIndex = 0; }\n@foreach (var ns in Model.Namespaces)\n{\n  nsIndex++;\n  var typedNs = (Namespace)ns;\n  var allCategories =\n    typedNs.Types.Select(t => t.Category)\n      .Concat(typedNs.Modules.Select(m => m.Category))\n      .Distinct()\n      .OrderBy(s => String.IsNullOrEmpty(s) ? \"ZZZ\" : s);\n  var allByCategory = \n    allCategories\n      .Select((c, i) => new {\n        Name = String.IsNullOrEmpty(c) ? \"Other namespace members\" : c,\n        Index = String.Format(\"{0}_{1}\", nsIndex, i),\n        Types = typedNs.Types.Where(t => t.Category == c).ToArray(),\n        Modules = typedNs.Modules.Where(m => m.Category == c).ToArray() })\n      .Where(c => c.Types.Length + c.Modules.Length > 0).ToArray();\n\n  <h2>@ns.Name Namespace</h2>\n  if (allByCategory.Length > 1)\n  {\n    <!-- If there is more than 1 category in the type, generate TOC -->\n    <ul>\n      @foreach (var g in allByCategory)\n      {\n        <li><a href=\"@(\"#section\" + g.Index.ToString())\">@g.Name</a></li>            \n      }\n    </ul>\n  }\n  foreach(var g in allByCategory) \n  {\n    if (allByCategory.Length > 1)\n    {\n      <h3><a class=\"anchor\" name=\"@(\"section\" + g.Index)\" href=\"#@(\"section\" + g.Index)\">@g.Name</a></h3>    \n    }\n    <div>\n    @RenderPart(\"part-nested\", new\n    {\n      Types = g.Types,\n      Modules = g.Modules\n    })\n    </div>\n  }\n}\n"
  },
  {
    "path": "docs/input/templates/reference/part-members.cshtml",
    "content": "@if (Enumerable.Count(Model.Members) > 0) {\n  <h3>@Model.Header</h3>\n  <table class=\"table table-bordered member-list\" style=\"border-color:#2f2f2f\">\n    <thead>\n      <tr><td style=\"border-color:#2f2f2f\">@Model.TableHeader</td><td style=\"border-color:#2f2f2f\">Description</td></tr>\n    </thead>\n    <tbody>\n    @foreach (var it in Model.Members)\n    {\n      <tr>\n        <td class=\"member-name\" style=\"border-color:#2f2f2f\">\n          @{ var id = Html.UniqueID().ToString(); }\n          <code onmouseout=\"hideTip(event, '@id', @id)\" onmouseover=\"showTip(event, '@id', @id)\">\n            @Html.Encode(it.Details.FormatUsage(40))\n          </code>\n          <div class=\"tip\" id=\"@id\">\n            <strong>Signature:</strong> @Html.Encode(it.Details.Signature)<br />\n            @if (!it.Details.Modifiers.IsEmpty) {\n              <strong>Modifiers:</strong> @it.Details.FormatModifiers<br />\n            }\n            @if (!it.Details.TypeArguments.IsEmpty) {\n              <strong>Type parameters:</strong> @it.Details.FormatTypeArguments\n            }\n          </div>\n        </td>\n        <td class=\"xmldoc\" style=\"border-color:#2f2f2f\">\n          @if (!String.IsNullOrEmpty(it.Details.FormatSourceLocation))\n          {\n            <a href=\"@it.Details.FormatSourceLocation\" class=\"github-link\">\n              <img src=\"../content/img/github.png\" class=\"normal\" />\n              <img src=\"../content/img/github-blue.png\" class=\"hover\" />\n            </a>\n          }\n          @it.Comment.FullText\n        </td>\n      </tr>\n    }\n    </tbody>\n  </table>\n}"
  },
  {
    "path": "docs/input/templates/reference/part-nested.cshtml",
    "content": "@if (Enumerable.Count(Model.Types) > 0) {\n  <table class=\"table table-bordered type-list\" style=\"border-color:#2f2f2f\">\n    <thead>\n      <tr><td style=\"border-color:#2f2f2f\">Type</td><td style=\"border-color:#2f2f2f\">Description</td></tr>\n    </thead>\n    <tbody>\n      @foreach (var it in Model.Types)\n      {\n        <tr>\n          <td class=\"type-name\" style=\"border-color:#2f2f2f\">\n            <a href=\"@(it.UrlName).html\">@it.Name</a>\n          </td>\n          <td class=\"xmldoc\" style=\"border-color:#2f2f2f\">@it.Comment.Blurb</td>\n        </tr>\n      }\n    </tbody>\n  </table>\n}\n@if (Enumerable.Count(Model.Modules) > 0) {\n  <table class=\"table table-bordered module-list\" style=\"border-color:#2f2f2f\">\n    <thead>\n      <tr><td style=\"border-color:#2f2f2f\">Module</td><td style=\"border-color:#2f2f2f\">Description</td></tr>\n    </thead>\n    <tbody>\n      @foreach (var it in Model.Modules)\n      {\n        <tr>\n          <td class=\"module-name\" style=\"border-color:#2f2f2f\">\n            <a href=\"@(it.UrlName).html\">@it.Name</a>\n          </td>\n          <td class=\"xmldoc\" style=\"border-color:#2f2f2f\">@it.Comment.Blurb</td>\n        </tr>\n      }\n    </tbody>\n  </table>\n}"
  },
  {
    "path": "docs/input/templates/reference/type.cshtml",
    "content": "@using FSharp.MetadataFormat\n@{\n  Layout = \"template\";\n  Title = Model.Type.Name + \" - \" + Properties[\"project-name\"];\n}\n\n@{\n  // Get all the members & comment for the type\n  var members = (IEnumerable<Member>)Model.Type.AllMembers;\n  var comment = (Comment)Model.Type.Comment;\n  \n  // Group all members by their category which is an inline annotation\n  // that can be added to members using special XML comment:\n  //\n  //     /// [category:Something]\n  //\n  // ...and can be used to categorize members in large modules or types\n  // (but if this is not used, then all members end up in just one category)\n  var byCategory = members\n    .GroupBy(m => m.Category)\n    .OrderBy(g => String.IsNullOrEmpty(g.Key) ? \"ZZZ\" : g.Key)\n    .Select((g, n) => new { \n      Index = n, \n      GroupKey = g.Key, \n      Members = g.OrderBy(m => m.Kind == MemberKind.StaticParameter ? \"\" : m.Name), \n      Name = String.IsNullOrEmpty(g.Key) ? \"Other type members\" : g.Key \n    });\n}\n\n<h1>@Model.Type.Name</h1>\n<div class=\"xmldoc\">\n  @foreach (var sec in comment.Sections) {\n    // XML comment for the type has multiple sections that can be labelled\n    // with categories (to give comment for an individual category). Here, \n    // we print only those that belong to the <default> section.\n    if (!byCategory.Any(g => g.GroupKey == sec.Key)) {\n      if (sec.Key != \"<default>\") {\n        <h2>@sec.Key</h2>\n      }\n      @sec.Value\n    }\n  }\n</div>\n@if (byCategory.Count() > 1)\n{\n  <!-- If there is more than 1 category in the type, generate TOC -->\n  <h2>Table of contents</h2>\n  <ul>\n    @foreach (var g in byCategory)\n    {\n      <li><a href=\"@(\"#section\" + g.Index.ToString())\">@g.Name</a></li>\n    }\n  </ul>\n}\n@foreach (var g in byCategory) {\n  // Iterate over all the categories and print members. 
If there are more than one\n  // categories, print the category heading (as <h2>) and add XML comment from the type\n  // that is related to this specific category.\n  if (byCategory.Count() > 1) {\n    <h2>@g.Name<a name=\"@(\"section\" + g.Index.ToString())\">&#160;</a></h2>\n    var info = comment.Sections.FirstOrDefault(kvp => kvp.Key == g.GroupKey);\n    if (info.Key != null) {\n      <div class=\"xmldoc\">\n        @info.Value\n      </div>\n    }\n  }\n\n  @RenderPart(\"part-members\", new {\n    Header = \"Union Cases\",\n    TableHeader = \"Union Case\",\n    Members = g.Members.Where(m => m.Kind == MemberKind.UnionCase)\n  })\n\n  @RenderPart(\"part-members\", new {\n    Header = \"Record Fields\",\n    TableHeader = \"Record Field\",\n    Members = g.Members.Where(m => m.Kind == MemberKind.RecordField)\n  })\n        \n  @RenderPart(\"part-members\", new {\n    Header = \"Static parameters\",\n    TableHeader = \"Static parameters\",\n    Members = g.Members.Where(m => m.Kind == MemberKind.StaticParameter)\n  })\n\n  @RenderPart(\"part-members\", new {\n    Header = \"Constructors\",\n    TableHeader = \"Constructor\",\n    Members = g.Members.Where(m => m.Kind == MemberKind.Constructor)\n  })\n\n  @RenderPart(\"part-members\", new {\n    Header = \"Instance members\",\n    TableHeader = \"Instance member\",\n    Members = g.Members.Where(m => m.Kind == MemberKind.InstanceMember)\n  })\n\n  @RenderPart(\"part-members\", new {\n    Header = \"Static members\",\n    TableHeader = \"Static member\",\n    Members = g.Members.Where(m => m.Kind == MemberKind.StaticMember)\n  })\n}\n"
  },
  {
    "path": "docs/input/templates/template.cshtml",
    "content": "﻿<!DOCTYPE html>\n<html lang=\"en\">\n  <head>\n    <meta charset=\"utf-8\">\n    <title>@Title</title>\n    <meta name=\"viewport\" content=\"width=device-width, initial-scale=1\">\n    <meta name=\"description\" content=\"Hype\">\n    <meta name=\"author\" content=\"Atılım Güneş Baydin; Barak A. Pearlmutter\">\n\n    <link rel=\"stylesheet\" href=\"http://maxcdn.bootstrapcdn.com/bootstrap/3.3.4/css/bootstrap.min.css\">\n    <script src=\"https://ajax.googleapis.com/ajax/libs/jquery/1.11.3/jquery.min.js\"></script>\n    <script src=\"http://maxcdn.bootstrapcdn.com/bootstrap/3.3.4/js/bootstrap.min.js\"></script>\n    <script type=\"text/javascript\" src=\"http://cdn.mathjax.org/mathjax/latest/MathJax.js?config=TeX-AMS-MML_HTMLorMML\"></script>\n\n    <script type=\"text/javascript\" src=\"https://www.google.com/jsapi\"></script>\n    <script type=\"text/javascript\">\n        google.load(\"visualization\", \"1.1\", { packages: [\"corechart\", \"annotationchart\", \"calendar\", \"linechart\", \"geochart\", \"map\", \"sankey\", \"table\", \"treemap\"] })\n    </script>\n\n    <link type=\"text/css\" rel=\"stylesheet\" href=\"../misc/style.css\" />\n    <script type=\"text/javascript\" src=\"../misc/tips.js\"></script>\n    <!-- HTML5 shim, for IE6-8 support of HTML5 elements -->\n    <!--[if lt IE 9]>\n      <script src=\"http://html5shim.googlecode.com/svn/trunk/html5.js\"></script>\n    <![endif]-->\n    <script>\n      (function(i,s,o,g,r,a,m){i['GoogleAnalyticsObject']=r;i[r]=i[r]||function(){\n      (i[r].q=i[r].q||[]).push(arguments)},i[r].l=1*new Date();a=s.createElement(o),\n      m=s.getElementsByTagName(o)[0];a.async=1;a.src=g;m.parentNode.insertBefore(a,m)\n      })(window,document,'script','//www.google-analytics.com/analytics.js','ga');\n\n      ga('create', 'UA-48900508-5', 'auto');\n      ga('send', 'pageview');\n\n    </script>    \n  </head>\n  <body>\n    <nav class=\"navbar navbar-inverse\">\n      <div class=\"container\">\n        
<div class=\"navbar-header\">\n          <a class=\"navbar-brand\" href=\"../index.html\"><span class=\"hype\">Hype</span></a>\n        </div>\n        <div>\n          <ul class=\"nav navbar-nav\">\n            <li><a href=\"https://github.com/hypelib/Hype\">GitHub</a></li>\n            <li><a href=\"https://github.com/hypelib/Hype/releases\">Release notes</a></li>\n            <li><a href=\"index.html\">API reference</a></li>\n            <li><a href=\"../download.html\">Download</a></li>\n          </ul>\n        </div>\n      </div>\n    </nav>\n    <div class=\"container\">\n      <div class=\"row\">\n        <div class=\"col-sm-9\">\n          @RenderBody()\n        </div>\n        <div class=\"col-sm-3\">\n          <a href=\"../index.html\"><img src=\"../img/hype.png\" style=\"width:200px\"/></a><br>\n          <ul class=\"nav nav-pills nav-stacked\">\n            <li><a href=\"../index.html\">Home</a></li>\n          </ul>                \n          <div class=\"nav-label\">Basics</div>\n          <ul class=\"nav nav-pills nav-stacked\">\n            <li><a href=\"../optimization.html\">Optimization</a></li>\n            <li><a href=\"../training.html\">Training</a></li>\n          </ul>\n          <div class=\"nav-label\">Examples</div>\n          <ul class=\"nav nav-pills nav-stacked\">\n            <li><a href=\"../regression.html\">Regression</a></li>\n            <li><a href=\"../feedforwardnets.html\">Neural Networks</a></li>\n            <li><a href=\"../recurrentnets.html\">Recurrent Neural Nets</a></li>\n            <li><a href=\"../hmc.html\">Hamiltonian MCMC</a></li>\n          </ul>\n        </div>\n      </div>\n    </div>\n  <!-- Start of StatCounter Code for Default Guide -->\n  <script type=\"text/javascript\">\n  var sc_project=10701961; \n  var sc_invisible=1; \n  var sc_security=\"a67091ad\"; \n  var scJsHost = ((\"https:\" == document.location.protocol) ?\n  \"https://secure.\" : \"http://www.\");\n  document.write(\"<sc\"+\"ript 
type='text/javascript' src='\" +\n  scJsHost+\n  \"statcounter.com/counter/counter.js'></\"+\"script>\");\n  </script>\n  <noscript><div class=\"statcounter\"><a title=\"shopify\n  analytics ecommerce\" href=\"http://statcounter.com/shopify/\"\n  target=\"_blank\"><img class=\"statcounter\"\n  src=\"http://c.statcounter.com/10701961/0/a67091ad/1/\"\n  alt=\"shopify analytics ecommerce\"></a></div></noscript>\n  <!-- End of StatCounter Code for Default Guide -->        \n  </body>  \n</html>"
  },
  {
    "path": "docs/input/templates/template.html",
    "content": "<!DOCTYPE html>\n<html lang=\"en\">\n  <head>\n    <meta charset=\"utf-8\">\n    <title>{page-title}</title>\n    <meta name=\"viewport\" content=\"width=device-width, initial-scale=1\">\n    <meta name=\"description\" content=\"Hype\">\n    <meta name=\"author\" content=\"Atılım Güneş Baydin; Barak A. Pearlmutter\">\n\n    <link rel=\"stylesheet\" href=\"http://maxcdn.bootstrapcdn.com/bootstrap/3.3.4/css/bootstrap.min.css\">\n    <script src=\"https://ajax.googleapis.com/ajax/libs/jquery/1.11.3/jquery.min.js\"></script>\n    <script src=\"http://maxcdn.bootstrapcdn.com/bootstrap/3.3.4/js/bootstrap.min.js\"></script>\n    <script type=\"text/javascript\" src=\"http://cdn.mathjax.org/mathjax/latest/MathJax.js?config=TeX-AMS-MML_HTMLorMML\"></script>\n\n    <script type=\"text/javascript\" src=\"https://www.google.com/jsapi\"></script>\n    <script type=\"text/javascript\">\n        google.load(\"visualization\", \"1.1\", { packages: [\"corechart\", \"annotationchart\", \"calendar\", \"linechart\", \"geochart\", \"map\", \"sankey\", \"table\", \"treemap\"] })\n    </script>\n\n    <link type=\"text/css\" rel=\"stylesheet\" href=\"misc/style.css\" />\n    <script type=\"text/javascript\" src=\"misc/tips.js\"></script>\n    <!-- HTML5 shim, for IE6-8 support of HTML5 elements -->\n    <!--[if lt IE 9]>\n      <script src=\"http://html5shim.googlecode.com/svn/trunk/html5.js\"></script>\n    <![endif]-->\n    <script>\n      (function(i,s,o,g,r,a,m){i['GoogleAnalyticsObject']=r;i[r]=i[r]||function(){\n      (i[r].q=i[r].q||[]).push(arguments)},i[r].l=1*new Date();a=s.createElement(o),\n      m=s.getElementsByTagName(o)[0];a.async=1;a.src=g;m.parentNode.insertBefore(a,m)\n      })(window,document,'script','//www.google-analytics.com/analytics.js','ga');\n\n      ga('create', 'UA-48900508-5', 'auto');\n      ga('send', 'pageview');\n\n    </script>    \n  </head>\n  <body>\n    <nav class=\"navbar navbar-inverse\">\n      <div class=\"container\">\n        
<div class=\"navbar-header\">\n          <a class=\"navbar-brand\" href=\"index.html\"><span class=\"hype\">Hype</span></a>\n        </div>\n        <div>\n          <ul class=\"nav navbar-nav\">\n            <li><a href=\"https://github.com/hypelib/Hype\">GitHub</a></li>\n            <li><a href=\"https://github.com/hypelib/Hype/releases\">Release notes</a></li>\n            <li><a href=\"reference/index.html\">API reference</a></li>\n            <li><a href=\"download.html\">Download</a></li>\n          </ul>\n        </div>\n      </div>\n    </nav>\n    <div class=\"container\">\n      <div class=\"row\">\n        <div class=\"col-sm-9\">\n          {document}\n          {tooltips}\n        </div>\n        <div class=\"col-sm-3\">\n          <a href=\"index.html\"><img src=\"img/hype.png\" style=\"width:200px\"/></a><br>\n          <ul class=\"nav nav-pills nav-stacked\">\n            <li><a href=\"index.html\">Home</a></li>\n          </ul>          \n          <div class=\"nav-label\">Basics</div>\n          <ul class=\"nav nav-pills nav-stacked\">\n            <li><a href=\"optimization.html\">Optimization</a></li>\n            <li><a href=\"training.html\">Training</a></li>\n          </ul>\n          <div class=\"nav-label\">Examples</div>\n          <ul class=\"nav nav-pills nav-stacked\">\n            <li><a href=\"regression.html\">Regression</a></li>\n            <li><a href=\"feedforwardnets.html\">Neural Networks</a></li>\n            <li><a href=\"recurrentnets.html\">Recurrent Neural Nets</a></li>\n            <li><a href=\"hmc.html\">Hamiltonian MCMC</a></li>\n          </ul>\n        </div>\n      </div>\n    </div>\n  <!-- Start of StatCounter Code for Default Guide -->\n  <script type=\"text/javascript\">\n  var sc_project=10701961; \n  var sc_invisible=1; \n  var sc_security=\"a67091ad\"; \n  var scJsHost = ((\"https:\" == document.location.protocol) ?\n  \"https://secure.\" : \"http://www.\");\n  document.write(\"<sc\"+\"ript 
type='text/javascript' src='\" +\n  scJsHost+\n  \"statcounter.com/counter/counter.js'></\"+\"script>\");\n  </script>\n  <noscript><div class=\"statcounter\"><a title=\"shopify\n  analytics ecommerce\" href=\"http://statcounter.com/shopify/\"\n  target=\"_blank\"><img class=\"statcounter\"\n  src=\"http://c.statcounter.com/10701961/0/a67091ad/1/\"\n  alt=\"shopify analytics ecommerce\"></a></div></noscript>\n  <!-- End of StatCounter Code for Default Guide -->    \n  </body>\n</html>"
  },
  {
    "path": "paket.dependencies",
    "content": "source https://api.nuget.org/v3/index.json\nframework: netstandard2.0\nredirects: on\nstorage: none\n\nnuget System.Drawing.Common >= 4.5.1\nnuget DiffSharp >= 0.8.4-beta\nnuget FSharp.Core\n\n#These packages are used in .fsx examples which are currently difficult to make cross-platform unless they're local\nnuget FSharp.Formatting storage: packages\n//nuget R.NET storage: packages\nnuget RProvider storage: packages\nnuget XPlot.GoogleCharts.WPF storage: packages"
  },
  {
    "path": "src/Hype/AssemblyInfo.fs",
    "content": "﻿namespace Hype.AssemblyInfo\n\nopen System.Reflection\nopen System.Runtime.CompilerServices\nopen System.Runtime.InteropServices\n\n// General Information about an assembly is controlled through the following \n// set of attributes. Change these attribute values to modify the information\n// associated with an assembly.\n[<assembly: AssemblyTitle(\"Hype\")>]\n[<assembly: AssemblyDescription(\"Hype: Compositional Machine Learning and Hyperparameter Optimization\")>]\n[<assembly: AssemblyConfiguration(\"\")>]\n[<assembly: AssemblyCompany(\"National University of Ireland Maynooth (Atilim Gunes Baydin, Barak A. Pearlmutter)\")>]\n[<assembly: AssemblyProduct(\"Hype\")>]\n[<assembly: AssemblyCopyright(\"Copyright (c) 2015, National University of Ireland Maynooth (Atilim Gunes Baydin, Barak A. Pearlmutter)\")>]\n[<assembly: AssemblyTrademark(\"\")>]\n[<assembly: AssemblyCulture(\"\")>]\n\n// Setting ComVisible to false makes the types in this assembly not visible \n// to COM components.  If you need to access a type in this assembly from \n// COM, set the ComVisible attribute to true on that type.\n[<assembly: ComVisible(false)>]\n\n// The following GUID is for the ID of the typelib if this project is exposed to COM\n[<assembly: Guid(\"c923664d-182e-48d5-bb30-f1505d7d28df\")>]\n\n// Version information for an assembly consists of the following four values:\n// \n//       Major Version\n//       Minor Version \n//       Build Number\n//       Revision\n// \n// You can specify all the values or you can default the Build and Revision Numbers \n// by using the '*' as shown below:\n// [<assembly: AssemblyVersion(\"1.0.*\")>]\n[<assembly: AssemblyVersion(\"0.1.3\")>]\n[<assembly: AssemblyInformationalVersion(\"0.1.3\")>]\n[<assembly: AssemblyFileVersion(\"0.1.3.*\")>]\n\ndo\n    ()"
  },
  {
    "path": "src/Hype/Classifier.fs",
    "content": "﻿//\n// This file is part of\n// Hype: Compositional Machine Learning and Hyperparameter Optimization\n//\n// Copyright (c) 2015, National University of Ireland Maynooth (Atilim Gunes Baydin, Barak A. Pearlmutter)\n//\n// Hype is released under the MIT license.\n// (See accompanying LICENSE file.)\n//\n// Written by:\n//\n//   Atilim Gunes Baydin\n//   atilimgunes.baydin@nuim.ie\n//\n//   Barak A. Pearlmutter\n//   barak@cs.nuim.ie\n//\n//   Brain and Computation Lab\n//   Hamilton Institute & Department of Computer Science\n//   National University of Ireland Maynooth\n//   Maynooth, Co. Kildare\n//   Ireland\n//\n//   www.bcl.hamilton.ie\n//\n\nnamespace Hype\n\nopen Hype\nopen Hype.Neural\nopen DiffSharp.AD.Float32\nopen DiffSharp.Util\n\n\n/// Base type for classifiers\n[<AbstractClass>]\ntype Classifier(f:DM->DM) =\n    let f = f\n    member c.Run(x:DM) = f x\n    member c.Run(x:DV) = x |> DM.ofDV x.Length |> f |> DM.toDV\n    abstract member Classify : DM -> int[]\n    abstract member Classify : DV -> int\n    member c.ClassificationError(x:DM, y:int[]) =\n        let cc = c.Classify(x)\n        let incorrect = Array.map2 (fun c y -> if c = y then 0 else 1) cc y\n        (float32 (incorrect |> Array.sum)) / (float32 incorrect.Length)\n    member c.ClassificationError(d:Dataset) =\n        c.ClassificationError(d.X, d.Yi)\n\n/// Classifier for binary classification\ntype LogisticClassifier(f) =\n    inherit Classifier(f)\n    new(l:Layer) = LogisticClassifier(l.Run)\n    override c.Classify(x:DM) =\n        let cc = Array.zeroCreate x.Cols\n        x |> f |> DM.iteriCols (fun i v -> if v.[0] > D 0.5f then cc.[i] <- 1)\n        cc\n    override c.Classify(x:DV) =\n        if c.Run(x).[0] > D 0.5f then 1 else 0\n    member c.ClassificationError(d:Dataset) =\n        let yi = d.Y |> DM.toDV |> DV.toArray |> Array.map (float32>>int)\n        c.ClassificationError(d.X, yi)\n\n/// Classifier for softmax classification\ntype SoftmaxClassifier(f) =\n    inherit Classifier(f)\n    new(l:Layer) = SoftmaxClassifier(l.Run)\n    override c.Classify(x:DM) = \n        let cc = Array.zeroCreate x.Cols\n        x |> f |> DM.iteriCols (fun i v -> cc.[i] <- DV.MaxIndex(v))\n        cc\n    override c.Classify(x:DV) =\n        DV.MaxIndex(c.Run(x))"
  },
  {
    "path": "src/Hype/Hype.fs",
    "content": "﻿//\n// This file is part of\n// Hype: Compositional Machine Learning and Hyperparameter Optimization\n//\n// Copyright (c) 2015, National University of Ireland Maynooth (Atilim Gunes Baydin, Barak A. Pearlmutter)\n//\n// Hype is released under the MIT license.\n// (See accompanying LICENSE file.)\n//\n// Written by:\n//\n//   Atilim Gunes Baydin\n//   atilimgunes.baydin@nuim.ie\n//\n//   Barak A. Pearlmutter\n//   barak@cs.nuim.ie\n//\n//   Brain and Computation Lab\n//   Hamilton Institute & Department of Computer Science\n//   National University of Ireland Maynooth\n//   Maynooth, Co. Kildare\n//   Ireland\n//\n//   www.bcl.hamilton.ie\n//\n\n/// Main namespace\nnamespace Hype\n\nopen System.IO\nopen DiffSharp.AD.Float32\nopen DiffSharp.Util\n\n/// Random number generator\ntype Rnd() =\n    static let mutable R = new System.Random()\n    /// Seed the random number generator with integer `seed`\n    static member Seed(seed) = R <- new System.Random(seed)\n    /// Generate a random permutation of a set of length `n`\n    static member Permutation(n:int) =\n        let swap i j (a:_[]) =\n            let tmp = a.[i]\n            a.[i] <- a.[j]\n            a.[j] <- tmp\n        let a = Array.init n (fun i -> i)\n        a |> Array.iteri (fun i _ -> swap i (R.Next(i, n)) a)\n        a\n    /// Sample a non-negative random integer\n    static member UniformInt() = R.Next()\n    /// Sample a non-negative random integer less than `max`\n    static member UniformInt(max) = R.Next(max)\n    /// Sample a random integer between `min` and `max`\n    static member UniformInt(min, max) = R.Next(min, max)\n    /// Sample a `float32` from the standard uniform distribution. X ~ U(0,1)\n    static member Uniform() = float32 (R.NextDouble())\n    /// Sample a `D` from the standard uniform distribution. X ~ U(0,1)\n    static member UniformD() = D (float32 (R.NextDouble()))\n    /// Sample a `float32` from the uniform distribution between zero and `max`. X ~ U(0,max)\n    static member Uniform(max) = max * (float32 (R.NextDouble()))\n    /// Sample a `D` from the uniform distribution between zero and `max`. X ~ U(0,max)\n    static member UniformD(max) = max * D (float32 (R.NextDouble()))\n    /// Sample a `float32` from the uniform distribution between `min` and `max`. X ~ U(min,max)\n    static member Uniform(min, max) = min + (float32 (R.NextDouble())) * (max - min)\n    /// Sample a `D` from the uniform distribution between `min` and `max`. X ~ U(min,max)\n    static member UniformD(min, max) = min + D (float32 (R.NextDouble())) * (max - min)\n    /// Sample a `float32` from the standard normal distribution. X ~ N(0,1)\n    static member Normal() =\n        let rec n() = \n            let x, y = (float32 (R.NextDouble())) * 2.0f - 1.0f, (float32 (R.NextDouble())) * 2.0f - 1.0f\n            let s = x * x + y * y\n            if s > 1.0f then n() else x * sqrt (-2.0f * (log s) / s)\n        n()\n    /// Sample a `D` from the standard normal distribution. X ~ N(0,1)\n    static member NormalD() = D (Rnd.Normal())\n    /// Sample a `float32` from the normal distribution with given mean `mu` and standard deviation `sigma`. X ~ N(mu,sigma)\n    static member Normal(mu, sigma) = Rnd.Normal() * sigma + mu\n    /// Sample a `D` from the normal distribution with given mean `mu` and standard deviation `sigma`. X ~ N(mu,sigma)\n    static member NormalD(mu, sigma) = Rnd.NormalD() * sigma + mu\n    \n    /// Sample a `DV` of length `n` from the standard uniform distribution. Elements of vector X ~ U(0,1)\n    static member UniformDV(n) = DV (Array.Parallel.init n (fun _ -> Rnd.Uniform()))\n    /// Sample a `DV` of length `n` from the uniform distribution between zero and `max`. Elements of vector X ~ U(0,max)\n    static member UniformDV(n, max) = DV.init n (fun _ -> Rnd.UniformD(max))\n    /// Sample a `DV` of length `n` from the uniform distribution between `min` and `max`. Elements of vector X ~ U(min,max)\n    static member UniformDV(n, min, max) = DV.init n (fun _ -> Rnd.UniformD(min, max))\n    /// Sample a `DV` of length `n` from the standard normal distribution. Elements of vector X ~ N(0,1)\n    static member NormalDV(n) = DV (Array.Parallel.init n (fun _ -> Rnd.Normal()))\n    /// Sample a `DV` of length `n` from the normal distribution with given mean `mu` and standard deviation `sigma`. Elements of vector X ~ N(mu,sigma)\n    static member NormalDV(n, mu, sigma) = DV.init n (fun _ -> Rnd.NormalD(mu, sigma))\n\n    /// Sample a `DM` of `m` rows and `n` columns from the standard uniform distribution. Elements of matrix X ~ U(0,1)\n    static member UniformDM(m, n) = DM (Array2D.Parallel.init m n (fun _ _ -> Rnd.Uniform()))\n    /// Sample a `DM` of `m` rows and `n` columns from the uniform distribution between zero and `max`. Elements of matrix X ~ U(0,max)\n    static member UniformDM(m, n, max) = DM.init m n (fun _ _ -> Rnd.UniformD(max))\n    /// Sample a `DM` of `m` rows and `n` columns from the uniform distribution between `min` and `max`. Elements of matrix X ~ U(min,max)\n    static member UniformDM(m, n, min, max) = DM.init m n (fun _ _ -> Rnd.UniformD(min, max))\n    /// Sample a `DM` of `m` rows and `n` columns from the standard normal distribution. Elements of matrix X ~ N(0,1)\n    static member NormalDM(m, n) = DM (Array2D.Parallel.init m n (fun _ _ -> Rnd.Normal()))\n    /// Sample a `DM` of `m` rows and `n` columns from the normal distribution with given mean `mu` and standard deviation `sigma`. Elements of matrix X ~ N(mu,sigma)\n    static member NormalDM(m, n, mu, sigma) = DM.init m n (fun _ _ -> Rnd.NormalD(mu, sigma))\n    \n    /// Select a random element of array `a`\n    static member Choice(a:_[]) = a.[R.Next(a.Length)]\n    /// Select a random element of array `a`, given selection probabilities in array `probs`\n    static member Choice(a:_[], probs:float32[]) = Rnd.Choice(a, toDV probs)\n    /// Select a random element of array `a`, given selection probabilities in vector `probs`\n    static member Choice(a:_[], probs:DV) =\n        let probs' = probs / (DV.sum(probs))\n        let p = float32 (R.NextDouble())\n        let mutable r = 0.f\n        let mutable i = 0\n        let mutable hit = false\n        while not hit do\n            r <- r + (float32 probs'.[i])\n            if r >= p then \n                hit <- true\n            else\n                i <- i + 1\n        a.[i]\n\n/// Dataset for holding training data\ntype Dataset private (x:DM, y:DM, xi:seq<int>, yi:seq<int>) =\n    /// The matrix X of input values, where columns are the individual inputs Xi\n    member val X = x with get\n    /// The matrix Y of output values, where columns are the individual outputs Yi\n    member val Y = y with get\n    /// The index of the maximum elements of individual inputs Xi, used for one-hot representations\n    member val Xi = xi |> Array.ofSeq with get\n    /// The index of the maximum elements of individual outputs Yi, used for one-hot representations\n    member val Yi = yi |> Array.ofSeq with get\n    /// Construct a dataset with given input matrix `x` and output matrix `y`. Columns of `x` and `y` are the individual inputs and corresponding outputs.\n    new(x:DM, y:DM) =\n        let xi = x |> DM.toCols |> Seq.toArray |> Array.map DV.maxIndex\n        let yi = y |> DM.toCols |> Seq.toArray |> Array.map DV.maxIndex\n        Dataset(x, y, xi, yi)\n    /// Construct a dataset of one-hot input and output elements. `xi` are the input indices, `onehotdimsx` is the input dimensions, `yi` are the output indices, `onehotdimsy` is the output dimensions.\n    new(xi:seq<int>, onehotdimsx:int, yi:seq<int>, onehotdimsy:int) =\n        let x = xi |> Seq.map (fun i -> DV.standardBasis onehotdimsx i) |> DM.ofCols\n        let y = yi |> Seq.map (fun i -> DV.standardBasis onehotdimsy i) |> DM.ofCols\n        Dataset(x, y, xi, yi)\n    /// Construct a dataset of one-hot input and output elements. `xi` are the input indices, input dimensions is max(xi) + 1, `yi` are the output indices, output dimensions is max(yi) + 1.\n    new(xi:seq<int>, yi:seq<int>) =\n        let onehotdimsx = 1 + Seq.max xi\n        let onehotdimsy = 1 + Seq.max yi\n        Dataset(xi, onehotdimsx, yi, onehotdimsy)\n    /// Construct a dataset with given input matrix `x` and one-hot output elements. `yi` are the output indices, `onehotdimsy` is the output dimensions.\n    new(x:DM, yi:seq<int>, onehotdimsy:int) =\n        let xi = x |> DM.toCols |> Seq.toArray |> Array.map DV.maxIndex\n        let y = yi |> Seq.map (fun i -> DV.standardBasis onehotdimsy i) |> DM.ofCols\n        Dataset(x, y, xi, yi)\n    /// Construct a dataset with one-hot input elements and given output matrix `y`. `xi` are the input indices, `onehotdimsx` is the input dimensions.\n    new(xi:seq<int>, onehotdimsx:int, y:DM) =\n        let x = xi |> Seq.map (fun i -> DV.standardBasis onehotdimsx i) |> DM.ofCols\n        let yi = y |> DM.toCols |> Seq.toArray |> Array.map DV.maxIndex\n        Dataset(x, y, xi, yi)\n    /// Construct a dataset with given input matrix `x` and one-hot output elements. `yi` are the output indices, output dimensions is max(yi) + 1.\n    new(x:DM, yi:seq<int>) =\n        let onehotdimsy = 1 + Seq.max yi\n        Dataset(x, yi, onehotdimsy)\n    /// Construct a dataset with one-hot input elements and given output matrix `y`. `xi` are the input indices, input dimensions is max(xi) + 1.\n    new(xi:seq<int>, y:DM) =\n        let onehotdimsx = 1 + Seq.max xi\n        Dataset(xi, onehotdimsx, y)\n    /// Construct a dataset from the given sequence of input-output vector pairs\n    new(s:seq<DV*DV>) =\n        let x, y = s |> Seq.toArray |> Array.unzip\n        Dataset(x |> DM.ofCols, y |> DM.ofCols)\n    /// The empty dataset\n    static member empty = Dataset(DM.empty, DM.empty)\n    /// Check whether dataset `d` is empty\n    static member isEmpty (d:Dataset) = DM.isEmpty d.X && DM.isEmpty d.Y\n    /// Normalize the values in the input matrix X and output matrix Y of dataset `d` to be in the range [0,1]\n    static member normalize (d:Dataset) = d.Normalize()\n    /// Normalize the values in the input matrix X of dataset `d` to be in the range [0,1]\n    static member normalizeX (d:Dataset) = d.NormalizeX()\n    /// Normalize the values in the output matrix Y of dataset `d` to be in the range [0,1]\n    static member normalizeY (d:Dataset) = d.NormalizeY()\n    /// Standardize the values in the input matrix X and output matrix Y of dataset `d` to have zero mean and unit variance\n    static member standardize (d:Dataset) = d.Standardize()\n    /// Standardize the values in the input matrix X of dataset `d` to have zero mean and unit variance\n    static member standardizeX (d:Dataset) = d.StandardizeX()\n    /// Standardize the values in the output matrix Y of dataset `d` to have zero mean and unit variance\n    static member standardizeY (d:Dataset) = d.StandardizeY()\n    /// Append a new row `v` to the input matrix X of dataset `d`\n    static member appendRowX (v:DV) (d:Dataset) = d.AppendRowX(v)\n    /// Append a new row `v` to the output matrix Y of dataset `d`\n    static member appendRowY (v:DV) (d:Dataset) = d.AppendRowY(v)\n    /// Append a row of ones to the input matrix X of dataset `d`\n    static member appendBiasRowX (d:Dataset) = d.AppendBiasRowX()\n    /// Get a summary string of dataset `d`\n    static member toString (d:Dataset) = d.ToString()\n    /// Get a string representation of dataset `d` showing all values\n    static member toStringFull (d:Dataset) = d.ToStringFull()\n    /// Get the input-output pairs of dataset `d` as a sequence\n    static member toSeq (d:Dataset) = d.ToSeq()\n    /// The length of dataset `d`, i.e., the number of columns in input matrix X and output matrix Y\n    static member length (d:Dataset) = d.Length\n    /// Sample a random subset of length `n` from dataset `d`\n    static member randomSubset (n:int) (d:Dataset) = d.RandomSubset(n)\n    /// Shuffle the order of elements in dataset `d`\n    static member shuffle (d:Dataset) = d.Shuffle()\n    /// Get the input-output pair with index `i` from dataset `d`\n    static member item (i:int) (d:Dataset) = d.[i]\n    /// Get element `i`\n    member d.Item\n        with get i = d.X.[*,i], d.Y.[*,i]\n    /// The length of the dataset, i.e., the number of columns in input matrix X and output matrix Y\n    member d.Length = d.X.Cols\n    /// Get the input-output pairs as a sequence\n    member d.ToSeq() =\n        Seq.init d.Length (fun i -> d.[i])\n    /// Sample a random subset of length `n` from this dataset\n    member d.RandomSubset(n) =\n        let bi = Rnd.Permutation(d.Length)\n        let x = Seq.init n (fun i -> d.X.[*, bi.[i]])\n        let y = Seq.init n (fun i -> d.Y.[*, bi.[i]])\n        Dataset(DM.ofCols x, DM.ofCols y)\n    /// Normalize the values in the input matrix X and output matrix Y to be in the range [0,1]\n    member d.Normalize() = Dataset(DM.normalize d.X, DM.normalize d.Y)\n    /// Normalize the values in the input matrix X to be in the range [0,1]\n    member d.NormalizeX() = Dataset(DM.normalize d.X, d.Y)\n    /// Normalize the values in the output matrix Y to be in the range [0,1]\n    member d.NormalizeY() = Dataset(d.X, DM.normalize d.Y)\n    /// Standardize the values in the input matrix X and output matrix Y to have zero mean and unit variance\n    member d.Standardize() = Dataset(DM.standardize d.X, DM.standardize d.Y)\n    /// Standardize the values in the input matrix X to have zero mean and unit variance\n    member d.StandardizeX() = Dataset(DM.standardize d.X, d.Y)\n    /// Standardize the values in the output matrix Y to have zero mean and unit variance\n    member d.StandardizeY() = Dataset(d.X, DM.standardize d.Y)\n    /// Shuffle the order of elements in the dataset\n    member d.Shuffle() = d.RandomSubset d.Length\n    /// Get a slice of the dataset between `lower` and `upper` indices\n    member d.GetSlice(lower, upper) =\n        let l = max 0 (defaultArg lower 0)\n        let u = min (d.X.Cols - 1) (defaultArg upper (d.Length - 1))\n        Dataset(d.X.[*,l..u], d.Y.[*,l..u])\n    /// Get a new dataset of the entries for which the `predicate` is true\n    member d.Filter (predicate:(DV*DV)->bool) =\n        d.ToSeq() |> Seq.filter predicate |> Dataset\n    /// Append a new row `v` to the input matrix X\n    member d.AppendRowX(v:DV) = Dataset(d.X |> DM.appendRow v, d.Y)\n    /// Append a new row `v` to the output matrix Y\n    member d.AppendRowY(v:DV) = Dataset(d.X, d.Y |> DM.appendRow v)\n    /// Append a row of all ones to the input matrix X\n    member d.AppendBiasRowX() = d.AppendRowX(DV.create d.Length 1.f)\n    /// Get a summary string of this dataset\n    override d.ToString() =\n        \"Hype.Dataset\\n\"\n            + sprintf \"   X: %i x %i\\n\" d.X.Rows d.X.Cols\n            + sprintf \"   Y: %i x %i\" d.Y.Rows d.Y.Cols\n    /// Get a string representation of this dataset showing all values\n    member d.ToStringFull() =\n        \"Hype.Dataset\\n\"\n            + sprintf \"   X:\\n%O\\n\\n\" d.X\n            + sprintf \"   Y:\\n%O\" d.Y\n    /// Get a string visualization of this dataset\n    member d.Visualize() =\n        \"Hype.Dataset\\n\"\n            + sprintf \"   X:\\n%s\\n\\n\" (d.X.Visualize())\n            + sprintf \"   Y:\\n%s\" (d.Y.Visualize())\n    /// Visualize the values of the input matrix X where each column will be reshaped to an image with `imagerows` rows\n    member d.VisualizeXColsAsImageGrid(imagerows:int) =\n        d.ToString() + \"\\n\"\n            + \"X's columns \" + Util.VisualizeDMRowsAsImageGrid(d.X |> DM.transpose, imagerows)\n    /// Visualize the values of the output matrix Y where each column will be reshaped to an image with `imagerows` rows\n    member d.VisualizeYColsAsImageGrid(imagerows:int) =\n        d.ToString() + \"\\n\"\n            + \"Y's columns \" + Util.VisualizeDMRowsAsImageGrid(d.Y |> DM.transpose, imagerows)\n\n/// Various utility functions\nand Util =\n    static member printLog (s:string) = printfn \"[%A] %s\" System.DateTime.Now s\n    static member printModel (f:DV->DV) (d:Dataset) =\n        d.ToSeq()\n        |> Seq.map (fun (x, y) -> f x, y)\n        |> Seq.iter (fun (x, y) -> printfn \"f x: %A, y: %A\" x y)\n    /// Load bitmap image with given `filename` to `DM`\n    static member LoadImage(filename:string) =\n        let bmp = new System.Drawing.Bitmap(filename)\n        let m = DM.init bmp.Height bmp.Width (fun i j -> float32 (bmp.GetPixel(i, j).GetBrightness()))\n        bmp.Dispose()\n        m\n    /// Load values from delimited text file with given `filename` and separator characters `separators`\n    static member LoadDelimited(filename:string, separators:char[]) =\n        System.IO.File.ReadLines(filename)\n        |> Seq.map (fun x -> x.Split(separators) |> Array.map float32)\n        |> Seq.map toDV\n        |> DM.ofRows\n    /// Load values from delimited text file with given `filename` and a default set of separator characters: space, comma, or tab\n    static member LoadDelimited(filename:string) =\n        Util.LoadDelimited(filename, [|' '; ','; '\\t'|])\n    /// Load values from the MNIST database images, from given `filename`, reading `n` number of elements\n    static member LoadMNISTPixels(filename, n) =\n        let d = new BinaryReader(File.Open(filename, FileMode.Open, FileAccess.Read, FileShare.Read))\n        let magicnumber = d.ReadInt32() |> System.Net.IPAddress.NetworkToHostOrder\n        match magicnumber with\n        | 2051 -> // Images\n            let maxitems = d.ReadInt32() |> System.Net.IPAddress.NetworkToHostOrder\n            let rows = d.ReadInt32() |> System.Net.IPAddress.NetworkToHostOrder\n            let cols = d.ReadInt32() |> System.Net.IPAddress.NetworkToHostOrder\n            let n = min n maxitems\n            d.ReadBytes(n * rows * cols)\n            |> Array.map float32\n            |> DV\n            |> DM.ofDV n\n            |> DM.transpose\n        | _ -> failwith \"Given file is not in the MNIST format.\"\n    /// Load values from the MNIST database labels, from given `filename`, reading `n` number of elements\n    static member LoadMNISTLabels(filename, n) =\n        let d = new BinaryReader(File.Open(filename, FileMode.Open, FileAccess.Read, FileShare.Read))\n        let magicnumber = d.ReadInt32() |> System.Net.IPAddress.NetworkToHostOrder\n        match magicnumber with\n        | 2049 -> // Labels\n            let maxitems = d.ReadInt32() |> System.Net.IPAddress.NetworkToHostOrder\n            d.ReadBytes(min n maxitems)\n            |> Array.map int\n        | _ -> failwith \"Given file is not in the MNIST format.\"\n    /// Load values from the MNIST database images, from given `filename`, reading all elements\n    static member LoadMNISTPixels(filename) = Util.LoadMNISTPixels(filename, System.Int32.MaxValue)\n    /// Load values from the MNIST database labels, from given `filename`, reading all elements\n    static member LoadMNISTLabels(filename) = Util.LoadMNISTLabels(filename, System.Int32.MaxValue)\n    /// Generate a string representation of matrix `w`, reshaping each row into an image with `imagerows` rows, and presenting resulting images together in an optimal grid layout.\n    static member VisualizeDMRowsAsImageGrid(w:DM, imagerows:int) =\n        let rows = w.Rows\n        let mm = int (floor (sqrt (float rows)))\n        let nn = int (ceil (float rows / float mm))\n        let m = imagerows\n        let n = (w.[0, *] |> DV.toDM m).Cols\n        let mutable mat = DM.create (mm * m) (nn * n) (DM.mean w)\n        for i = 0 to mm - 1 do\n            for j = 0 to nn - 1 do\n                let row = i * nn + j\n                if row < w.Rows then\n                    mat <- DM.AddSubMatrix(mat, i * m, j * n, w.[row, *] |> DV.toDM m)\n        sprintf \"reshaped to (%i x %i), presented in a (%i x %i) grid:\\n%s\\n\" m n mm nn (mat.Visualize())"
  },
  {
    "path": "src/Hype/Hype.fsproj",
    "content": "﻿<?xml version=\"1.0\" encoding=\"utf-8\"?>\n<Project Sdk=\"Microsoft.NET.Sdk\">\n  <PropertyGroup>\n    <TargetFrameworks>netstandard2.0</TargetFrameworks>\n    <PlatformTarget>x64</PlatformTarget>\n    <PackageLicenseExpression>BSD-2-Clause</PackageLicenseExpression>\n    <Title>Hype</Title>\n    <AutoGenerateBindingRedirects>true</AutoGenerateBindingRedirects>\n    <CopyLocalLockFileAssemblies>true</CopyLocalLockFileAssemblies>\n  </PropertyGroup>\n  <ItemGroup>\n    <Compile Include=\"AssemblyInfo.fs\" />\n    <Compile Include=\"Hype.fs\" />\n    <Compile Include=\"Optimize.fs\" />\n    <Compile Include=\"Neural.fs\" />\n    <Compile Include=\"Classifier.fs\" />\n    <Compile Include=\"NLP.fs\" />\n    <Compile Include=\"Inference.fs\" />\n    <Content Include=\"paket.references\" />\n    <Content Include=\"app.config\" />\n  </ItemGroup>\n  <Import Project=\"..\\..\\.paket\\Paket.Restore.targets\" />\n</Project>"
  },
  {
    "path": "src/Hype/Inference.fs",
    "content": "﻿//\n// This file is part of\n// Hype: Compositional Machine Learning and Hyperparameter Optimization\n//\n// Copyright (c) 2015, National University of Ireland Maynooth (Atilim Gunes Baydin, Barak A. Pearlmutter)\n//\n// Hype is released under the MIT license.\n// (See accompanying LICENSE file.)\n//\n// Written by:\n//\n//   Atilim Gunes Baydin\n//   atilimgunes.baydin@nuim.ie\n//\n//   Barak A. Pearlmutter\n//   barak@cs.nuim.ie\n//\n//   Brain and Computation Lab\n//   Hamilton Institute & Department of Computer Science\n//   National University of Ireland Maynooth\n//   Maynooth, Co. Kildare\n//   Ireland\n//\n//   www.bcl.hamilton.ie\n//\n\n/// Inference namespace\nnamespace Hype.Inference\n\nopen Hype\nopen DiffSharp.AD.Float32\nopen DiffSharp.Util\n\n/// Hamiltonian MCMC sampler\ntype HMCSampler() =\n    static member Sample(n, hdelta, hsteps, x0:DV, f:DV->D) =\n        let leapFrog (u:DV->D) (k:DV->D) (d:D) steps (x0, p0) =\n            let hd = d / 2.f\n            [1..steps] \n            |> List.fold (fun (x, p) _ ->\n                let p' = p - hd * grad u x\n                let x' = x + d * grad k p'\n                x', p' - hd * grad u x') (x0, p0)\n\n        let u x = -log (f x) // potential energy\n        let k p = (p * p) / D 2.f // kinetic energy\n        let hamilton x p = u x + k p\n        let x = ref x0\n        [|for i in 1..n do\n            let p = DV.init x0.Length (fun _ -> Rnd.Normal())\n            let x', p' = leapFrog u k hdelta hsteps (!x, p)\n            if Rnd.Uniform() < float32 (exp ((hamilton !x p) - (hamilton x' p'))) then x := x'\n            yield !x|]"
  },
  {
    "path": "src/Hype/NLP.fs",
    "content": "﻿//\n// This file is part of\n// Hype: Compositional Machine Learning and Hyperparameter Optimization\n//\n// Copyright (c) 2015, National University of Ireland Maynooth (Atilim Gunes Baydin, Barak A. Pearlmutter)\n//\n// Hype is released under the MIT license.\n// (See accompanying LICENSE file.)\n//\n// Written by:\n//\n//   Atilim Gunes Baydin\n//   atilimgunes.baydin@nuim.ie\n//\n//   Barak A. Pearlmutter\n//   barak@cs.nuim.ie\n//\n//   Brain and Computation Lab\n//   Hamilton Institute & Department of Computer Science\n//   National University of Ireland Maynooth\n//   Maynooth, Co. Kildare\n//   Ireland\n//\n//   www.bcl.hamilton.ie\n//\n\n/// Natural language processing namespace\nnamespace Hype.NLP\n\nopen Hype\nopen DiffSharp.AD.Float32\nopen DiffSharp.Util\n\n/// Language model\ntype Language(tokens:string[], punctuation:string[]) =\n    member val Tokens = tokens\n    \n    static member TokenizeWords(text:string, punctuation) =\n        //let mutable t' = text.ToLowerInvariant()\n        let mutable t' = text\n        punctuation |> Array.iter (fun p -> t' <- t'.Replace(p, \" \" + p + \" \"))\n        t'.Split([|\" \"|], System.StringSplitOptions.RemoveEmptyEntries)\n\n    new(text:string, punctuation:string[]) = Language(Language.TokenizeWords(text, punctuation) |> Set.ofArray |> Set.toArray, punctuation)\n    new(text:string) = Language(text, [|\".\"; \",\"; \":\"; \";\"; \"(\"; \")\"; \"!\"; \"?\"|])\n\n    member l.Length = l.Tokens.Length\n    member l.EncodeOneHot(x:string) =\n        Language.TokenizeWords(x, punctuation) |> l.EncodeOneHot\n    member l.EncodeOneHot(x:string[]) =\n        try\n            //x |> Array.map (fun v -> v.ToLowerInvariant())\n            x\n            |> Array.map (fun v -> Array.findIndex (fun t -> t = v) l.Tokens)\n            |> Array.map (DV.standardBasis l.Length) |> DM.ofCols\n        with\n            | _ -> failwith \"Given token is not found in the language.\"\n    member l.DecodeOneHot(x:DM) =\n        try\n            x |> DM.toCols |> Seq.map DV.maxIndex \n            |> Seq.map (fun i -> l.Tokens.[i]) |> Seq.toArray\n        with\n            | _ -> [||]\n    member l.Sample(probs:DM) = probs |> DM.toCols |> Seq.map (fun v -> Rnd.Choice(l.Tokens, v)) |> Seq.toArray\n    member l.Sample(probs:DV) = Rnd.Choice(l.Tokens, probs)\n    member l.Sample(model:DM->DM, start:string, stop:string[], maxlen) =\n        let mutable x = start\n        let mutable i = 0\n        let mutable t = ([while i < maxlen do\n                            yield x\n                            let p = x |> l.EncodeOneHot |> model\n                            let d = l.Sample(p).[0]\n                            match stop |> Array.tryFind (fun p -> p = d) with\n                                | Some(_) ->\n                                    yield d\n                                    i <- maxlen\n                                | _ -> \n                                    x <- d\n                                    i <- i + 1]\n                        |> List.map ((+) \" \")\n                        |> List.fold (+) \"\").Trim()\n        punctuation |> Array.iter (fun p -> t <- t.Replace(\" \" + p, p))\n        t"
  },
  {
    "path": "src/Hype/Neural.fs",
    "content": "﻿//\n// This file is part of\n// Hype: Compositional Machine Learning and Hyperparameter Optimization\n//\n// Copyright (c) 2015, National University of Ireland Maynooth (Atilim Gunes Baydin, Barak A. Pearlmutter)\n//\n// Hype is released under the MIT license.\n// (See accompanying LICENSE file.)\n//\n// Written by:\n//\n//   Atilim Gunes Baydin\n//   atilimgunes.baydin@nuim.ie\n//\n//   Barak A. Pearlmutter\n//   barak@cs.nuim.ie\n//\n//   Brain and Computation Lab\n//   Hamilton Institute & Department of Computer Science\n//   National University of Ireland Maynooth\n//   Maynooth, Co. Kildare\n//   Ireland\n//\n//   www.bcl.hamilton.ie\n//\n\n/// Neural networks namespace\nnamespace Hype.Neural\n\nopen Hype\nopen DiffSharp.AD.Float32\nopen DiffSharp.Util\n\n\n/// Base type for neural layers\n[<AbstractClass>]\ntype Layer() =\n    abstract member Init : unit -> unit\n    abstract member Reset : unit -> unit\n    abstract member Run : DM -> DM\n    abstract member Encode : unit -> DV\n    abstract member EncodeLength : int\n    abstract member Decode : DV -> unit\n    abstract member ToStringFull : unit -> string\n    abstract member Visualize : unit -> string\n    member l.Train(d:Dataset) = Layer.Train(l, d)\n    member l.Train(d:Dataset, v:Dataset) = Layer.Train(l, d, v)\n    member l.Train(d:Dataset, par:Params) = Layer.Train(l, d, par)\n    member l.Train(d:Dataset, v:Dataset, par:Params) = Layer.Train(l, d, v, par)\n    static member init (l:Layer) = l.Init()\n    static member reset (l:Layer) = l.Reset()\n    static member run x (l:Layer) = l.Run(x)\n    static member encode (l:Layer) = l.Encode()\n    static member encodeLength (l:Layer) = l.EncodeLength\n    static member decode (l:Layer) (w:DV) = l.Decode(w)\n    static member toString (l:Layer) = l.ToString()\n    static member toStringFull (l:Layer) = l.ToStringFull()\n    static member visualize (l:Layer) = l.Visualize()\n    static member Train (l:Layer, d:Dataset) = Layer.Train(l, d, Dataset.empty, Params.Default)\n    static member Train (l:Layer, d:Dataset, par:Params) = Layer.Train(l, d, Dataset.empty, par)\n    static member Train (l:Layer, d:Dataset, v:Dataset) = Layer.Train(l, d, v, Params.Default)\n    static member Train (l:Layer, d:Dataset, v:Dataset, par:Params) =\n        let f =\n            fun w x ->\n                l.Decode w\n                l.Run x\n        let w0 = l.Encode()\n//        try\n//            grad (fun w -> Loss.L1Loss.FuncDM(d) (f w)) w0 |> ignore\n//        with\n//            | _ -> failwith \"Input/output dimensions mismatch between dataset and the layer.\"\n        let w, loss, _, lhist = Optimize.Train(f, w0, d, v, par)\n        w |> l.Decode\n        loss, lhist\n\n/// Initialization schemes for neural layer weights\ntype Initializer =\n    | InitUniform of D * D\n    | InitNormal of D * D\n    | InitRBM of D\n    | InitReLU\n    | InitSigmoid\n    | InitTanh\n    | InitStandard\n    | InitCustom of (int->int->D)\n    override i.ToString() =\n        match i with\n        | InitUniform(min, max) -> sprintf \"Uniform min=%A max=%A\" min max\n        | InitNormal(mu, sigma) -> sprintf \"Normal mu=%A sigma=%A\" mu sigma\n        | InitRBM sigma -> sprintf \"RBM sigma=%A\" sigma\n        | InitReLU -> \"ReLU\"\n        | InitSigmoid -> \"Sigmoid\"\n        | InitTanh -> \"Tanh\"\n        | InitStandard -> \"Standard\"\n        | InitCustom f -> \"Custom\"\n    member i.InitDM(m, n) =\n        let fanOut, fanIn = m, n\n        match i with\n        | InitUniform(min, max) -> Rnd.UniformDM(m, n, min, max)\n        | InitNormal(mu, sigma) -> Rnd.NormalDM(m, n, mu, sigma)\n        | InitRBM sigma -> Rnd.NormalDM(m, n, D 0.f, sigma)\n        | InitReLU -> Rnd.NormalDM(m, n, D 0.f, sqrt (D 2.f / (float32 fanIn)))\n        | InitSigmoid -> let r = D 4.f * sqrt (D 6.f / (fanIn + fanOut)) in Rnd.UniformDM(m, n, -r, r)\n        | InitTanh -> let r = sqrt (D 6.f / (fanIn + fanOut)) in Rnd.UniformDM(m, n, -r, r)\n        | InitStandard -> let r = (D 1.f) / sqrt (float32 fanIn) in Rnd.UniformDM(m, n, -r, r)\n        | InitCustom f -> DM.init m n (fun _ _ -> f fanIn fanOut)\n    member i.InitDM(m:DM) = i.InitDM(m.Rows, m.Cols)\n\n\n/// Linear layer\ntype Linear(inputs:int, outputs:int, initializer:Initializer) =\n    inherit Layer()\n    new(inputs, outputs) = Linear(inputs, outputs, Initializer.InitStandard)\n    \n    member val W = initializer.InitDM(outputs, inputs) with get, set\n    member val b = DV.zeroCreate outputs with get, set\n    \n    override l.Init() =\n        l.W <- initializer.InitDM(l.W)\n        l.b <- DV.zeroCreate l.b.Length\n    override l.Reset() = ()\n    override l.Run (x:DM) = (l.W * x) + l.b\n    override l.Encode () = DV.append (DM.toDV l.W) l.b\n    override l.EncodeLength = l.W.Length + l.b.Length\n    override l.Decode w =\n        let ww = w |> DV.split [l.W.Length; l.b.Length] |> Array.ofSeq\n        l.W <- ww.[0] |> DM.ofDV l.W.Rows\n        l.b <- ww.[1]\n    override l.ToString() =\n        \"Hype.Neural.Linear\\n\" \n            + \"   \" + l.W.Cols.ToString() + \" -> \" + l.W.Rows.ToString() + \"\\n\"\n            + sprintf \"   Learnable parameters: %i\\n\" l.EncodeLength\n            + sprintf \"   Init: %O\\n\" initializer\n            + sprintf \"   W   : %i x %i\\n\" l.W.Rows l.W.Cols\n            + sprintf \"   b   : %i\" l.b.Length\n    override l.ToStringFull() =\n        \"Hype.Neural.Linear\\n\" \n            + \"   \" + l.W.Cols.ToString() + \" -> \" + l.W.Rows.ToString() + \"\\n\"\n            + sprintf \"   Learnable parameters: %i\\n\" l.EncodeLength\n            + sprintf \"   Init: %O\\n\" initializer\n            + sprintf \"   W:\\n%O\\n\" l.W\n            + sprintf \"   b:\\n%O\" l.b\n    override l.Visualize() =\n        \"Hype.Neural.Linear\\n\" \n            + \"   \" + l.W.Cols.ToString() + \" -> \" + l.W.Rows.ToString() + \"\\n\"\n            + sprintf \"   Learnable parameters: %i\\n\" l.EncodeLength\n            + sprintf \"   Init: %O\\n\" initializer\n            + sprintf \"   W:\\n%s\\n\" (l.W.Visualize())\n            + sprintf \"   b:\\n%s\" (l.b.Visualize())\n    member l.VisualizeWRowsAsImageGrid(imagerows:int) =\n        \"Hype.Neural.Linear\\n\" \n            + \"   \" + l.W.Cols.ToString() + \" -> \" + l.W.Rows.ToString() + \"\\n\"\n            + sprintf \"   Learnable parameters: %i\\n\" l.EncodeLength\n            + sprintf \"   Init: %O\\n\" initializer\n            + sprintf \"   W's rows %s\\n\" (Util.VisualizeDMRowsAsImageGrid(l.W, imagerows))\n            + sprintf \"   b:\\n%s\" (l.b.Visualize())\n\n\n/// Linear layer with no bias\ntype LinearNoBias(inputs:int, outputs:int, initializer:Initializer) =\n    inherit Layer()\n    new(inputs, outputs) = LinearNoBias(inputs, outputs, Initializer.InitStandard)\n\n    member val W = initializer.InitDM(outputs, inputs) with get, set\n\n    override l.Init() = l.W <- initializer.InitDM(l.W)\n    override l.Reset() = ()\n    override l.Run (x:DM) = l.W * x\n    override l.Encode () = l.W |> DM.toDV\n    override l.EncodeLength = l.W.Length\n    override l.Decode w = l.W <- w |> DM.ofDV l.W.Rows\n    override l.ToString() =\n        \"Hype.Neural.LinearNoBias\\n\" \n            + \"   \" + l.W.Cols.ToString() + \" -> \" + l.W.Rows.ToString() + \"\\n\"\n            + sprintf \"   Learnable parameters: %i\\n\" l.EncodeLength\n            + sprintf \"   Init: %O\\n\" initializer\n            + sprintf \"   W   : %i x %i\" l.W.Rows l.W.Cols\n    override l.ToStringFull() =\n        \"Hype.Neural.LinearNoBias\\n\" \n            + \"   \" + l.W.Cols.ToString() + \" -> \" + l.W.Rows.ToString() + \"\\n\"\n            + sprintf \"   Learnable parameters: %i\\n\" l.EncodeLength\n            + sprintf \"   Init: %O\\n\" initializer\n            + sprintf \"   W:\\n%O\" l.W\n    override l.Visualize() =\n        \"Hype.Neural.LinearNoBias\\n\" \n            + \"   \" + l.W.Cols.ToString() + \" -> \" + l.W.Rows.ToString() + 
\"\\n\"\n            + sprintf \"   Learnable parameters: %i\\n\" l.EncodeLength\n            + sprintf \"   Init: %O\\n\" initializer\n            + sprintf \"   W:\\n%s\" (l.W.Visualize())\n    member l.VisualizeWRowsAsImageGrid(imagerows:int) =\n        \"Hype.Neural.LinearNoBias\\n\" \n            + \"   \" + l.W.Cols.ToString() + \" -> \" + l.W.Rows.ToString() + \"\\n\"\n            + sprintf \"   Learnable parameters: %i\\n\" l.EncodeLength\n            + sprintf \"   Init: %O\\n\" initializer\n            + sprintf \"   W's rows %s\" (Util.VisualizeDMRowsAsImageGrid(l.W, imagerows))\n\n\n/// Activation layer with custom functions\ntype Activation(f:DM->DM) =\n    inherit Layer()\n    let f = f\n\n    override l.Init () = ()\n    override l.Reset () = ()\n    override l.Run (x:DM) = f x\n    override l.Encode () = DV.empty\n    override l.EncodeLength = 0\n    override l.Decode w = ()\n    override l.ToString() =\n        sprintf \"Hype.Neural.Activation\"\n    override l.ToStringFull() = l.ToString()\n    override l.Visualize() = l.ToString()\n\n\n/// Feedforward sequence of layers\ntype FeedForward() =\n    inherit Layer()\n    let mutable (layers:Layer[]) = Array.empty\n    let mutable encodelength = 0\n    let update() = \n        encodelength <- layers |> Array.map Layer.encodeLength |> Array.sum\n    member n.Add(l) =\n        layers <- Array.append layers [|l|]\n        update()\n    member n.Insert(i, l) =\n        let a = ResizeArray(layers)\n        a.Insert(i, l)\n        layers <- a.ToArray()\n        update()\n    member n.Remove(i) =\n        let a = ResizeArray(layers)\n        a.RemoveAt(i)\n        layers <- a.ToArray()\n        update()\n    member n.Add(f:DM->DM) = n.Add(Activation(f))\n    member n.Insert(i, f:DM->DM) = n.Insert(i, Activation(f))\n    member n.Length = layers.Length\n    member n.Item\n        with get i = layers.[i]\n    override n.Init() = layers |> Array.iter Layer.init\n    override n.Reset() = layers |> Array.iter 
Layer.reset\n    override n.Run(x:DM) = Array.fold Layer.run x layers\n    override n.Encode() = layers |> Array.map Layer.encode |> Array.reduce DV.append\n    override n.EncodeLength = encodelength\n    override n.Decode(w) =\n        w |> DV.split (layers |> Array.map Layer.encodeLength)\n        |> Seq.iter2 Layer.decode layers\n    override n.ToString() =\n        let s = System.Text.StringBuilder()\n        if n.Length > 0 then\n            s.Append(\"   \") |> ignore\n            for i = 0 to layers.Length - 1 do\n                s.Append(\"(\" + i.ToString() + \") -> \") |> ignore\n            s.Remove(s.Length - 4, 4) |> ignore\n            s.Append(\"\\n\\n\") |> ignore\n            for i = 0 to layers.Length - 1 do\n                s.Append(\"   (\" + i.ToString() + \"): \" + layers.[i].ToString() + \"\\n\\n\") |> ignore\n        \"Hype.Neural.FeedForward\\n\"\n            + sprintf \"   Learnable parameters: %i\\n\" encodelength\n            + s.ToString()\n    override n.ToStringFull() =\n        let s = System.Text.StringBuilder()\n        if n.Length > 0 then\n            s.Append(\"   \") |> ignore\n            for i = 0 to layers.Length - 1 do\n                s.Append(\"(\" + i.ToString() + \") -> \") |> ignore\n            s.Remove(s.Length - 4, 4) |> ignore\n            s.Append(\"\\n\\n\") |> ignore\n            for i = 0 to layers.Length - 1 do\n                s.Append(\"   (\" + i.ToString() + \"): \" + layers.[i].ToStringFull() + \"\\n\\n\") |> ignore\n        \"Hype.Neural.FeedForward\\n\"\n            + sprintf \"   Learnable parameters: %i\\n\" encodelength\n            + s.ToString()\n    override n.Visualize() =\n        let s = System.Text.StringBuilder()\n        if n.Length > 0 then\n            s.Append(\"   \") |> ignore\n            for i = 0 to layers.Length - 1 do\n                s.Append(\"(\" + i.ToString() + \") -> \") |> ignore\n            s.Remove(s.Length - 4, 4) |> ignore\n            s.Append(\"\\n\\n\") |> ignore\n   
         for i = 0 to layers.Length - 1 do\n                s.Append(\"   (\" + i.ToString() + \"): \" + layers.[i].Visualize() + \"\\n\\n\") |> ignore\n        \"Hype.Neural.FeedForward\\n\"\n            + sprintf \"   Learnable parameters: %i\\n\" encodelength\n            + s.ToString()\n\n\n/// Vanilla RNN layer\ntype Recurrent(inputs:int, hiddenunits:int, outputs:int, activation:DV->DV, initializer:Initializer) =\n    inherit Layer()\n    new(inputs, hiddenunits, outputs) = Recurrent(inputs, hiddenunits, outputs, tanh, Initializer.InitTanh)\n    new(inputs, hiddenunits, outputs, activation) = Recurrent(inputs, hiddenunits, outputs, activation, Initializer.InitTanh)\n\n    member val Act = activation with get\n    member val Whh = initializer.InitDM(hiddenunits, hiddenunits) with get, set\n    member val Wxh = initializer.InitDM(hiddenunits, inputs) with get, set\n    member val Why = initializer.InitDM(outputs, hiddenunits) with get, set\n    member val bh = DV.zeroCreate hiddenunits with get, set\n    member val by = DV.zeroCreate outputs with get, set\n    member val h = DV.zeroCreate hiddenunits with get, set\n\n    override l.Init() = \n        l.Whh <- initializer.InitDM(l.Whh)\n        l.Wxh <- initializer.InitDM(l.Wxh)\n        l.Why <- initializer.InitDM(l.Why)\n        l.bh <- DV.zeroCreate hiddenunits\n        l.by <- DV.zeroCreate outputs\n        l.h <- DV.zeroCreate hiddenunits\n    override l.Reset() = l.h <- DV.zeroCreate hiddenunits\n    override l.Run (x:DM) = \n        let y = x |> DM.mapCols (fun x -> \n                                    l.h <- l.Act ((l.Whh * l.h) + (l.Wxh * x) + l.bh)\n                                    (l.Why * l.h) + l.by)\n        l.h <- primalDeep l.h\n        y\n    override l.Encode () = [l.Whh; l.Wxh; l.Why] |> List.map DM.toDV |> List.append [l.bh; l.by] |> Seq.fold DV.append DV.Zero\n    override l.EncodeLength = l.Whh.Length + l.Wxh.Length + l.Why.Length + l.bh.Length + l.by.Length\n    override l.Decode w =\n  
      let ww = w |> DV.split [l.bh.Length; l.by.Length; l.Whh.Length; l.Wxh.Length; l.Why.Length] |> Array.ofSeq\n        l.bh <- ww.[0]\n        l.by <- ww.[1]\n        l.Whh <- ww.[2] |> DM.ofDV l.Whh.Rows\n        l.Wxh <- ww.[3] |> DM.ofDV l.Wxh.Rows\n        l.Why <- ww.[4] |> DM.ofDV l.Why.Rows\n        l.h <- DV.zeroCreate hiddenunits\n    override l.ToString() =\n        \"Hype.Neural.Recurrent\\n\"\n            + \"   \" + l.Wxh.Cols.ToString() + \" -> \" + l.Whh.Rows.ToString() + \" -> \" + l.Why.Rows.ToString() + \"\\n\"\n            + sprintf \"   Learnable parameters: %i\\n\" l.EncodeLength\n            + sprintf \"   Init: %O\\n\" initializer\n            + sprintf \"   Whh : %i x %i\\n\" l.Whh.Rows l.Whh.Cols\n            + sprintf \"   Wxh : %i x %i\\n\" l.Wxh.Rows l.Wxh.Cols\n            + sprintf \"   Why : %i x %i\\n\" l.Why.Rows l.Why.Cols\n            + sprintf \"   bh  : %i\\n\" l.bh.Length\n            + sprintf \"   by  : %i\" l.by.Length\n    override l.ToStringFull() =\n        \"Hype.Neural.Recurrent\\n\"\n            + \"   \" + l.Wxh.Cols.ToString() + \" -> \" + l.Whh.Rows.ToString() + \" -> \" + l.Why.Rows.ToString() + \"\\n\"\n            + sprintf \"   Learnable parameters: %i\\n\" l.EncodeLength\n            + sprintf \"   Init: %O\\n\" initializer\n            + sprintf \"   Whh:\\n%O\\n\" l.Whh\n            + sprintf \"   Wxh:\\n%O\\n\" l.Wxh\n            + sprintf \"   Why:\\n%O\\n\" l.Why\n            + sprintf \"   bh:\\n%O\\n\" l.bh\n            + sprintf \"   by:\\n%O\" l.by\n    override l.Visualize() =\n        \"Hype.Neural.Recurrent\\n\"\n            + \"   \" + l.Wxh.Cols.ToString() + \" -> \" + l.Whh.Rows.ToString() + \" -> \" + l.Why.Rows.ToString() + \"\\n\"\n            + sprintf \"   Learnable parameters: %i\\n\" l.EncodeLength\n            + sprintf \"   Init: %O\\n\" initializer\n            + sprintf \"   Whh:\\n%s\\n\" (l.Whh.Visualize())\n            + sprintf \"   Wxh:\\n%s\\n\" (l.Wxh.Visualize())\n           
 + sprintf \"   Why:\\n%s\\n\" (l.Why.Visualize())\n            + sprintf \"   bh:\\n%s\\n\" (l.bh.Visualize())\n            + sprintf \"   by:\\n%s\" (l.by.Visualize())\n\n\n/// Long short-term memory layer\ntype LSTM(inputs:int, memcells:int) =\n    inherit Layer()\n    let initializer = Initializer.InitTanh\n\n    member val Wxi = initializer.InitDM(memcells, inputs) with get, set\n    member val Whi = initializer.InitDM(memcells, memcells) with get, set\n    member val Wxc = initializer.InitDM(memcells, inputs) with get, set\n    member val Whc = initializer.InitDM(memcells, memcells) with get, set\n    member val Wxf = initializer.InitDM(memcells, inputs) with get, set\n    member val Whf = initializer.InitDM(memcells, memcells) with get, set\n    member val Wxo = initializer.InitDM(memcells, inputs) with get, set\n    member val Who = initializer.InitDM(memcells, memcells) with get, set\n    member val bi = DV.zeroCreate memcells with get, set\n    member val bc = DV.zeroCreate memcells with get, set\n    member val bf = DV.zeroCreate memcells with get, set\n    member val bo = DV.zeroCreate memcells with get, set\n    member val c = DV.zeroCreate memcells with get, set\n    member val h = DV.zeroCreate memcells with get, set\n\n    override l.Init() =\n        l.Wxi <- initializer.InitDM(l.Wxi)\n        l.Whi <- initializer.InitDM(l.Whi)\n        l.Wxc <- initializer.InitDM(l.Wxc)\n        l.Whc <- initializer.InitDM(l.Whc)\n        l.Wxf <- initializer.InitDM(l.Wxf)\n        l.Whf <- initializer.InitDM(l.Whf)\n        l.Wxo <- initializer.InitDM(l.Wxo)\n        l.Who <- initializer.InitDM(l.Who)\n        l.bi <- DV.zeroCreate memcells\n        l.bc <- DV.zeroCreate memcells\n        l.bf <- DV.zeroCreate memcells\n        l.bo <- DV.zeroCreate memcells\n        l.c <- DV.zeroCreate memcells\n        l.h <- DV.zeroCreate memcells\n    override l.Reset() =\n        l.c <- DV.zeroCreate memcells\n        l.h <- DV.zeroCreate memcells\n    override l.Run (x:DM) 
=\n        let y = x |> DM.mapCols (fun x ->\n                                    let i = sigmoid((l.Wxi * x) + (l.Whi * l.h) + l.bi)\n                                    let c' = tanh((l.Wxc * x) + (l.Whc * l.h) + l.bc)\n                                    let f = sigmoid((l.Wxf * x) + (l.Whf * l.h) + l.bf)\n                                    l.c <- (i .* c') + (f .* l.c)\n                                    let o = sigmoid((l.Wxo * x) + (l.Who * l.h) + l.bo)\n                                    l.h <- o .* tanh l.c\n                                    l.h)\n        l.h <- primalDeep l.h\n        l.c <- primalDeep l.c\n        y\n    override l.Encode() = [l.Wxi; l.Whi; l.Wxc; l.Whc; l.Wxf; l.Whf; l.Wxo; l.Who] |> List.map DM.toDV |> List.append [l.bi; l.bc; l.bf; l.bo] |> Seq.fold DV.append DV.Zero\n    override l.EncodeLength = l.Wxi.Length + l.Whi.Length + l.Wxc.Length + l.Whc.Length + l.Wxf.Length + l.Whf.Length + l.Wxo.Length + l.Who.Length + l.bi.Length + l.bc.Length + l.bf.Length + l.bo.Length\n    override l.Decode w =\n        let ww = w |> DV.split [l.bi.Length; l.bc.Length; l.bf.Length; l.bo.Length; l.Wxi.Length; l.Whi.Length; l.Wxc.Length; l.Whc.Length; l.Wxf.Length; l.Whf.Length; l.Wxo.Length; l.Who.Length] |> Array.ofSeq\n        l.bi <- ww.[0]\n        l.bc <- ww.[1]\n        l.bf <- ww.[2]\n        l.bo <- ww.[3]\n        l.Wxi <- ww.[4] |> DM.ofDV l.Wxi.Rows\n        l.Whi <- ww.[5] |> DM.ofDV l.Whi.Rows\n        l.Wxc <- ww.[6] |> DM.ofDV l.Wxc.Rows\n        l.Whc <- ww.[7] |> DM.ofDV l.Whc.Rows\n        l.Wxf <- ww.[8] |> DM.ofDV l.Wxf.Rows\n        l.Whf <- ww.[9] |> DM.ofDV l.Whf.Rows\n        l.Wxo <- ww.[10] |> DM.ofDV l.Wxo.Rows\n        l.Who <- ww.[11] |> DM.ofDV l.Who.Rows\n        l.c <- DV.zeroCreate memcells\n        l.h <- DV.zeroCreate memcells\n    override l.ToString() =\n        \"Hype.Neural.LSTM\\n\"\n            + \"   \" + inputs.ToString() + \" -> \" + memcells.ToString() + \" -> \" + memcells.ToString() + \"\\n\"\n         
   + sprintf \"   Learnable parameters: %i\\n\" l.EncodeLength\n            + sprintf \"   Init: %O\\n\" initializer\n            + sprintf \"   Wxi : %i x %i\\n\" l.Wxi.Rows l.Wxi.Cols\n            + sprintf \"   Whi : %i x %i\\n\" l.Whi.Rows l.Whi.Cols\n            + sprintf \"   Wxc : %i x %i\\n\" l.Wxc.Rows l.Wxc.Cols\n            + sprintf \"   Whc : %i x %i\\n\" l.Whc.Rows l.Whc.Cols\n            + sprintf \"   Wxf : %i x %i\\n\" l.Wxf.Rows l.Wxf.Cols\n            + sprintf \"   Whf : %i x %i\\n\" l.Whf.Rows l.Whf.Cols\n            + sprintf \"   Wxo : %i x %i\\n\" l.Wxo.Rows l.Wxo.Cols\n            + sprintf \"   Who : %i x %i\\n\" l.Who.Rows l.Who.Cols\n            + sprintf \"   bi  : %i\\n\" l.bi.Length\n            + sprintf \"   bc  : %i\\n\" l.bc.Length\n            + sprintf \"   bf  : %i\\n\" l.bf.Length\n            + sprintf \"   bo  : %i\" l.bo.Length\n    override l.ToStringFull() =\n        \"Hype.Neural.LSTM\\n\"\n            + \"   \" + inputs.ToString() + \" -> \" + memcells.ToString() + \" -> \" + memcells.ToString() + \"\\n\"\n            + sprintf \"   Learnable parameters: %i\\n\" l.EncodeLength\n            + sprintf \"   Init: %O\\n\" initializer\n            + sprintf \"   Wxi:\\n%O\\n\" l.Wxi\n            + sprintf \"   Whi:\\n%O\\n\" l.Whi\n            + sprintf \"   Wxc:\\n%O\\n\" l.Wxc\n            + sprintf \"   Whc:\\n%O\\n\" l.Whc\n            + sprintf \"   Wxf:\\n%O\\n\" l.Wxf\n            + sprintf \"   Whf:\\n%O\\n\" l.Whf\n            + sprintf \"   Wxo:\\n%O\\n\" l.Wxo\n            + sprintf \"   Who:\\n%O\\n\" l.Who\n            + sprintf \"   bi:\\n%O\\n\" l.bi\n            + sprintf \"   bc:\\n%O\\n\" l.bc\n            + sprintf \"   bf:\\n%O\\n\" l.bf\n            + sprintf \"   bo:\\n%O\" l.bo\n    override l.Visualize() =\n        \"Hype.Neural.LSTM\\n\"\n            + \"   \" + inputs.ToString() + \" -> \" + memcells.ToString() + \" -> \" + memcells.ToString() + \"\\n\"\n            + sprintf \"   Learnable 
parameters: %i\\n\" l.EncodeLength\n            + sprintf \"   Init: %O\\n\" initializer\n            + sprintf \"   Wxi:\\n%s\\n\" (l.Wxi.Visualize())\n            + sprintf \"   Whi:\\n%s\\n\" (l.Whi.Visualize())\n            + sprintf \"   Wxc:\\n%s\\n\" (l.Wxc.Visualize())\n            + sprintf \"   Whc:\\n%s\\n\" (l.Whc.Visualize())\n            + sprintf \"   Wxf:\\n%s\\n\" (l.Wxf.Visualize())\n            + sprintf \"   Whf:\\n%s\\n\" (l.Whf.Visualize())\n            + sprintf \"   Wxo:\\n%s\\n\" (l.Wxo.Visualize())\n            + sprintf \"   Who:\\n%s\\n\" (l.Who.Visualize())\n            + sprintf \"   bi:\\n%s\\n\" (l.bi.Visualize())\n            + sprintf \"   bc:\\n%s\\n\" (l.bc.Visualize())\n            + sprintf \"   bf:\\n%s\\n\" (l.bf.Visualize())\n            + sprintf \"   bo:\\n%s\" (l.bo.Visualize())\n\n\n/// Gated recurrent unit layer\ntype GRU(inputs:int, memcells:int) =\n    inherit Layer()\n    let initializer = Initializer.InitStandard\n\n    member val Wxz = initializer.InitDM(memcells, inputs) with get, set\n    member val Whz = initializer.InitDM(memcells, memcells) with get, set\n    member val Wxr = initializer.InitDM(memcells, inputs) with get, set\n    member val Whr = initializer.InitDM(memcells, memcells) with get, set\n    member val Wxh = initializer.InitDM(memcells, inputs) with get, set\n    member val Whh = initializer.InitDM(memcells, memcells) with get, set\n    member val bz = DV.zeroCreate memcells with get, set\n    member val br = DV.zeroCreate memcells with get, set\n    member val bh = DV.zeroCreate memcells with get, set\n    member val h = DV.zeroCreate memcells with get, set\n\n    override l.Init() =\n        l.Wxz <- initializer.InitDM(l.Wxz)\n        l.Whz <- initializer.InitDM(l.Whz)\n        l.Wxr <- initializer.InitDM(l.Wxr)\n        l.Whr <- initializer.InitDM(l.Whr)\n        l.Wxh <- initializer.InitDM(l.Wxh)\n        l.Whh <- initializer.InitDM(l.Whh)\n        l.bz <- DV.zeroCreate memcells\n        l.br 
<- DV.zeroCreate memcells\n        l.bh <- DV.zeroCreate memcells\n        l.h <- DV.zeroCreate memcells\n    override l.Reset() =\n        l.h <- DV.zeroCreate memcells\n    override l.Run(x:DM) =\n        let y = x |> DM.mapCols (fun x ->\n                                    let z = sigmoid(l.Wxz * x + l.Whz * l.h + l.bz)\n                                    let r = sigmoid(l.Wxr * x + l.Whr * l.h + l.br)\n                                    let h' = tanh(l.Wxh * x + l.Whh * (l.h .* r))\n                                    l.h <- (1.f - z) .* h' + z .* l.h\n                                    l.h)\n        l.h <- primalDeep l.h\n        y\n    override l.Encode() = [l.Wxz; l.Whz; l.Wxr; l.Whr; l.Wxh; l.Whh] |> List.map DM.toDV |> List.append [l.bz; l.br; l.bh] |> Seq.fold DV.append DV.Zero\n    override l.EncodeLength = l.Wxz.Length + l.Whz.Length + l.Wxr.Length + l.Whr.Length + l.Wxh.Length + l.Whh.Length + l.bz.Length + l.br.Length + l.bh.Length\n    override l.Decode w =\n        let ww = w |> DV.split [l.bz.Length; l.br.Length; l.bh.Length; l.Wxz.Length; l.Whz.Length; l.Wxr.Length; l.Whr.Length; l.Wxh.Length; l.Whh.Length] |> Array.ofSeq\n        l.bz <- ww.[0]\n        l.br <- ww.[1]\n        l.bh <- ww.[2]\n        l.Wxz <- ww.[3] |> DM.ofDV l.Wxh.Rows\n        l.Whz <- ww.[4] |> DM.ofDV l.Whz.Rows\n        l.Wxr <- ww.[5] |> DM.ofDV l.Wxr.Rows\n        l.Whr <- ww.[6] |> DM.ofDV l.Whr.Rows\n        l.Wxh <- ww.[7] |> DM.ofDV l.Wxh.Rows\n        l.Whh <- ww.[8] |> DM.ofDV l.Whh.Rows\n    override l.ToString() =\n        \"Hype.Neural.GRU\\n\"\n            + \"   \" + inputs.ToString() + \" -> \" + memcells.ToString() + \" -> \" + memcells.ToString() + \"\\n\"\n            + sprintf \"   Learnable parameters: %i\\n\" l.EncodeLength\n            + sprintf \"   Init: %O\\n\" initializer\n            + sprintf \"   Wxz : %i x %i\\n\" l.Wxz.Rows l.Wxz.Cols\n            + sprintf \"   Whz : %i x %i\\n\" l.Whz.Rows l.Whz.Cols\n            + sprintf \"   Wxr : %i x 
%i\\n\" l.Wxr.Rows l.Wxr.Cols\n            + sprintf \"   Whr : %i x %i\\n\" l.Whr.Rows l.Whr.Cols\n            + sprintf \"   Wxh : %i x %i\\n\" l.Wxh.Rows l.Wxh.Cols\n            + sprintf \"   Whh : %i x %i\\n\" l.Whh.Rows l.Whh.Cols\n            + sprintf \"   bz : %i\\n\" l.bz.Length\n            + sprintf \"   br : %i\\n\" l.br.Length\n            + sprintf \"   bh : %i\\n\" l.bh.Length\n    override l.ToStringFull() =\n        \"Hype.Neural.GRU\\n\"\n            + \"   \" + inputs.ToString() + \" -> \" + memcells.ToString() + \" -> \" + memcells.ToString() + \"\\n\"\n            + sprintf \"   Learnable parameters: %i\\n\" l.EncodeLength\n            + sprintf \"   Init: %O\\n\" initializer\n            + sprintf \"   Wxz:\\n%O\\n\" l.Wxz\n            + sprintf \"   Whz:\\n%O\\n\" l.Whz\n            + sprintf \"   Wxr:\\n%O\\n\" l.Wxr\n            + sprintf \"   Whr:\\n%O\\n\" l.Whr\n            + sprintf \"   Wxh:\\n%O\\n\" l.Wxh\n            + sprintf \"   Whh:\\n%O\\n\" l.Whh\n            + sprintf \"   bz:\\n%O\\n\" l.bz\n            + sprintf \"   br:\\n%O\\n\" l.br\n            + sprintf \"   bh:\\n%O\\n\" l.bh\n    override l.Visualize() =\n        \"Hype.Neural.GRU\\n\"\n            + \"   \" + inputs.ToString() + \" -> \" + memcells.ToString() + \" -> \" + memcells.ToString() + \"\\n\"\n            + sprintf \"   Learnable parameters: %i\\n\" l.EncodeLength\n            + sprintf \"   Init: %O\\n\" initializer\n            + sprintf \"   Wxz:\\n%s\\n\" (l.Wxz.Visualize())\n            + sprintf \"   Whz:\\n%s\\n\" (l.Whz.Visualize())\n            + sprintf \"   Wxr:\\n%s\\n\" (l.Wxr.Visualize())\n            + sprintf \"   Whr:\\n%s\\n\" (l.Whr.Visualize())\n            + sprintf \"   Wxh:\\n%s\\n\" (l.Wxh.Visualize())\n            + sprintf \"   Whh:\\n%s\\n\" (l.Whh.Visualize())\n            + sprintf \"   bz:\\n%s\\n\" (l.bz.Visualize())\n            + sprintf \"   br:\\n%s\\n\" (l.br.Visualize())\n            + sprintf \"   bh:\\n%s\\n\" 
(l.bh.Visualize())\n\n/// Long short-term memory layer (alternative implementation)\ntype LSTMAlt(inputs:int, memcells:int) =\n    inherit Layer()\n    let initializer = Initializer.InitTanh\n\n    member val Wxh = initializer.InitDM(4 * memcells, inputs) with get, set\n    member val Whh = initializer.InitDM(4 * memcells, memcells) with get, set\n    member val b = DV.zeroCreate (4 * memcells) with get, set\n    member val c = DV.zeroCreate memcells with get, set\n    member val h = DV.zeroCreate memcells with get, set\n\n    override l.Init() =\n        l.Wxh <- initializer.InitDM(l.Wxh)\n        l.Whh <- initializer.InitDM(l.Whh)\n        l.b <- DV.zeroCreate (4 * memcells)\n        l.c <- DV.zeroCreate memcells\n        l.h <- DV.zeroCreate memcells\n    override l.Reset() =\n        l.c <- DV.zeroCreate memcells\n        l.h <- DV.zeroCreate memcells\n    override l.Run(x:DM) =\n        let y = x |> DM.mapCols (fun x ->\n                                    let x2h = l.Wxh * x\n                                    let h2h = l.Whh * l.h\n                                    let pre = x2h + h2h + l.b\n                                    let pretan = tanh pre.[..memcells - 1]\n                                    let presig = sigmoid pre.[memcells..]\n                                    let c' = pretan\n                                    let i = presig.[..memcells - 1]\n                                    let f = presig.[memcells..(2 * memcells) - 1]\n                                    let o = presig.[(2 * memcells)..]\n                                    l.c <- (i .* c') + (f .* l.c)\n                                    l.h <- o .* tanh l.c\n                                    l.h)\n        l.h <- primalDeep l.h\n        l.c <- primalDeep l.c\n        y\n    override l.Encode() = [l.Wxh |> DM.toDV; l.Whh |> DM.toDV; l.b] |> Seq.fold DV.append DV.Zero\n    override l.EncodeLength = l.Wxh.Length + l.Whh.Length + l.b.Length\n    override l.Decode w =\n        let ww 
= w |> DV.split [l.Wxh.Length; l.Whh.Length; l.b.Length] |> Array.ofSeq\n        l.Wxh <- ww.[0] |> DM.ofDV l.Wxh.Rows\n        l.Whh <- ww.[1] |> DM.ofDV l.Whh.Rows\n        l.b <- ww.[2]\n    override l.ToString() =\n        \"Hype.Neural.LSTMAlt\\n\"\n            + \"   \" + inputs.ToString() + \" -> \" + memcells.ToString() + \" -> \" + memcells.ToString() + \"\\n\"\n            + sprintf \"   Learnable parameters: %i\\n\" l.EncodeLength\n            + sprintf \"   Init: %O\\n\" initializer\n            + sprintf \"   Wxh : %i x %i\\n\" l.Wxh.Rows l.Wxh.Cols\n            + sprintf \"   Whh : %i x %i\\n\" l.Whh.Rows l.Whh.Cols\n            + sprintf \"   b : %i\\n\" l.b.Length\n    override l.ToStringFull() =\n        \"Hype.Neural.LSTMAlt\\n\"\n            + \"   \" + inputs.ToString() + \" -> \" + memcells.ToString() + \" -> \" + memcells.ToString() + \"\\n\"\n            + sprintf \"   Learnable parameters: %i\\n\" l.EncodeLength\n            + sprintf \"   Init: %O\\n\" initializer\n            + sprintf \"   Wxh:\\n%O\\n\" l.Wxh\n            + sprintf \"   Whh:\\n%O\\n\" l.Whh\n            + sprintf \"   b:\\n%O\\n\" l.b\n    override l.Visualize() =\n        \"Hype.Neural.LSTMAlt\\n\"\n            + \"   \" + inputs.ToString() + \" -> \" + memcells.ToString() + \" -> \" + memcells.ToString() + \"\\n\"\n            + sprintf \"   Learnable parameters: %i\\n\" l.EncodeLength\n            + sprintf \"   Init: %O\\n\" initializer\n            + sprintf \"   Wxh:\\n%s\\n\" (l.Wxh.Visualize())\n            + sprintf \"   Whh:\\n%s\\n\" (l.Whh.Visualize())\n            + sprintf \"   b:\\n%s\\n\" (l.b.Visualize())"
  },
  {
    "path": "src/Hype/Optimize.fs",
    "content": "﻿//\n// This file is part of\n// Hype: Compositional Machine Learning and Hyperparameter Optimization\n//\n// Copyright (c) 2015, National University of Ireland Maynooth (Atilim Gunes Baydin, Barak A. Pearlmutter)\n//\n// Hype is released under the MIT license.\n// (See accompanying LICENSE file.)\n//\n// Written by:\n//\n//   Atilim Gunes Baydin\n//   atilimgunes.baydin@nuim.ie\n//\n//   Barak A. Pearlmutter\n//   barak@cs.nuim.ie\n//\n//   Brain and Computation Lab\n//   Hamilton Institute & Department of Computer Science\n//   National University of Ireland Maynooth\n//   Maynooth, Co. Kildare\n//   Ireland\n//\n//   www.bcl.hamilton.ie\n//\n\n/// Optimization namespace\nnamespace Hype\n\nopen Hype\nopen DiffSharp.AD.Float32\nopen DiffSharp.Util\n\n/// Learning rate schemes\ntype LearningRate =\n    /// Constant\n    | Constant    of D\n    /// 1 / t decay, a = a0 / (1 + kt). Initial value, decay rate\n    | Decay       of D * D\n    /// Exponential decay, a = a0 * Exp(-kt). Initial value, decay rate\n    | ExpDecay    of D * D\n    /// Scheduled learning rate vector, its length overrides Params.Epochs\n    | Schedule    of DV\n    /// Backtracking line search. Initial value, c, rho\n    | Backtrack   of D * D * D\n    /// Strong Wolfe line search. lmax, c1, c2\n    | StrongWolfe of D * D * D\n    /// Adagrad. Initial value\n    | AdaGrad     of D\n    /// RMSProp. 
Initial value, decay rate\n    | RMSProp     of D * D\n    static member DefaultConstant    = Constant (D 0.001f)\n    static member DefaultDecay       = Decay (D 1.f, D 0.1f)\n    static member DefaultExpDecay    = ExpDecay (D 1.f, D 0.1f)\n    static member DefaultBacktrack   = Backtrack (D 1.f, D 0.0001f, D 0.5f)\n    static member DefaultStrongWolfe = StrongWolfe (D 1.f, D 0.0001f, D 0.5f)\n    static member DefaultAdaGrad     = AdaGrad (D 0.001f)\n    static member DefaultRMSProp     = RMSProp (D 0.001f, D 0.9f)\n    override l.ToString() =\n        match l with\n        | Constant a                 -> sprintf \"Constant a = %A\" a\n        | Decay (a0, k)              -> sprintf \"1/t decay a0 = %A, k = %A\" a0 k\n        | ExpDecay (a0, k)           -> sprintf \"Exponential decay a = %A, k = %A\" a0 k\n        | Schedule a                 -> sprintf \"Scheduled of length %A\" a.Length\n        | Backtrack (a0, c, r)       -> sprintf \"Backtracking a0 = %A, c = %A, r = %A\" a0 c r\n        | StrongWolfe (amax, c1, c2) -> sprintf \"Strong Wolfe amax = %A, c1 = %A, c2 = %A\" amax c1 c2\n        | AdaGrad (a0)               -> sprintf \"AdaGrad a0 = %A\" a0\n        | RMSProp (a0, k)            -> sprintf \"RMSProp a0 = %A, k = %A\" a0 k\n    member l.Func =\n        let loopLimit = 500\n        match l with\n        | Constant a -> fun _ _ _ _ _ _ _ -> box a\n        | Decay (a0, k) -> fun i _ _ _ _ _ _ -> box (a0 / (1.f + k * i))\n        | ExpDecay (a0, k) -> fun i _ _ _ _ _ _ -> box (a0 * exp (-k * i))\n        | Schedule a -> fun i _ _ _ _ _ _ -> box a.[i % a.Length]\n        | Backtrack (a0, c, r) ->\n            fun _ w f v g _ p ->\n                let mutable a = a0\n                let mutable i = 0\n                let mutable found = false\n                while not found do\n                    if f (w + a * p) < v + c * a * (p * g) then \n                        found <- true\n                    else\n                        a <- r * a\n           
         i <- i + 1\n                    if i > loopLimit then\n                        found <- true\n                        Util.printLog \"*** BACKTRACKING DID NOT CONVERGE ***\"\n                box a\n        | StrongWolfe (amax, c1, c2) ->\n            fun _ w f v g _ p ->\n                let v0 = v\n                let gp0 = g * p\n                let inline zoom a1 a2 =\n                    let mutable al = a1\n                    let mutable ah = a2\n                    let mutable a' = a1\n                    let mutable v'al = f (w + al * p)\n                    let mutable i = 0\n                    let mutable found = false\n                    while not found do\n                        a' <- (al + ah) / D 2.f\n                        let v', gg = grad' f (w + a' * p)\n                        if (v' > v0 + c1 * a' * gp0) || (v' >= v'al) then\n                            ah <- a'\n                        else\n                            let gp' = gg * p\n                            if abs gp' <= -c2 * gp0 then\n                                found <- true\n                            elif gp' * (ah - al) >= D 0.f then\n                                ah <- al\n                                al <- a'\n                                v'al <- v'\n                        i <- i + 1\n                        if i > loopLimit then\n                            found <- true\n                            Util.printLog \"*** STRONG WOLFE (ZOOM) DID NOT CONVERGE ***\"\n                    a'\n                            \n                let mutable v = v0\n                let mutable v' = v0\n                let mutable gp' = gp0\n                let mutable a = D 0.f\n                let mutable a' = Rnd.UniformD(amax)\n                let mutable a'' = a'\n                let mutable i = 1\n                let mutable found = false\n                while not found do\n                    let vv, gg = grad' f (w + a' * p)\n                    v' <- vv\n     
               gp' <- gg * p\n                    if (v' > v0 + c1 * a' * gp0) || ((i > 1) && (v' >= v)) then\n                        a'' <- zoom a a'\n                        found <- true\n                    elif (abs gp') <= (-c2 * gp0) then\n                        a'' <- a'\n                        found <- true\n                    elif gp' >= D 0.f then\n                        a'' <- zoom a' a\n                        found <- true\n                    else\n                        a <- a'\n                        v <- v'\n                        a' <- Rnd.UniformD(a', amax)\n                        i <- i + 1\n                    if i > loopLimit then\n                        found <- true\n                        Util.printLog \"*** STRONG WOLFE DID NOT CONVERGE ***\"\n                box a''\n        | AdaGrad (a0) ->\n            fun _ _ _ _ g (gcache:DV ref) _ ->\n                gcache := !gcache + (g .* g)\n                box (a0 / sqrt (!gcache + 1e-8f))\n        | RMSProp (a0, k) ->\n            fun _ _ _ _ g (gcache:DV ref) _ ->\n                gcache := (k * !gcache) + (1.f - k) * (g .* g)\n                box (a0 / sqrt (!gcache + 1e-6f))\n\n/// Training batch configuration\ntype Batch =\n    | Full\n    /// Minibatch of given size\n    | Minibatch of int\n    /// Minibatch with size 1, SGD\n    | Stochastic\n    override b.ToString() =\n        match b with\n        | Full        -> \"Full\"\n        | Minibatch n -> sprintf \"Minibatches of %A\" n\n        | Stochastic  -> \"Stochastic (minibatch of 1)\"\n    member b.Func =\n        match b with\n        | Full -> fun (d:Dataset) _ -> d\n        | Minibatch n ->    fun d i -> d.[(n * i)..((n * i) + n - 1)]\n        | Stochastic ->     fun d i -> d.[i..i]\n\n/// Gradient-based optimization methods\ntype Method =\n    /// Gradient descent\n    | GD\n    /// Conjugate gradient\n    | CG\n    /// Conjugate descent\n    | CD\n    /// Nonlinear conjugate gradient\n    | NonlinearCG\n    /// Dai 
& Yuan conjugate gradient\n    | DaiYuanCG\n    /// Newton conjugate gradient\n    | NewtonCG\n    /// Exact Newton\n    | Newton\n    override o.ToString() =\n        match o with\n        | GD          -> \"Gradient descent\"\n        | CG          -> \"Conjugate gradient\"\n        | CD          -> \"Conjugate descent\"\n        | DaiYuanCG   -> \"Dai & Yuan conjugate gradient\"\n        | NonlinearCG -> \"Nonlinear conjugate gradient\"\n        | NewtonCG    -> \"Newton conjugate gradient\"\n        | Newton      -> \"Exact Newton\"\n    member o.Func =\n        match o with\n        | GD ->\n            fun w (f:DV->D) _ _ gradclip ->\n                let v', g' = grad' f w\n                let g' = gradclip g'\n                let p' = -g'\n                v', g', p'\n        /// Hestenes and Stiefel 1952\n        | CG ->\n            fun w f g p gradclip ->\n                let v', g' = grad' f w\n                let g' = gradclip g'\n                let y = g' - g\n                let b = (g' * y) / (p * y)\n                let p' = -g' + b * p\n                v', g', p'\n        /// Fletcher 1987\n        | CD ->\n            fun w f g p gradclip ->\n                let v', g' = grad' f w\n                let g' = gradclip g'\n                let b = (DV.normSq g') / (-p * g)\n                let p' = -g' + b * p\n                v', g', p'\n        /// Dai and Yuan 1999\n        | DaiYuanCG ->\n            fun w f g p gradclip ->\n                let v', g' = grad' f w\n                let g' = gradclip g'\n                let y = g' - g\n                let b = (DV.normSq g') / (p * y)\n                let p' = -g' + b * p\n                v', g', p'\n        /// Fletcher and Reeves 1964\n        | NonlinearCG ->\n            fun w f g p gradclip ->\n                let v', g' = grad' f w\n                let g' = gradclip g'\n                let b = (DV.normSq g') / (DV.normSq g)\n                let p' = -g' + b * p\n                v', g', p'\n       
 | NewtonCG ->\n            fun w f _ p gradclip ->\n                let v', g' = grad' f w\n                let g' = gradclip g'\n                let hv = hessianv f w p\n                let b = (g' * hv) / (p * hv)\n                let p' = -g' + b * p\n                v', g', p'\n        | Newton ->\n            fun w f _ _ gradclip ->\n                let v', g', h' = gradhessian' f w\n                let g' = gradclip g'\n                let p' = -DM.solveSymmetric h' g'\n                v', g', p'\n\n/// Momentum configuration\ntype Momentum =\n    /// Default momentum\n    | Momentum of D \n    /// Nesterov momentum\n    | Nesterov of D \n    | NoMomentum\n    static member DefaultMomentum = Momentum (D 0.9f)\n    static member DefaultNesterov = Nesterov (D 0.9f)\n    override m.ToString() =\n        match m with\n        | Momentum m -> sprintf \"Standard %A\" m\n        | Nesterov m -> sprintf \"Nesterov %A\" m\n        | NoMomentum -> \"None\"\n    member m.Func =\n        match m with\n        | Momentum m -> fun (u:DV) (u':DV) -> (m * u) + u'\n        | Nesterov m -> fun u u' -> (m * m * u) + (m + D 1.f) * u'\n        | NoMomentum -> fun _ u' -> u'\n\n/// Loss function configuration\ntype Loss =\n    /// L1 norm, least absolute deviations\n    | L1Loss\n    /// L2 norm\n    | L2Loss\n    /// L2 norm squared, least squares\n    | Quadratic \n    /// Cross entropy after linear layer\n    | CrossEntropyOnLinear\n    /// Cross entropy after softmax layer\n    | CrossEntropyOnSoftmax\n    override l.ToString() =\n        match l with\n        | L1Loss -> \"L1 norm, least absolute deviations\"\n        | L2Loss -> \"L2 norm\"\n        | Quadratic -> \"L2 norm squared, least squares\"\n        | CrossEntropyOnLinear -> \"Cross entropy after linear layer\"\n        | CrossEntropyOnSoftmax -> \"Cross entropy after softmax layer\"\n    member l.Func =\n        match l with\n        | L1Loss -> fun (d:Dataset) (f:DM->DM) -> ((d.Y - (f d.X)) |> DM.toCols |> 
Seq.sumBy DV.l1norm) / d.Length\n        | L2Loss -> fun d f -> ((d.Y - (f d.X)) |> DM.toCols |> Seq.sumBy DV.l2norm) / d.Length\n        | Quadratic -> fun d f -> ((d.Y - (f d.X)) |> DM.toCols |> Seq.sumBy DV.l2normSq) / d.Length\n        | CrossEntropyOnLinear -> fun d f -> ((f d.X) |> DM.mapiCols (fun i v -> toDV [(logsumexp v) - v.[d.Yi.[i]]]) |> DM.sum) / d.Length\n        | CrossEntropyOnSoftmax -> fun d f -> -((f d.X) |> DM.mapiCols (fun i v -> toDV [(DV.standardBasis v.Length d.Yi.[i]) * log v]) |> DM.sum) / d.Length\n\n/// Regularization configuration\ntype Regularization =\n    /// L1 regularization\n    | L1Reg of D\n    /// L2 regularization\n    | L2Reg of D \n    | NoReg\n    static member DefaultL1Reg = L1Reg (D 0.0001f)\n    static member DefaultL2Reg = L2Reg (D 0.0001f)\n    override r.ToString() =\n        match r with\n        | L1Reg l -> sprintf \"L1 lambda = %A\" l\n        | L2Reg l -> sprintf \"L2 lambda = %A\" l\n        | NoReg -> \"None\"\n    member r.Func =\n        match r with\n        | L1Reg l -> fun (w:DV) -> l * (DV.l1norm w)\n        | L2Reg l -> fun w -> l * (DV.l2normSq w)\n        | NoReg -> fun w -> D 0.f\n\n/// Gradient clipping configuration\ntype GradientClipping =\n    /// Norm clipping\n    | NormClip of D \n    | NoClip\n    static member DefaultNormClip = NormClip (D 1.f)\n    override g.ToString() =\n        match g with\n        | NormClip threshold -> sprintf \"Norm clipping threshold = %A\" threshold\n        | NoClip -> \"None\"\n    member g.Func =\n        match g with\n        | NormClip threshold -> fun (g:DV) -> let ng = DV.norm g in if ng > threshold then (threshold / ng) * g else g\n        | NoClip -> id\n\n/// Early stopping configuration\ntype EarlyStopping =\n    /// Stagnation patience, overfitting patience\n    | Early of int * int\n    | NoEarly\n    static member DefaultEarly = Early (750, 10)\n    override e.ToString() =\n        match e with\n        | Early(s, o) -> sprintf \"Stagnation thresh. 
= %A, overfit. thresh. = %A\" s o\n        | NoEarly -> \"None\"\n\n/// Record type holding optimization or training parameters\ntype Params =\n    {Epochs : int\n     Method: Method\n     LearningRate : LearningRate\n     Momentum : Momentum\n     Loss : Loss\n     Regularization : Regularization\n     GradientClipping : GradientClipping\n     Batch : Batch\n     EarlyStopping : EarlyStopping\n     ImprovementThreshold : D\n     Silent : bool\n     ReturnBest : bool\n     ValidationInterval : int\n     LoggingFunction : int->DV->D->unit}\n[<CompilationRepresentation(CompilationRepresentationFlags.ModuleSuffix)>] \nmodule Params =\n     let Default = {Epochs = 100\n                    LearningRate = LearningRate.DefaultRMSProp\n                    Momentum = NoMomentum\n                    Loss = L2Loss\n                    Regularization = Regularization.DefaultL2Reg\n                    GradientClipping = NoClip\n                    Method = GD\n                    Batch = Full\n                    EarlyStopping = NoEarly\n                    ImprovementThreshold = D 0.995f\n                    Silent = false\n                    ReturnBest = true\n                    ValidationInterval = 10\n                    LoggingFunction = fun _ _ _ -> ()}\n\n/// Main optimization module\ntype Optimize =\n    /// Minimize vector-to-scalar function `f`, starting from initial parameter vector `w0`. Uses the default optimization configuration in `Params.Default`.\n    static member Minimize (f:DV->D, w0:DV) = Optimize.Minimize(f, w0, Params.Default)\n    /// Minimize vector-to-scalar function `f`, starting from initial parameter vector `w0`. 
Uses the optimization configuration given in `par`.\n    static member Minimize (f:DV->D, w0:DV, par:Params) =\n        let dir = par.Method.Func\n        let lr = par.LearningRate.Func\n        let gradclip = par.GradientClipping.Func\n        let mom = par.Momentum.Func\n        let iters = \n            match par.LearningRate with\n            | Schedule a -> a.Length\n            | _ -> par.Epochs\n\n        if not par.Silent then\n            Util.printLog \"--- Minimization started\"\n            Util.printLog (sprintf \"Parameters     : %A\" w0.Length)\n            Util.printLog (sprintf \"Iterations     : %A\" iters)\n            Util.printLog (sprintf \"Valid. interval: %i\" par.ValidationInterval)\n            Util.printLog (sprintf \"Method         : %O\" par.Method)\n            Util.printLog (sprintf \"Learning rate  : %O\" par.LearningRate)\n            Util.printLog (sprintf \"Momentum       : %O\" par.Momentum)\n            Util.printLog (sprintf \"Gradient clip. : %O\" par.GradientClipping)\n            Util.printLog (sprintf \"Early stopping : %O\" par.EarlyStopping)\n            Util.printLog (sprintf \"Improv. 
thresh.: %A\" par.ImprovementThreshold)\n            Util.printLog (sprintf \"Return best    : %A\" par.ReturnBest)\n\n        let mutable i = 0\n        let mutable w = w0\n        let l, g = grad' f w0\n        let mutable l = l\n        let mutable l' = l\n        let mutable g = g\n        let mutable p = -g\n        let mutable u = DV.ZeroN g.Length\n        let gcache = ref DV.Zero\n\n        let l0 = l\n        let mutable wbest = w0\n        let mutable lbest = l0\n        let mutable repllast = l0\n        let mutable replbest = l0\n        let mutable replbestchar = \" \"\n\n        let mutable whist = []\n        let mutable lhist = []\n\n        let ldiffchar l = if l < D 0.f then \"↓\" elif l > D 0.f then \"↑\" else \"-\"\n\n        let ichars = iters.ToString().Length\n        \n        let mutable stagnation = -par.ValidationInterval\n        let mutable earlystop = false\n\n        let isNice (v:D) =\n            let vf = float32 v\n            if System.Single.IsNaN(vf) then false\n            elif System.Single.IsInfinity(vf) then false\n            elif System.Single.IsNegativeInfinity(vf) then false\n            elif System.Single.IsPositiveInfinity(vf) then false\n            else true\n\n        let mutable diverged = false\n\n        let start = System.DateTime.Now\n\n        while (i < iters) && (not earlystop) do\n            let l'', g', p' = dir w f g p gradclip\n            l' <- l''\n\n            if (not (isNice l')) then\n                if not par.Silent then Util.printLog \"*** MINIMIZATION DIVERGED: Function value is out of bounds ***\"\n                earlystop <- true\n                diverged <- true\n\n            if (l' < par.ImprovementThreshold * lbest) && (not diverged) then\n                wbest <- w\n                lbest <- l'\n\n            whist <- [w] @ whist\n            lhist <- [l] @ lhist\n\n            if i % par.ValidationInterval = 0 then\n                let repldiff = l' - repllast\n                if l' < 
par.ImprovementThreshold * replbest then\n                    replbest <- l'\n                    replbestchar <- \"▼\" \n                    stagnation <- 0\n                else \n                    replbestchar <- \" \"\n                    stagnation <- stagnation + par.ValidationInterval\n                    match par.EarlyStopping with\n                    | Early(s, _) ->\n                        if stagnation >= s then\n                            if not par.Silent then Util.printLog \"*** EARLY STOPPING TRIGGERED: Stagnation ***\"\n                            earlystop <- true\n                    | _ -> ()\n\n                if not par.Silent then \n                    match par.EarlyStopping with\n                    | Early(s, _) ->\n                        Util.printLog (sprintf \"%*i/%i | %O [%s%s] | Stag:%*i\" ichars (i + 1) iters l' (ldiffchar repldiff) replbestchar (s.ToString().Length) stagnation)\n                    | _ ->\n                        Util.printLog (sprintf \"%*i/%i | %O [%s%s]\" ichars (i + 1) iters l' (ldiffchar repldiff) replbestchar)\n\n                repllast <- l'\n                par.LoggingFunction i w l'\n\n            let mutable u' = DV.Zero\n            match lr i w f l' g' gcache p' with\n            | :? D as a -> u' <- a * p'; // A scalar learning rate\n            | :? 
DV as a -> u' <- a .* p'; // Vector of independent learning rates\n\n            u' <- mom u u'\n\n            w <- w + u'\n            l <- l'\n            g <- g'\n            p <- p' // Or, p <- u'\n            u <- u'\n            i <- i + 1\n\n        if not diverged then\n            let l'', _, _ = dir w f g p gradclip\n            l' <- l''\n            if l' < par.ImprovementThreshold * lbest then\n                wbest <- w\n                lbest <- l'\n\n        let duration = System.DateTime.Now.Subtract(start)\n\n        let wfinal = if par.ReturnBest || diverged then wbest else w\n        let lfinal = if par.ReturnBest || diverged then lbest else l'\n\n        let lchg = (lfinal - l0)\n        let lchgs = lchg / (float32 duration.TotalSeconds)\n        let es = (float i) / (duration.TotalSeconds)\n        let em = (float i) / (duration.TotalMinutes)\n\n        if not par.Silent then\n            Util.printLog (sprintf \"Duration       : %A\" duration)\n            Util.printLog (sprintf \"Value initial  : %O\" (primal l0))\n            Util.printLog (sprintf \"Value final    : %O %s\" (primal lfinal) (if par.ReturnBest then \"(Best)\" else \"(Last)\"))\n            Util.printLog (sprintf \"Value change   : %O (%.2f %%)\" (primal lchg) (float32 (100 * lchg /l0)))\n            Util.printLog (sprintf \"Value chg. / s : %O\" (primal lchgs))\n            Util.printLog (sprintf \"Iter. / s      : %A\" es)\n            Util.printLog (sprintf \"Iter. / min    : %A\" em)\n            Util.printLog \"--- Minimization finished\"\n        wfinal, lfinal, (whist |> List.rev |> List.toArray), (lhist |> List.rev |> List.toArray)\n\n    /// Train model function `f`, starting from initial parameter vector `w0`, by computing the loss for the training data given in dataset `d`. 
Uses the default optimization configuration in `Params.Default`\n    static member Train (f:DV->DV->D,  w0:DV, d:Dataset) = Optimize.Train((fun w v -> toDV [f w v]), w0, d)\n    /// Train model function `f`, starting from initial parameter vector `w0`, by computing the loss for the training data given in dataset `d`. Uses the optimization configuration given in `par`.\n    static member Train (f:DV->DV->D,  w0:DV, d:Dataset, par:Params) = Optimize.Train((fun w v -> toDV [f w v]), w0, d, par)\n    /// Train model function `f`, starting from initial parameter vector `w0`, by computing the loss for the training data given in dataset `d` and also monitoring the loss for the validation data given in dataset `v`. Uses the default optimization configuration in `Params.Default`\n    static member Train (f:DV->DV->D,  w0:DV, d:Dataset, v:Dataset) = Optimize.Train((fun w v -> toDV [f w v]), w0, d, v)\n    /// Train model function `f`, starting from initial parameter vector `w0`, by computing the loss for the training data given in dataset `d` and also monitoring the loss for the validation data given in dataset `v`. Uses the optimization configuration given in `par`.\n    static member Train (f:DV->DV->D,  w0:DV, d:Dataset, v:Dataset, par:Params) = Optimize.Train((fun w v -> toDV [f w v]), w0, d, v, par)\n    /// Train model function `f`, starting from initial parameter vector `w0`, by computing the loss for the training data given in dataset `d`. Uses the default optimization configuration in `Params.Default`.\n    static member Train (f:DV->DV->DV, w0:DV, d:Dataset) = Optimize.Train(f, w0, d, Dataset.empty, Params.Default)\n    /// Train model function `f`, starting from initial parameter vector `w0`, by computing the loss for the training data given in dataset `d`. 
Uses the optimization configuration given in `par`.\n    static member Train (f:DV->DV->DV, w0:DV, d:Dataset, par:Params) = Optimize.Train(f, w0, d, Dataset.empty, par)\n    /// Train model function `f`, starting from initial parameter vector `w0`, by computing the loss for the training data given in dataset `d`, and also monitoring the loss for the validation data given in dataset `v`. Uses the default optimization configuration in `Params.Default`.\n    static member Train (f:DV->DV->DV, w0:DV, d:Dataset, v:Dataset) = Optimize.Train(f, w0, d, v, Params.Default)\n    /// Train model function `f`, starting from initial parameter vector `w0`, by computing the loss for the training data given in dataset `d`, and also monitoring the loss for the validation data given in dataset `v`. Uses the optimization configuration given in `par`.\n    static member Train (f:DV->DV->DV, w0:DV, d:Dataset, v:Dataset, par:Params) = Optimize.Train (f >> DM.mapCols, w0, d, v, par)\n    /// Train model function `f`, starting from initial parameter vector `w0`, by computing the loss for the training data given in dataset `d`. Uses the default optimization configuration in `Params.Default`.\n    static member Train (f:DV->DM->DM, w0:DV, d:Dataset) = Optimize.Train(f, w0, d, Dataset.empty, Params.Default)\n    /// Train model function `f`, starting from initial parameter vector `w0`, by computing the loss for the training data given in dataset `d`. Uses the optimization configuration given in `par`.\n    static member Train (f:DV->DM->DM, w0:DV, d:Dataset, par:Params) = Optimize.Train(f, w0, d, Dataset.empty, par)\n    /// Train model function `f`, starting from initial parameter vector `w0`, by computing the loss for the training data given in dataset `d`, and also monitoring the loss for the validation data given in dataset `v`. 
Uses the default optimization configuration in `Params.Default`.\n    static member Train (f:DV->DM->DM, w0:DV, d:Dataset, v:Dataset) = Optimize.Train(f, w0, d, v, Params.Default)\n    /// Train model function `f`, starting from initial parameter vector `w0`, by computing the loss for the training data given in dataset `d`, and also monitoring the loss for the validation data given in dataset `v`. Uses the optimization configuration given in `par`.\n    static member Train (f:DV->DM->DM, w0:DV, d:Dataset, v:Dataset, par:Params) =\n        let b = par.Batch.Func\n        let dir = par.Method.Func\n        let lr = par.LearningRate.Func\n        let gradclip = par.GradientClipping.Func\n        let mom = par.Momentum.Func\n        let reg = par.Regularization.Func\n        let epochs =\n            match par.LearningRate with\n                | Schedule l -> l.Length\n                | _ -> par.Epochs\n        let loss = par.Loss.Func\n        let batches, batchsize =\n            match par.Batch with\n            | Full -> 1, d.Length\n            | Minibatch n -> d.Length / n, n\n            | Stochastic -> d.Length, 1\n        let iters = epochs * batches\n\n        if not par.Silent then\n            Util.printLog \"--- Training started\"\n            Util.printLog (sprintf \"Parameters     : %A\" w0.Length)\n            Util.printLog (sprintf \"Iterations     : %A\" iters)\n            Util.printLog (sprintf \"Epochs         : %A\" epochs)\n            Util.printLog (sprintf \"Batches        : %O (%A per epoch)\" par.Batch batches)\n            Util.printLog (sprintf \"Training data  : %i\" d.Length)\n            if Dataset.isEmpty v then\n                Util.printLog (sprintf \"Validation data: None\")\n            else\n                Util.printLog (sprintf \"Validation data: %i\" v.Length)\n            Util.printLog (sprintf \"Valid. 
interval: %i\" par.ValidationInterval)\n            Util.printLog (sprintf \"Method         : %O\" par.Method)\n            Util.printLog (sprintf \"Learning rate  : %O\" par.LearningRate)\n            Util.printLog (sprintf \"Momentum       : %O\" par.Momentum)\n            Util.printLog (sprintf \"Loss           : %O\" par.Loss)\n            Util.printLog (sprintf \"Regularizer    : %O\" par.Regularization)\n            Util.printLog (sprintf \"Gradient clip. : %O\" par.GradientClipping)\n            Util.printLog (sprintf \"Early stopping : %O\" par.EarlyStopping)\n            Util.printLog (sprintf \"Improv. thresh.: %A\" par.ImprovementThreshold)\n            Util.printLog (sprintf \"Return best    : %A\" par.ReturnBest)\n\n        let q i w = (loss (b d i) (f w)) + reg w\n\n        let qvalid =\n            if Dataset.isEmpty v then\n                fun _ -> D 0.f\n            else\n                fun w -> (loss v (f w)) + reg w\n\n        // i  : epoch\n        // w  : previous weights\n        // w' : new weights\n        // l  : previous loss\n        // l' : new loss\n        // g  : previous gradient\n        // g' : next gradient\n        // p  : previous direction\n        // p' : next direction\n        // u  : previous velocity\n        // u' : next velocity\n\n        let mutable epoch = 0\n        let mutable batch = 0\n        let mutable w = w0\n        let l, g = grad' (q 0) w0\n        let mutable l = l\n        let mutable l' = l\n        let mutable g = g\n        let mutable p = -g\n        let mutable u = DV.ZeroN g.Length\n        let gcache = ref DV.Zero\n\n        let l0 = l\n        let mutable wbest = w0\n        let mutable lbest = l0\n        let mutable repllast= l0\n        let mutable replbest = l0\n        let mutable replbestchar = \" \"\n\n        let mutable repvllast = if Dataset.isEmpty v then D 0.f else qvalid w0\n        let mutable repvlbest = repvllast\n        let mutable repvlbestchar = \" \"\n\n        let ldiffchar 
l = if l < D 0.f then \"↓\" elif l > D 0.f then \"↑\" else \"-\"\n        \n        let mutable whist = []\n        let mutable lhist = []\n\n        let mutable stagnation = -par.ValidationInterval\n        let mutable overfitting = 0\n        let mutable validlimproved = false\n        let mutable earlystop = false\n\n        let echars = epochs.ToString().Length\n        let bchars = batches.ToString().Length\n        let ichars = (epochs * d.Length).ToString().Length\n\n        let isNice (v:D) =\n            let vf = float32 v\n            if System.Single.IsNaN(vf) then false\n            elif System.Single.IsInfinity(vf) then false\n            elif System.Single.IsNegativeInfinity(vf) then false\n            elif System.Single.IsPositiveInfinity(vf) then false\n            else true\n\n        let mutable diverged = false\n\n        let start = System.DateTime.Now\n\n        while (epoch < epochs) && (not earlystop) do\n            batch <- 0\n            while (batch < batches) && (not earlystop) do\n                let l'', g', p' = dir w (q batch) g p gradclip\n                l' <- l''\n\n                if (not (isNice l')) then\n                    if not par.Silent then Util.printLog \"*** TRAINING DIVERGED: Loss is out of bounds ***\"\n                    earlystop <- true\n                    diverged <- true\n\n                whist <- w :: whist\n                lhist <- l :: lhist\n\n                if (l' < par.ImprovementThreshold * lbest) && (not diverged) then\n                    wbest <- w\n                    lbest <- l'\n                    if not (Dataset.isEmpty v) then\n                        if not validlimproved then \n                            overfitting <- overfitting + 1\n                            match par.EarlyStopping with\n                            | Early(_, o) ->\n                                if overfitting >= o then \n                                    if not par.Silent then Util.printLog \"*** EARLY STOPPING 
TRIGGERED: Overfitting ***\"\n                                    earlystop <- true\n                            | _ -> ()\n\n                if batch % par.ValidationInterval = 0 then\n                    let repldiff = l' - repllast\n                    if l' < par.ImprovementThreshold * replbest then\n                        replbest <- l'\n                        replbestchar <- \"▼\" \n                    else \n                        replbestchar <- \" \"\n                        if Dataset.isEmpty v then\n                            stagnation <- stagnation + par.ValidationInterval\n                            match par.EarlyStopping with\n                            | Early(s, _) ->\n                                if stagnation >= s then\n                                    if not par.Silent then Util.printLog \"*** EARLY STOPPING TRIGGERED: Stagnation of training loss ***\"\n                                    earlystop <- true\n                            | _ -> ()\n                    repllast <- l'\n\n                    if Dataset.isEmpty v then\n                        if not par.Silent then\n                            match par.EarlyStopping with\n                            | Early(s, _) ->\n                                Util.printLog (sprintf \"%*i/%i | Batch %*i/%i | %O [%s%s] | Stag:%*i\" echars (epoch + 1) epochs bchars (batch + 1) batches l' (ldiffchar repldiff) replbestchar (s.ToString().Length) stagnation)\n                            | _ ->\n                                Util.printLog (sprintf \"%*i/%i | Batch %*i/%i | %O [%s%s]\" echars (epoch + 1) epochs bchars (batch + 1) batches l' (ldiffchar repldiff) replbestchar)\n                    else\n                        let vl' = qvalid w\n                        let repvldiff = vl' - repvllast\n                        if vl' < par.ImprovementThreshold * repvlbest then\n                            repvlbest <- vl'\n                            repvlbestchar <- \"▼\"\n                   
         validlimproved <- true\n                            stagnation <- 0\n                            overfitting <- 0\n                        else\n                            repvlbestchar <- \" \"\n                            validlimproved <- false\n                            stagnation <- stagnation + par.ValidationInterval\n                            match par.EarlyStopping with\n                                | Early(s, _) -> \n                                    if stagnation >= s then \n                                        if not par.Silent then Util.printLog \"*** EARLY STOPPING TRIGGERED: Stagnation of validation loss ***\"\n                                        earlystop <- true\n                                | _ -> ()\n\n                        if not par.Silent then\n                            match par.EarlyStopping with\n                            | Early(s, o) -> \n                                Util.printLog (sprintf \"%*i/%i | Batch %*i/%i | %O [%s%s] | Valid %O [%s%s] | Stag:%*i Ovfit:%*i\" echars (epoch + 1) epochs bchars (batch + 1) batches l' (ldiffchar repldiff) replbestchar vl' (ldiffchar repvldiff) repvlbestchar (s.ToString().Length) stagnation (o.ToString().Length) overfitting)\n                            | _ ->\n                                Util.printLog (sprintf \"%*i/%i | Batch %*i/%i | %O [%s%s] | Valid %O [%s%s]\" echars (epoch + 1) epochs bchars (batch + 1) batches l' (ldiffchar repldiff) replbestchar vl' (ldiffchar repvldiff) repvlbestchar)\n                        repvllast <- vl'\n                        par.LoggingFunction epoch w l'\n\n                let mutable u' = DV.Zero\n                match lr epoch w (q batch) l' g' gcache p' with\n                | :? D as a -> u' <- a * p'  // A scalar learning rate\n                | :? 
DV as a -> u' <- a .* p' // Vector of independent learning rates\n\n                u' <- mom u u'\n\n                w <- w + u'\n                l <- l'\n                g <- g'\n                p <- p' // Or, p <- u'\n                u <- u'\n                batch <- batch + 1\n                let iter = batches * epoch + batch\n                if iter >= iters then earlystop <- true\n\n            epoch <- epoch + 1\n\n        if not diverged then\n            let l'', _, _ = dir w (q 0) g p gradclip\n            l' <- l''\n            if l' < par.ImprovementThreshold * lbest then\n                wbest <- w\n                lbest <- l'\n\n        let duration = System.DateTime.Now.Subtract(start)\n          \n        let wfinal = if par.ReturnBest || diverged then wbest else w\n        let lfinal = if par.ReturnBest || diverged then lbest else l'\n\n        let lchg = (lfinal - l0)\n        let lchgs = lchg / (float32 duration.TotalSeconds)\n        let es = (float epoch) / (duration.TotalSeconds)\n        let em = (float epoch) / (duration.TotalMinutes)\n        if not par.Silent then\n            Util.printLog (sprintf \"Duration       : %A\" duration)\n            Util.printLog (sprintf \"Loss initial   : %O\" (primal l0))\n            Util.printLog (sprintf \"Loss final     : %O %s\" (primal lfinal) (if par.ReturnBest then \"(Best)\" else \"(Last)\"))\n            Util.printLog (sprintf \"Loss change    : %O (%.2f %%)\" (primal lchg) (float32 (100 * (lchg) / l0)))\n            Util.printLog (sprintf \"Loss chg. / s  : %O\" (primal lchgs))\n            Util.printLog (sprintf \"Epochs / s     : %A\" es)\n            Util.printLog (sprintf \"Epochs / min   : %A\" em)\n            Util.printLog \"--- Training finished\"\n        wfinal, lfinal, (whist |> List.rev |> List.toArray), (lhist |> List.rev |> List.toArray)\n        "
  },
  {
    "path": "src/Hype/app.config",
    "content": "﻿<?xml version=\"1.0\" encoding=\"utf-8\"?>\n<configuration>\n  <runtime>\n    \n  <assemblyBinding xmlns=\"urn:schemas-microsoft-com:asm.v1\">\n  <dependentAssembly>\n    <Paket>True</Paket>\n    <assemblyIdentity name=\"System.Security.Cryptography.Pkcs\" publicKeyToken=\"b03f5f7f11d50a3a\" culture=\"neutral\" />\n    <bindingRedirect oldVersion=\"0.0.0.0-65535.65535.65535.65535\" newVersion=\"4.0.3.2\" />\n  </dependentAssembly>\n  <dependentAssembly>\n    <Paket>True</Paket>\n    <assemblyIdentity name=\"System.ServiceModel.Duplex\" publicKeyToken=\"b03f5f7f11d50a3a\" culture=\"neutral\" />\n    <bindingRedirect oldVersion=\"0.0.0.0-65535.65535.65535.65535\" newVersion=\"4.5.0.3\" />\n  </dependentAssembly>\n  <dependentAssembly>\n    <Paket>True</Paket>\n    <assemblyIdentity name=\"System.ServiceModel.Http\" publicKeyToken=\"b03f5f7f11d50a3a\" culture=\"neutral\" />\n    <bindingRedirect oldVersion=\"0.0.0.0-65535.65535.65535.65535\" newVersion=\"4.5.0.3\" />\n  </dependentAssembly>\n  <dependentAssembly>\n    <Paket>True</Paket>\n    <assemblyIdentity name=\"System.ServiceModel.NetTcp\" publicKeyToken=\"b03f5f7f11d50a3a\" culture=\"neutral\" />\n    <bindingRedirect oldVersion=\"0.0.0.0-65535.65535.65535.65535\" newVersion=\"4.5.0.3\" />\n  </dependentAssembly>\n  <dependentAssembly>\n    <Paket>True</Paket>\n    <assemblyIdentity name=\"System.ServiceModel.Primitives\" publicKeyToken=\"b03f5f7f11d50a3a\" culture=\"neutral\" />\n    <bindingRedirect oldVersion=\"0.0.0.0-65535.65535.65535.65535\" newVersion=\"4.5.0.3\" />\n  </dependentAssembly>\n  <dependentAssembly>\n    <Paket>True</Paket>\n    <assemblyIdentity name=\"System.ServiceModel.Security\" publicKeyToken=\"b03f5f7f11d50a3a\" culture=\"neutral\" />\n    <bindingRedirect oldVersion=\"0.0.0.0-65535.65535.65535.65535\" newVersion=\"4.5.0.3\" />\n  </dependentAssembly>\n  <dependentAssembly>\n    <Paket>True</Paket>\n    <assemblyIdentity name=\"System.ServiceModel.Syndication\" 
publicKeyToken=\"cc7b13ffcd2ddd51\" culture=\"neutral\" />\n    <bindingRedirect oldVersion=\"0.0.0.0-65535.65535.65535.65535\" newVersion=\"4.0.0.0\" />\n  </dependentAssembly>\n  <dependentAssembly>\n    <Paket>True</Paket>\n    <assemblyIdentity name=\"FSharp.Core\" publicKeyToken=\"b03f5f7f11d50a3a\" culture=\"neutral\" />\n    <bindingRedirect oldVersion=\"0.0.0.0-65535.65535.65535.65535\" newVersion=\"4.7.0.0\" />\n  </dependentAssembly>\n</assemblyBinding></runtime>\n</configuration>"
  },
  {
    "path": "src/Hype/paket.references",
    "content": "DiffSharp\nFSharp.Core\nSystem.Drawing.Common"
  }
]