Repository: hypelib/Hype Branch: master Commit: e467e926da4c Files: 42 Total size: 398.3 KB Directory structure: gitextract__bdmbe28/ ├── .gitattributes ├── .gitignore ├── .paket/ │ ├── Paket.Restore.targets │ └── paket.targets ├── Hype.sln ├── LICENSE.txt ├── README.md ├── Roadmap.txt ├── docs/ │ ├── .gitignore │ ├── BuildDocs.fsx │ └── input/ │ ├── FeedforwardNets.fsx │ ├── HMC.fsx │ ├── Optimization.fsx │ ├── RecurrentNets.fsx │ ├── Regression.fsx │ ├── Training.fsx │ ├── download.fsx │ ├── files/ │ │ └── misc/ │ │ ├── style.css │ │ ├── style_light.css │ │ └── tips.js │ ├── housing.data │ ├── index.fsx │ ├── resources/ │ │ └── Hype.pspimage │ └── templates/ │ ├── docpage.cshtml │ ├── reference/ │ │ ├── module.cshtml │ │ ├── namespaces.cshtml │ │ ├── part-members.cshtml │ │ ├── part-nested.cshtml │ │ └── type.cshtml │ ├── template.cshtml │ └── template.html ├── paket.dependencies └── src/ └── Hype/ ├── AssemblyInfo.fs ├── Classifier.fs ├── Hype.fs ├── Hype.fsproj ├── Inference.fs ├── NLP.fs ├── Neural.fs ├── Optimize.fs ├── app.config └── paket.references ================================================ FILE CONTENTS ================================================ ================================================ FILE: .gitattributes ================================================ ############################################################################### # Set default behavior to automatically normalize line endings. ############################################################################### * text=auto ############################################################################### # Set default behavior for command prompt diff. # # This is need for earlier builds of msysgit that does not have it on by # default for csharp files. # Note: This is only used by command line ############################################################################### #*.cs diff=csharp ############################################################################### # Set the merge driver for project and solution files # # Merging from the command prompt will add diff markers to the files if there # are conflicts (Merging from VS is not affected by the settings below, in VS # the diff markers are never inserted). Diff markers may cause the following # file extensions to fail to load in VS. An alternative would be to treat # these files as binary and thus will always conflict and require user # intervention with every merge. To do so, just uncomment the entries below ############################################################################### #*.sln merge=binary #*.csproj merge=binary #*.vbproj merge=binary #*.vcxproj merge=binary #*.vcproj merge=binary #*.dbproj merge=binary #*.fsproj merge=binary #*.lsproj merge=binary #*.wixproj merge=binary #*.modelproj merge=binary #*.sqlproj merge=binary #*.wwaproj merge=binary ############################################################################### # behavior for image files # # image files are treated as binary by default. ############################################################################### #*.jpg binary #*.png binary #*.gif binary ############################################################################### # diff behavior for common document formats # # Convert binary document formats to text before diffing them. This feature # is only available from the command line. Turn it on by uncommenting the # entries below. 
############################################################################### #*.doc diff=astextplain #*.DOC diff=astextplain #*.docx diff=astextplain #*.DOCX diff=astextplain #*.dot diff=astextplain #*.DOT diff=astextplain #*.pdf diff=astextplain #*.PDF diff=astextplain #*.rtf diff=astextplain #*.RTF diff=astextplain ================================================ FILE: .gitignore ================================================ ## Ignore Visual Studio temporary files, build results, and ## files generated by popular Visual Studio add-ons. # User-specific files *.suo *.user *.sln.docstates # Build results [Dd]ebug/ [Rr]elease/ x64/ build/ [Bb]in/ [Oo]bj/ # Enable "build/" folder in the NuGet Packages folder since NuGet packages use it for MSBuild targets !packages/*/build/ # MSTest test Results [Tt]est[Rr]esult*/ [Bb]uild[Ll]og.* *_i.c *_p.c *.ilk *.meta *.obj *.pch *.pdb *.pgc *.pgd *.rsp *.sbr *.tlb *.tli *.tlh *.tmp *.tmp_proj *.log *.vspscc *.vssscc .builds *.pidb *.log *.scc # Visual C++ cache files ipch/ *.aps *.ncb *.opensdf *.sdf *.cachefile # Visual Studio profiler *.psess *.vsp *.vspx # Guidance Automation Toolkit *.gpState # ReSharper is a .NET coding add-in _ReSharper*/ *.[Rr]e[Ss]harper # TeamCity is a build add-in _TeamCity* # DotCover is a Code Coverage Tool *.dotCover # NCrunch *.ncrunch* .*crunch*.local.xml # Installshield output folder [Ee]xpress/ # DocProject is a documentation generator add-in DocProject/buildhelp/ DocProject/Help/*.HxT DocProject/Help/*.HxC DocProject/Help/*.hhc DocProject/Help/*.hhk DocProject/Help/*.hhp DocProject/Help/Html2 DocProject/Help/html # Click-Once directory publish/ # Publish Web Output *.Publish.xml # NuGet Packages Directory ## TODO: If you have NuGet Package Restore enabled, uncomment the next line packages/ # Windows Azure Build Output csx *.build.csdef # Windows Store app package directory AppPackages/ # Others sql/ *.Cache ClientBin/ [Ss]tyle[Cc]op.* ~$* *~ *.dbmdl *.[Pp]ublish.xml *.pfx *.publishsettings # RIA/Silverlight projects Generated_Code/ # Backup & report files from converting an old project file to a newer # Visual Studio version. 
Backup files are not needed, because we have git ;-) _UpgradeReport_Files/ Backup*/ UpgradeLog*.XML UpgradeLog*.htm # SQL Server files App_Data/*.mdf App_Data/*.ldf #LightSwitch generated files GeneratedArtifacts/ _Pvt_Extensions/ ModelManifest.xml # ========================= # Windows detritus # ========================= # Windows image file caches Thumbs.db ehthumbs.db # Folder config file Desktop.ini # Recycle Bin used on file shares $RECYCLE.BIN/ # Mac desktop service store files .DS_Store ================================================ FILE: .paket/Paket.Restore.targets ================================================ $(MSBuildAllProjects);$(MSBuildThisFileFullPath) $(MSBuildVersion) 15.0.0 false true true $(MSBuildThisFileDirectory) $(MSBuildThisFileDirectory)..\ $(PaketRootPath)paket-files\paket.restore.cached $(PaketRootPath)paket.lock classic proj assembly native /Library/Frameworks/Mono.framework/Commands/mono mono $(PaketRootPath)paket.bootstrapper.exe $(PaketToolsPath)paket.bootstrapper.exe $([System.IO.Path]::GetDirectoryName("$(PaketBootStrapperExePath)"))\ "$(PaketBootStrapperExePath)" $(MonoPath) --runtime=v4.0.30319 "$(PaketBootStrapperExePath)" true true True False $(BaseIntermediateOutputPath.TrimEnd('\').TrimEnd('\/')) $(PaketRootPath)paket $(PaketToolsPath)paket $(PaketRootPath)paket.exe $(PaketToolsPath)paket.exe <_DotnetToolsJson Condition="Exists('$(PaketRootPath)/.config/dotnet-tools.json')">$([System.IO.File]::ReadAllText("$(PaketRootPath)/.config/dotnet-tools.json")) <_ConfigContainsPaket Condition=" '$(_DotnetToolsJson)' != ''">$(_DotnetToolsJson.Contains('"paket"')) <_ConfigContainsPaket Condition=" '$(_ConfigContainsPaket)' == ''">false <_PaketCommand>dotnet paket $(PaketToolsPath)paket $(PaketBootStrapperExeDir)paket paket <_PaketExeExtension>$([System.IO.Path]::GetExtension("$(PaketExePath)")) <_PaketCommand Condition=" '$(_PaketCommand)' == '' AND '$(_PaketExeExtension)' == '.dll' ">dotnet "$(PaketExePath)" <_PaketCommand Condition=" '$(_PaketCommand)' == '' AND '$(OS)' != 'Windows_NT' AND '$(_PaketExeExtension)' == '.exe' ">$(MonoPath) --runtime=v4.0.30319 "$(PaketExePath)" <_PaketCommand Condition=" '$(_PaketCommand)' == '' ">"$(PaketExePath)" true $(NoWarn);NU1603;NU1604;NU1605;NU1608 false true $([System.IO.File]::ReadAllText('$(PaketRestoreCacheFile)')) $([System.Text.RegularExpressions.Regex]::Split(`%(Identity)`, `": "`)[0].Replace(`"`, ``).Replace(` `, ``)) $([System.Text.RegularExpressions.Regex]::Split(`%(Identity)`, `": "`)[1].Replace(`"`, ``).Replace(` `, ``)) %(PaketRestoreCachedKeyValue.Value) %(PaketRestoreCachedKeyValue.Value) true false true true $(PaketIntermediateOutputPath)\$(MSBuildProjectFile).paket.references.cached $(MSBuildProjectFullPath).paket.references $(MSBuildProjectDirectory)\$(MSBuildProjectName).paket.references $(MSBuildProjectDirectory)\paket.references false true true references-file-or-cache-not-found $([System.IO.File]::ReadAllText('$(PaketReferencesCachedFilePath)')) $([System.IO.File]::ReadAllText('$(PaketOriginalReferencesFilePath)')) references-file false false true target-framework '$(TargetFramework)' or '$(TargetFrameworks)' files @(PaketResolvedFilePaths) false true $([System.String]::Copy('%(PaketReferencesFileLines.Identity)').Split(',').Length) $([System.String]::Copy('%(PaketReferencesFileLines.Identity)').Split(',')[0]) $([System.String]::Copy('%(PaketReferencesFileLines.Identity)').Split(',')[1]) $([System.String]::Copy('%(PaketReferencesFileLines.Identity)').Split(',')[4]) 
$([System.String]::Copy('%(PaketReferencesFileLines.Identity)').Split(',')[5]) %(PaketReferencesFileLinesInfo.PackageVersion) All runtime runtime true true $(PaketIntermediateOutputPath)/$(MSBuildProjectFile).paket.clitools $([System.String]::Copy('%(PaketCliToolFileLines.Identity)').Split(',')[0]) $([System.String]::Copy('%(PaketCliToolFileLines.Identity)').Split(',')[1]) %(PaketCliToolFileLinesInfo.PackageVersion) false <_NuspecFilesNewLocation Include="$(PaketIntermediateOutputPath)\$(Configuration)\*.nuspec"/> $(MSBuildProjectDirectory)/$(MSBuildProjectFile) true false true false true false true false true $(PaketIntermediateOutputPath)\$(Configuration) $(PaketIntermediateOutputPath) <_NuspecFiles Include="$(AdjustedNuspecOutputPath)\*.$(PackageVersion.Split(`+`)[0]).nuspec"/> ================================================ FILE: .paket/paket.targets ================================================ true true $(MSBuildThisFileDirectory) $(MSBuildThisFileDirectory)..\ $(PaketToolsPath)paket.exe $(PaketToolsPath)paket.bootstrapper.exe "$(PaketExePath)" mono --runtime=v4.0.30319 "$(PaketExePath)" "$(PaketBootStrapperExePath)" mono --runtime=v4.0.30319 $(PaketBootStrapperExePath) $(PaketCommand) restore $(PaketBootStrapperCommand) RestorePackages; $(BuildDependsOn); ================================================ FILE: Hype.sln ================================================  Microsoft Visual Studio Solution File, Format Version 12.00 # Visual Studio Version 16 VisualStudioVersion = 16.0.29009.5 MinimumVisualStudioVersion = 10.0.40219.1 Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = ".paket", ".paket", "{B7FB3383-EF19-4645-986C-72D50C08F292}" ProjectSection(SolutionItems) = preProject paket.dependencies = paket.dependencies EndProjectSection EndProject Project("{F2A71F9B-5D33-465A-A702-920D77279786}") = "Hype", "src\Hype\Hype.fsproj", "{C923664D-182E-48D5-BB30-F1505D7D28DF}" EndProject Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "docs", "docs", "{56DA870A-0ED4-47A2-B78B-34A8D4D6AD28}" ProjectSection(SolutionItems) = preProject docs\BuildDocs.fsx = docs\BuildDocs.fsx docs\input\download.fsx = docs\input\download.fsx docs\input\FeedforwardNets.fsx = docs\input\FeedforwardNets.fsx docs\input\HMC.fsx = docs\input\HMC.fsx docs\input\index.fsx = docs\input\index.fsx docs\input\Optimization.fsx = docs\input\Optimization.fsx docs\input\RecurrentNets.fsx = docs\input\RecurrentNets.fsx docs\input\Regression.fsx = docs\input\Regression.fsx docs\input\Training.fsx = docs\input\Training.fsx EndProjectSection EndProject Global GlobalSection(SolutionConfigurationPlatforms) = preSolution Debug|Any CPU = Debug|Any CPU Debug|x64 = Debug|x64 Release|Any CPU = Release|Any CPU Release|x64 = Release|x64 EndGlobalSection GlobalSection(ProjectConfigurationPlatforms) = postSolution {C923664D-182E-48D5-BB30-F1505D7D28DF}.Debug|Any CPU.ActiveCfg = Debug|Any CPU {C923664D-182E-48D5-BB30-F1505D7D28DF}.Debug|Any CPU.Build.0 = Debug|Any CPU {C923664D-182E-48D5-BB30-F1505D7D28DF}.Debug|x64.ActiveCfg = Debug|Any CPU {C923664D-182E-48D5-BB30-F1505D7D28DF}.Debug|x64.Build.0 = Debug|Any CPU {C923664D-182E-48D5-BB30-F1505D7D28DF}.Release|Any CPU.ActiveCfg = Release|Any CPU {C923664D-182E-48D5-BB30-F1505D7D28DF}.Release|Any CPU.Build.0 = Release|Any CPU {C923664D-182E-48D5-BB30-F1505D7D28DF}.Release|x64.ActiveCfg = Release|Any CPU {C923664D-182E-48D5-BB30-F1505D7D28DF}.Release|x64.Build.0 = Release|Any CPU EndGlobalSection GlobalSection(SolutionProperties) = preSolution HideSolutionNode = FALSE EndGlobalSection 
GlobalSection(ExtensibilityGlobals) = postSolution SolutionGuid = {028AF435-B43C-4E8E-8A82-4A65AF666086} EndGlobalSection EndGlobal ================================================ FILE: LICENSE.txt ================================================ The MIT License (MIT) Copyright (c) 2015, National University of Ireland Maynooth (Atilim Gunes Baydin, Barak A. Pearlmutter) Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. ================================================ FILE: README.md ================================================ Hype: Compositional Machine Learning and Hyperparameter Optimization -------------------------------------------------------------------- Hype is a proof-of-concept deep learning library, where you can perform optimization on compositional machine learning systems of many components, even when such components themselves internally perform optimization. It is developed by [Atılım Güneş Baydin](http://www.cs.nuim.ie/~gunes/) and [Barak A. Pearlmutter](http://bcl.hamilton.ie/~barak/), at the [Brain and Computation Lab](http://www.bcl.hamilton.ie/), National University of Ireland Maynooth. This work is supported by Science Foundation Ireland grant 09/IN.1/I2637. Please visit the [project website](http://hypelib.github.io/Hype/) for documentation and tutorials. You can come and join the Gitter chat room, if you want to chat with us: [![Join the chat at https://gitter.im/hypelib/Hype](https://badges.gitter.im/Join%20Chat.svg)](https://gitter.im/hypelib/Hype?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge) ### Project statistics [![Issue Stats](http://issuestats.com/github/hypelib/Hype/badge/pr?style=flat-square)](http://issuestats.com/github/hypelib/Hype) [![Issue Stats](http://issuestats.com/github/hypelib/Hype/badge/issue?style=flat-square)](http://issuestats.com/github/hypelib/Hype) ### Current build status [![Build status](https://ci.appveyor.com/api/projects/status/w1xgcleb1x4f30c0?svg=true)](https://ci.appveyor.com/project/gbaydin/hype) ### License Hype is released under the MIT license. 
================================================ FILE: Roadmap.txt ================================================ - CUDA backend (DiffSharp) - Example for Hamiltonian MCMC - Probabilistic inference - Convolutional neural networks (ideally with DiffSharp tensor) - Saving and loading models using a standard format - Improve code comments - Add references to research papers where relevant - Add ability to read and write MATLAB files (scipy.io loadmat, savemat) - Add ability to read and write FSL nifti files for fMRI (PyMVPA2, SampleAttributes, fmri_dataset, poly_detrend, zscore) - Add ability to read and write standard image/video formats (OpenCV, MATLAB) - Better integration with graph libraries (box plots, bar graphs, confusion matrix plots, write to .png support) ================================================ FILE: docs/.gitignore ================================================ output/ ================================================ FILE: docs/BuildDocs.fsx ================================================ // // This file is part of // Hype: Compositional Machine Learning and Hyperparameter Optimization // // Copyright (c) 2015, National University of Ireland Maynooth (Atilim Gunes Baydin, Barak A. Pearlmutter) // // Hype is released under the MIT license. // (See accompanying LICENSE file.) // // Written by: // // Atilim Gunes Baydin // atilimgunes.baydin@nuim.ie // // Barak A. Pearlmutter // barak@cs.nuim.ie // // Brain and Computation Lab // Hamilton Institute & Department of Computer Science // National University of Ireland Maynooth // Maynooth, Co. Kildare // Ireland // // www.bcl.hamilton.ie // #r "../packages/FSharp.Compiler.Service/lib/net40/FSharp.Compiler.Service.dll" #r "../packages/FSharpVSPowerTools.Core/lib/net45/FSharpVSPowerTools.Core.dll" #r "../packages/FSharp.Formatting/lib/net40/CSharpFormat.dll" #r "../packages/FSharp.Formatting/lib/net40/FSharp.CodeFormat.dll" #r "../packages/FSharp.Formatting/lib/net40/FSharp.Literate.dll" #r "../packages/FSharp.Formatting/lib/net40/FSharp.MetadataFormat.dll" #r "../packages/FSharp.Formatting/lib/net40/FSharp.Markdown.dll" open System.IO open FSharp.Literate open FSharp.MetadataFormat // // Setup output directory structure and copy static files // let source = __SOURCE_DIRECTORY__ let docs = Path.Combine(source, "") let relative subdir = Path.Combine(docs, subdir) if not (Directory.Exists(relative "output")) then Directory.CreateDirectory(relative "output") |> ignore if not (Directory.Exists(relative "output/img")) then Directory.CreateDirectory (relative "output/img") |> ignore if not (Directory.Exists(relative "output/misc")) then Directory.CreateDirectory (relative "output/misc") |> ignore if not (Directory.Exists(relative "output/reference")) then Directory.CreateDirectory (relative "output/reference") |> ignore for fileInfo in DirectoryInfo(relative "input/files/misc").EnumerateFiles() do fileInfo.CopyTo(Path.Combine(relative "output/misc", fileInfo.Name), true) |> ignore for fileInfo in DirectoryInfo(relative "input/files/img").EnumerateFiles() do fileInfo.CopyTo(Path.Combine(relative "output/img", fileInfo.Name), true) |> ignore // // Generate documentation // let tags = ["project-name", "Hype"; "project-author", "Atılım Güneş Baydin"; "project-github", "http://github.com/hypelib/Hype"; "project-nuget", "https://www.nuget.org/packages/hype"; "root", ""] Literate.ProcessScriptFile(relative "input/index.fsx", relative "input/templates/template.html", relative "output/index.html", replacements = tags) 
Literate.ProcessScriptFile(relative "input/download.fsx", relative "input/templates/template.html", relative "output/download.html", replacements = tags) Literate.ProcessScriptFile(relative "input/Optimization.fsx", relative "input/templates/template.html", relative "output/optimization.html", replacements = tags) Literate.ProcessScriptFile(relative "input/Training.fsx", relative "input/templates/template.html", relative "output/training.html", replacements = tags) Literate.ProcessScriptFile(relative "input/Regression.fsx", relative "input/templates/template.html", relative "output/regression.html", replacements = tags) Literate.ProcessScriptFile(relative "input/FeedforwardNets.fsx", relative "input/templates/template.html", relative "output/feedforwardnets.html", replacements = tags) Literate.ProcessScriptFile(relative "input/RecurrentNets.fsx", relative "input/templates/template.html", relative "output/recurrentnets.html", replacements = tags) Literate.ProcessScriptFile(relative "input/HMC.fsx", relative "input/templates/template.html", relative "output/hmc.html", replacements = tags) // // Generate API reference // let library = relative "../src/Hype/bin/Debug/Hype.dll" let layoutRoots = [relative "input/templates"; relative "input/templates/reference" ] MetadataFormat.Generate(library, relative "output/reference", layoutRoots, tags, markDownComments = true, libDirs = [relative "../src/Hype/bin/Debug/"]) ================================================ FILE: docs/input/FeedforwardNets.fsx ================================================ (*** hide ***) #r "../../src/Hype/bin/Release/netstandard2.0/DiffSharp.dll" #r "../../src/Hype/bin/Release/netstandard2.0/Hype.dll" #I "../../packages/R.NET.Community/lib/net40/" #I "../../packages/R.NET.Community.FSharp/lib/net40/" #I "../../packages/RProvider" #load "RProvider.fsx" fsi.ShowDeclarationValues <- true (** Feedforward neural networks =========================== In this example, we implement a softmax classifier network with several hidden layers. Also see the [regression example](regression.html) for some relevant basics. We again demonstrate the library with the [MNIST](http://yann.lecun.com/exdb/mnist/) database, this time using the full training set of 60,000 examples for building a classifier with 10 outputs representing the class probabilities of an input image belonging to one of the ten categories. ### Loading the data We load the data and form the training, validation, and test datasets. The datasets are shuffled and the input data are normalized. *) open Hype open Hype.Neural open DiffSharp.AD.Float32 open DiffSharp.Util let MNIST = Dataset(Util.LoadMNISTPixels("C:/datasets/MNIST/train-images.idx3-ubyte", 60000), Util.LoadMNISTLabels("C:/datasets/MNIST/train-labels.idx1-ubyte", 60000)).NormalizeX() let MNISTtrain = MNIST.[..58999].Shuffle() let MNISTvalid = MNIST.[59000..].Shuffle() let MNISTtest = Dataset(Util.LoadMNISTPixels("C:/datasets/MNIST/t10k-images.idx3-ubyte", 10000), Util.LoadMNISTLabels("C:/datasets/MNIST/t10k-labels.idx1-ubyte", 10000)).NormalizeX().Shuffle() (**
val MNISTtrain : Dataset = Hype.Dataset
   X: 784 x 59000
   Y: 1 x 59000
val MNISTvalid : Dataset = Hype.Dataset
   X: 784 x 1000
   Y: 1 x 1000
val MNISTtest : Dataset = Hype.Dataset
   X: 784 x 10000
   Y: 1 x 10000
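The NormalizeX call above rescales the raw pixel inputs before training. Hype's exact scheme is not shown on this page, so here is a minimal dependency-free sketch, assuming simple min-max scaling of the inputs to [0, 1]:

// Min-max normalization of a flat pixel buffer to [0, 1] (an assumption;
// see Hype.fs for the library's actual NormalizeX implementation)
let normalize (xs: float32[]) =
    let mn, mx = Array.min xs, Array.max xs
    xs |> Array.map (fun x -> (x - mn) / (mx - mn))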
*) MNISTtrain.[..5].VisualizeXColsAsImageGrid(28) |> printfn "%s" (** [lang=cs] Hype.Dataset X: 784 x 6 Y: 1 x 6 X's columns reshaped to (28 x 28), presented in a (2 x 3) grid: DM : 56 x 84 ·▴█ ■■♦█· █■ ▪███■▪ -██■- ·■███♦● ·●██■▪ -♦▪ ·████♦ -♦█♦ -♦■▪· █■█● ■█· ·●██♦ ██●· ██· ·▴ ·●██▪ -■██▪ ■█▪ ·■▪ ·▪■■▪ ·███▪ ·▴· ♦█▪ ♦■▪ ·■■▴ ♦███●▴▴♦██● ██▴ █ ■█· ■█■█■██████■ ■█▪ ■█· - ██▴█████████▴ ·██· ●█▪ █▪●■████■███▴ ·███♦· ·■■ █▴●- · ●█· ●♦♦♦█▪ ■█· █●■ ▪█ ▴█- ·█● ███· ·●■■██ ■█ ·█● █●■▴▴██████● ██ ●█▴ ♦█████████▴ ▴█▪ -♦██♦ ▪█████♦▪· ● █■ ▴■■▴ ▪▪▪ █ ·■♦ ■♦▪♦█▪ ·■ ·♦ ▪■♦- ·●●█■· ██· ♦█● ██■■█▪-●· ▪■·▴● ██▴ ♦██▪ ■█- ▴███■ █♦ -██ ♦██● ▪█· ▪██▪ ▪█♦ ■▴ -██ ♦██■ █● ♦██▴ ♦█- ██■ ▴█♦ ▴██■ ▴█ ·██■· ♦█- ███ ♦█♦ -███ ▴█·▴██♦ ▪██■███· ♦█· ·██● ████▴ ■██●■█· ♦█▴ ··-███● ▪██♦ · ♦█· ♦██■■■■██████● ·■███ ██ ▪█████████■♦█● ♦██■●█ █■ ●██████▴- █■ ▴██■- ●█ ▴█▪ ·███♦· ██ ♦█■▴ █● ██▴ ▪▪ ██▪ ███ ■█· ██▴ ●██ ●████■■██- ██ ▴██ -▪▪▪▪▪▪▪- ██ ██ -██ ■█ -█● ●█· -█▪ ·█ █ ### Defining the model We define a neural network with 3 layers: (1) a hidden layer with 300 units, followed by ReLU activation, (2) a hidden layer with 100 units, followed by ReLU activation, (3) a final layer with 10 units, followed by softmax transformation. *) let n = FeedForward() n.Add(Linear(28 * 28, 300, Initializer.InitReLU)) n.Add(reLU) n.Add(Linear(300, 100, Initializer.InitReLU)) n.Add(reLU) n.Add(Linear(100, 10)) n.Add(fun m -> m |> DM.mapCols softmax) // Note the free inline implementation of the layer n.ToString() |> printfn "%s" (** [lang=cs] Hype.Neural.FeedForward Learnable parameters: 266610 (0) -> (1) -> (2) -> (3) -> (4) -> (5) (0): Hype.Neural.Linear 784 -> 300 Learnable parameters: 235500 Init: ReLU W : 300 x 784 b : 300 (1): Hype.Neural.Activation (2): Hype.Neural.Linear 300 -> 100 Learnable parameters: 30100 Init: ReLU W : 100 x 300 b : 100 (3): Hype.Neural.Activation (4): Hype.Neural.Linear 100 -> 10 Learnable parameters: 1010 Init: Standard W : 10 x 100 b : 10 (5): Hype.Neural.Activation *) (** ### Freely implementing transformation layers Now let's have a closer look at how we implemented the nonlinear transformations between the linear layers. You might think that the instances of **reLU** in **n.Add(reLU)** above refer to a particular layer structure previously implemented as a layer module within the library. They don't. **reLU** is just a matrix-to-matrix elementwise function. **An important thing to note** here is that the activation/transformation layers added with, for example, **n.Add(reLU)**, can be **any matrix-to-matrix function that you can express in the language,** unlike commonly seen in many machine learning frameworks where you are asked to select a particular layer type that has been implemented beforehand with it's (1) forward evaluation code and (2) reverse gradient code w.r.t. layer inputs, and (3) reverse gradient code w.r.t. any layer parameters. In such a setting, a new layer design would require you to add a new layer type to the system and carefully implement these components. Here, because the system is based on nested AD, you can freely use any matrix-to-matrix transformation as a layer, and the forward and/or reverse AD operations of your code will be handled automatically by the underlying system. For example, you can write a layer like this: *) n.Add(fun w -> let min = DM.Min(w) let range = DM.Max(w) - min (w - min) / range) (** which will be a normalization layer, scaling the values to be between 0 and 1. 
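Layers of this kind are ordinary functions. For instance, the column-wise softmax used as the final layer amounts to the following on a plain float array; this is a dependency-free sketch with the usual max-subtraction for numerical stability, and **DM.mapCols** applies such a vector-to-vector function to every column of a matrix:

// Softmax of one column: exponentiate (shifted by the max for stability)
// and normalize so the entries sum to 1
let softmaxVec (v: float[]) =
    let m = Array.max v
    let e = v |> Array.map (fun x -> exp (x - m))
    let s = Array.sum e
    e |> Array.map (fun x -> x / s)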
In the above model, this is how the softmax layer is implemented as a mapping of the vector-to-vector **softmax** function to the columns of a matrix. *) n.Add(fun m -> m |> DM.mapCols softmax) (** In this particular example, the output matrix has 10 rows (for the 10 target classes) and each column (a vector of size 10) is individually passed through the **softmax** function. The output matrix would have as many columns as the input matrix, representing the class probabilities of each input. *) (** ### Weight initialization schemes When layers with learnable weights are created, the weights are initialized using one of the following schemes. The correct initialization would depend on the activation function immediately following the layer and would take the fan-in/fan-out of the layer into account. If a specific scheme is not specified, the **InitStandard** scheme is used by default. These implementations are based on existing machine learning literature, such as _"Glorot, Xavier, and Yoshua Bengio. "Understanding the difficulty of training deep feedforward neural networks." International conference on artificial intelligence and statistics. 2010"_. *) type Initializer = | InitUniform of D * D | InitNormal of D * D | InitRBM of D | InitReLU | InitSigmoid | InitTanh | InitStandard | InitCustom of (int->int->D) override i.ToString() = match i with | InitUniform(min, max) -> sprintf "Uniform min=%A max=%A" min max | InitNormal(mu, sigma) -> sprintf "Normal mu=%A sigma=%A" mu sigma | InitRBM sigma -> sprintf "RBM sigma=%A" sigma | InitReLU -> "ReLU" | InitSigmoid -> "Sigmoid" | InitTanh -> "Tanh" | InitStandard -> "Standard" | InitCustom f -> "Custom" member i.InitDM(m, n) = let fanOut, fanIn = m, n match i with | InitUniform(min, max) -> Rnd.UniformDM(m, n, min, max) | InitNormal(mu, sigma) -> Rnd.NormalDM(m, n, mu, sigma) | InitRBM sigma -> Rnd.NormalDM(m, n, D 0.f, sigma) | InitReLU -> Rnd.NormalDM(m, n, D 0.f, sqrt (D 2.f / (float32 fanIn))) | InitSigmoid -> let r = D 4.f * sqrt (D 6.f / (fanIn + fanOut)) in Rnd.UniformDM(m, n, -r, r) | InitTanh -> let r = sqrt (D 6.f / (fanIn + fanOut)) in Rnd.UniformDM(m, n, -r, r) | InitStandard -> let r = (D 1.f) / sqrt (float32 fanIn) in Rnd.UniformDM(m, n, -r, r) | InitCustom f -> DM.init m n (fun _ _ -> f fanIn fanOut) member i.InitDM(m:DM) = i.InitDM(m.Rows, m.Cols) (** ### Training Before training, let's visualize the weights of the first layer in a grid where each row of the weight matrix of the first layer is shown as a 28-by-28 image. It is an image of random weights, as expected. *) let l = (n.[0] :?> Linear) l.VisualizeWRowsAsImageGrid(28) |> printfn "%s" (**
Hype.Neural.Linear
    784 -> 300
    Learnable parameters: 235500
    Init: ReLU
    W's rows reshaped to (28 x 28), presented in a (17 x 18) grid:
[Chart: randomly initialized first-layer weights, W's rows shown as 28 x 28 image tiles]

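As an aside, the **InitCustom** case above lets you plug in your own scheme: it receives the layer's fan-in and fan-out and returns one **D** scalar per weight. Below is a minimal sketch reproducing the **InitStandard** rule, Uniform(-1/sqrt(fanIn), 1/sqrt(fanIn)); the System.Random source and its seed are illustrative assumptions:

// Draw each weight uniformly from [-r, r] with r = 1/sqrt(fanIn)
let rng = System.Random(42)
let initStandardCustom =
    Initializer.InitCustom(fun fanIn _ ->
        let r = 1.0 / sqrt (float fanIn)
        D (float32 (rng.NextDouble() * 2.0 * r - r)))
// Used like the built-in schemes, e.g. n.Add(Linear(100, 10, initStandardCustom))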
Now let's train the network with the training and validation datasets we've prepared, using RMSProp, Nesterov momentum, and cross-entropy loss. *) let p = {Params.Default with Epochs = 2 EarlyStopping = Early (400, 100) ValidationInterval = 10 Batch = Minibatch 100 Loss = CrossEntropyOnSoftmax Momentum = Nesterov (D 0.9f) LearningRate = RMSProp (D 0.001f, D 0.9f)} let _, lhist = n.Train(MNISTtrain, MNISTvalid, p) (**
[12/11/2015 22:42:07] --- Training started
[12/11/2015 22:42:07] Parameters     : 266610
[12/11/2015 22:42:07] Iterations     : 1180
[12/11/2015 22:42:07] Epochs         : 2
[12/11/2015 22:42:07] Batches        : Minibatches of 100 (590 per epoch)
[12/11/2015 22:42:07] Training data  : 59000
[12/11/2015 22:42:07] Validation data: 1000
[12/11/2015 22:42:07] Valid. interval: 10
[12/11/2015 22:42:07] Method         : Gradient descent
[12/11/2015 22:42:07] Learning rate  : RMSProp a0 = D 0.00100000005f, k = D 0.899999976f
[12/11/2015 22:42:07] Momentum       : Nesterov D 0.899999976f
[12/11/2015 22:42:07] Loss           : Cross entropy after softmax layer
[12/11/2015 22:42:07] Regularizer    : L2 lambda = D 9.99999975e-05f
[12/11/2015 22:42:07] Gradient clip. : None
[12/11/2015 22:42:07] Early stopping : Stagnation thresh. = 400, overfit. thresh. = 100
[12/11/2015 22:42:07] Improv. thresh.: D 0.995000005f
[12/11/2015 22:42:07] Return best    : true
[12/11/2015 22:42:07] 1/2 | Batch   1/590 | D  2.383214e+000 [- ] | Valid D  2.411374e+000 [- ] | Stag:  0 Ovfit:  0
[12/11/2015 22:42:08] 1/2 | Batch  11/590 | D  6.371681e-001 [↓▼] | Valid D  6.128169e-001 [↓▼] | Stag:  0 Ovfit:  0
[12/11/2015 22:42:08] 1/2 | Batch  21/590 | D  4.729548e-001 [↓▼] | Valid D  4.779414e-001 [↓▼] | Stag:  0 Ovfit:  0
[12/11/2015 22:42:09] 1/2 | Batch  31/590 | D  4.792733e-001 [↑ ] | Valid D  3.651254e-001 [↓▼] | Stag:  0 Ovfit:  0
[12/11/2015 22:42:10] 1/2 | Batch  41/590 | D  2.977416e-001 [↓▼] | Valid D  3.680202e-001 [↑ ] | Stag: 10 Ovfit:  0
[12/11/2015 22:42:10] 1/2 | Batch  51/590 | D  4.242567e-001 [↑ ] | Valid D  3.525212e-001 [↓▼] | Stag:  0 Ovfit:  0
[12/11/2015 22:42:11] 1/2 | Batch  61/590 | D  2.464822e-001 [↓▼] | Valid D  3.365663e-001 [↓▼] | Stag:  0 Ovfit:  0
[12/11/2015 22:42:11] 1/2 | Batch  71/590 | D  6.299557e-001 [↑ ] | Valid D  3.981607e-001 [↑ ] | Stag: 10 Ovfit:  0
...
[12/11/2015 22:43:21] 2/2 | Batch 521/590 | D  1.163270e-001 [↓ ] | Valid D  2.264248e-001 [↓ ] | Stag: 50 Ovfit:  0
[12/11/2015 22:43:21] 2/2 | Batch 531/590 | D  2.169427e-001 [↑ ] | Valid D  2.203927e-001 [↓ ] | Stag: 60 Ovfit:  0
[12/11/2015 22:43:22] 2/2 | Batch 541/590 | D  2.233351e-001 [↑ ] | Valid D  2.353653e-001 [↑ ] | Stag: 70 Ovfit:  0
[12/11/2015 22:43:22] 2/2 | Batch 551/590 | D  3.425132e-001 [↑ ] | Valid D  2.559682e-001 [↑ ] | Stag: 80 Ovfit:  0
[12/11/2015 22:43:23] 2/2 | Batch 561/590 | D  2.768238e-001 [↓ ] | Valid D  2.412431e-001 [↓ ] | Stag: 90 Ovfit:  0
[12/11/2015 22:43:24] 2/2 | Batch 571/590 | D  2.550858e-001 [↓ ] | Valid D  2.726600e-001 [↑ ] | Stag:100 Ovfit:  0
[12/11/2015 22:43:24] 2/2 | Batch 581/590 | D  2.308137e-001 [↓ ] | Valid D  2.466903e-001 [↓ ] | Stag:110 Ovfit:  0
[12/11/2015 22:43:25] Duration       : 00:01:17.5011734
[12/11/2015 22:43:25] Loss initial   : D  2.383214e+000
[12/11/2015 22:43:25] Loss final     : D  1.087980e-001 (Best)
[12/11/2015 22:43:25] Loss change    : D -2.274415e+000 (-95.43 %)
[12/11/2015 22:43:25] Loss chg. / s  : D -2.934685e-002
[12/11/2015 22:43:25] Epochs / s     : 0.02580606089
[12/11/2015 22:43:25] Epochs / min   : 1.548363654
[12/11/2015 22:43:25] --- Training finished
[Chart: training loss vs. iteration]

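The **CrossEntropyOnSoftmax** loss selected above compares the softmax output probabilities with the one-hot targets. Sketched over plain arrays, for one example with true class t and predicted probabilities p the loss is -log p.[t]; averaging over the minibatch is an assumption here, as the exact reduction is not shown on this page:

// Per-example cross-entropy and a batch average over plain arrays
let crossEntropy (p: float[]) (t: int) = -log p.[t]
let batchLoss (probs: float[][]) (targets: int[]) =
    Array.map2 crossEntropy probs targets |> Array.average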
*) (*** hide ***) open RProvider open RProvider.graphics open RProvider.grDevices let ll = lhist |> Array.map (float32>>float) namedParams[ "x", box ll "pch", box 19 "col", box "darkblue" "type", box "l" "xlab", box "Iteration" "ylab", box "Loss" "width", box 700 "height", box 500 ] |> R.plot|> ignore (** Now let's visualize the weights of the first layer in the grid. We see that the network has learned the problem domain. *) let l = (n.[0] :?> Linear) l.VisualizeWRowsAsImageGrid(28) |> printfn "%s" (**
[Chart: trained first-layer weights, W's rows shown as 28 x 28 image tiles]

*) (** ### Building the softmax classifier As explained in [regression](regression.html), we just construct an instance of **SoftmaxClassifier** with the trained neural network as its parameter. Please see the [API reference](reference/index.html) and the [source code](https://github.com/hypelib/Hype/blob/master/src/Hype/Classifier.fs) for a better understanding of how classifiers are implemented. *) let cc = SoftmaxClassifier(n) (** Testing class predictions for 10 random elements from the MNIST test set. *) let pred = cc.Classify(MNISTtest.X.[*,0..9]);; let real = MNISTtest.Yi.[0..9] (**
val pred : int [] = [|5; 1; 9; 2; 6; 0; 0; 5; 7; 6|]
val real : int [] = [|5; 1; 9; 2; 6; 0; 0; 5; 7; 6|]
Let's compute the classification error for the whole MNIST test set of 10,000 examples. *) cc.ClassificationError(MNISTtest) (**
val it : float32 = 0.0502999984f
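For reference, the error being reported is just the fraction of predicted class indices that disagree with the labels; a minimal sketch over plain arrays (see Classifier.fs for Hype's actual implementation):

// Fraction of mismatches between predictions and labels
let classificationError (pred: int[]) (real: int[]) =
    Array.map2 (fun p r -> if p <> r then 1.0 else 0.0) pred real
    |> Array.average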
The classification error is around 5%. This can be lowered some more by training the model for more than 2 epochs as we did. Classifying a single digit: *) let cls = cc.Classify(MNISTtest.X.[*,0]);; MNISTtest.X.[*,0] |> DV.visualizeAsDM 28 |> printfn "%s" (** [lang=cs] val cls : int = 5 DM : 28 x 28 · ▴●██♦- ▴♦██■▴- ♦█■■███▪· ■████■- ♦███▪ ♦██♦ ██● ■█▪ ██· -▴■●- ▴██████■███- ♦██♦▪ ▪█■- ▪· ▴█● -██ ♦█● ■█■ -●██■· -▴▪■███▪ ███████●- Classifying many digits at the same time: *) let clss = cc.Classify(MNISTtest.X.[*,0..4]);; MNISTtest.[0..4].VisualizeXColsAsImageGrid(28) |> printfn "%s" (** [lang=cs] val clss : int [] = [|5; 1; 9; 2; 6|] Hype.Dataset X: 784 x 5 Y: 1 x 5 X's columns reshaped to (28 x 28), presented in a (2 x 3) grid: DM : 56 x 84 ██♦ · ██ ▴●██♦- ██▴ -♦█▪ ▴♦██■▴- ♦██ ●█████● ♦█■■███▪· ██♦ ■███♦♦██ ■████■- ███ ■██♦ ■█▴ ♦███▪ ▴███ ·██♦ ●██ ♦██♦ ███ ▪██ ■█■ ██● ▴██▴ ·██· ·♦██▴ ■█▪ ███ ███♦♦████▴ ██· -▴■●- ███♦ ▴████████· ▴██████■███- ███ ▴ ·-●- ■██ ♦██♦▪ ▪█■- ♦██▴ ██■ ▪· ▴█● ▴██♦ -██▴ -██ ███▴ -██· ♦█● ♦██▴ ■██· ■█■ ███ ███ -●██■· ♦██▴ ▴██● -▴▪■███▪ ██♦ ███ ███████●- ♦█ -██■ -██♦ -██· ▴●█♦ ●██ -████▴ ▪████● ▴████ ▴██████▴ ▴███■ ■██▪▴██▴ ███▪ ▴██● ▴█■ ■██▴ ·███ ██- ·♦██▴ ♦██● ▪█▪ -███▴ ███ ██ ███♦ ███ █♦ ███▪ █♦· █♦ ●██■ ▴▴▴ · ██ ███ -██-■█████▪ - ██ ██■ ●███████████- ·██■♦- ██ ▴██▴ ███●- ▪██▴ ♦█████■███ ▪██ ·██- ·██▪ ■█████████ ███▪·██▴ ♦██ ♦█████████♦▪ ▪██████▴ ·♦██· -███████████■●●●· ▪███████████████▴ ■██████■■■█████▴ -▪██████████♦- ·████■ ▴████■ ·▴▴▴▴▴▴· -■█- ■■▴ Nested optimization of training hyperparameters ----------------------------------------------- As we've seen in [optimization](optimization.html), nested AD allows us to apply gradient-based optimization to functions that also internally perform optimization. This gives us the possibility of optimizing the hyperparameters of training. We can, for example, compute the gradient of the final loss of a training procedure with respect to the continuous hyperparameters of the training such as learning rates, momentum parameters, regularization coefficients, or initialization conditions. As an example, let's train a neural network with a learning rate schedule of 50 elements, and optimize this schedule vector with another level of optimization on top of the training. *) let train lrschedule = Rnd.Seed(123) n.Init() let p = {Params.Default with LearningRate = Schedule lrschedule Loss = CrossEntropyOnSoftmax ValidationInterval = 1 Silent = true ReturnBest = false Batch = Full} let loss, _ = n.Train(MNISTvalid.[..20], p) loss let hypertrain epochs = let p = {Params.Default with Epochs = epochs LearningRate = RMSProp(D 0.01f, D 0.9f) ValidationInterval = 1} let lr, _, _, _ = Optimize.Minimize(train, DV.create 50 (D 0.1f), p) lr let lr = hypertrain 50 (*** hide ***) open RProvider open RProvider.graphics open RProvider.grDevices let lrlr = lr |> DV.toArray |> Array.map (float32>>float) namedParams[ "x", box lrlr "pch", box 19 "col", box "darkblue" "type", box "o" "xlab", box "Iteration" "ylab", box "Learning rate" "width", box 700 "height", box 500 ] |> R.plot|> ignore (**
[Chart: optimized learning rate schedule (learning rate vs. iteration)]

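The nesting above has a simple shape: the inner training run maps a hyperparameter vector (here, the learning rate schedule) to a final loss, and that map is itself minimized by the outer loop. Below is a dependency-free sketch of the same two-level pattern on a scalar toy problem; central finite differences stand in for the exact derivatives that nested AD provides in Hype:

// Inner optimization: gradient descent on f(w) = (w - 3)^2, returning the
// final loss as a function of the learning rate
let inner (lr: float) =
    let mutable w = 0.0
    for _ in 1 .. 10 do
        w <- w - lr * 2.0 * (w - 3.0)
    (w - 3.0) ** 2.0

// Outer optimization: descend on the learning rate itself, using a central
// finite difference where Hype would use nested AD
let hypertrainSketch () =
    let mutable lr = 0.1
    for _ in 1 .. 100 do
        let eps = 1e-5
        let g = (inner (lr + eps) - inner (lr - eps)) / (2.0 * eps)
        lr <- lr - 0.01 * g
    lr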
*) ================================================ FILE: docs/input/HMC.fsx ================================================ (*** hide ***) #r "../../src/Hype/bin/Release/netstandard2.0/DiffSharp.dll" #r "../../src/Hype/bin/Release/netstandard2.0/Hype.dll" #I "../../packages/R.NET.Community/lib/net40/" #I "../../packages/R.NET.Community.FSharp/lib/net40/" #I "../../packages/RProvider" #load "RProvider.fsx" fsi.ShowDeclarationValues <- false (** Markov Chain Monte Carlo ======================== Documentation coming soon. *) ================================================ FILE: docs/input/Optimization.fsx ================================================ (*** hide ***) #r "../../src/Hype/bin/Release/netstandard2.0/DiffSharp.dll" #r "../../src/Hype/bin/Release/netstandard2.0/Hype.dll" #I "../../packages/R.NET.Community/lib/net40/" #I "../../packages/R.NET.Community.FSharp/lib/net40/" #I "../../packages/RProvider" #load "RProvider.fsx" (** Optimization ============ Hype provides a highly configurable and modular gradient-based optimization functionality. This works similar to many other machine learning libraries. **Here's the novelty:** Thanks to nested AD, gradient-based optimization can be combined with any code, including code which internally takes derivatives of a function to produce its output. In other words, you can optimize the value of a function that is internally optimizing another function, or using derivatives for any other purpose (e.g. running particle simulations, adaptive control), up to any level. In such a compositional optimization setting, all arising higher-order derivatives are handled for you through **nested instantiations of forward and/or reverse AD**. In any case, you only need to write your algorithms as usual, **only implementing a regular forward algorithm**. Let's explain this through a basic example from the article _"Jeffrey Mark Siskind and Barak A. Pearlmutter. Nesting forward-mode AD in a functional framework. Higher Order and Symbolic Computation 21(4):361-76, 2008. doi:10.1007/s10990-008-9037-1"_, where a parameter of a physics simulation using the gradient of an electric potential is optimized with Newton's method using the Hessian of an error, requiring third-order nesting of derivatives. Optimizing a physics simulation ------------------------------- Consider a charged particle traveling in a plane with position $\mathbf{x}(t)$, velocity $\dot{\mathbf{x}}(t)$, initial position $\mathbf{x}(0)=(0, 8)$, and initial velocity $\dot{\mathbf{x}}(0)=(0.75, 0)$. The particle is accelerated by an electric field formed by a pair of repulsive bodies, $$$ p(\mathbf{x}; w) = \| \mathbf{x} - (10, 10 - w)\|^{-1} + \| \mathbf{x} - (10, 0)\|^{-1} where $w$ is a parameter of this simple particle simulation, adjusting the location of one of the repulsive bodies. We can simulate the time evolution of this system by using a naive Euler ODE integration $$$ \begin{eqnarray*} \ddot{\mathbf{x}}(t) &=& \left. -\nabla_{\mathbf{x}} p(\mathbf{x}) \right|_{\mathbf{x}=\mathbf{x}(t)}\\ \dot{\mathbf{x}}(t + \Delta t) &=& \dot{\mathbf{x}}(t) + \Delta t \ddot{\mathbf{x}}(t)\\ \mathbf{x}(t + \Delta t) &=& \mathbf{x}(t) + \Delta t \dot{\mathbf{x}}(t) \end{eqnarray*} where $\Delta t$ is an integration time step. For a given parameter $w$, the simulation starts with $t=0$ and finishes when the particle hits the $x$-axis, at position $\mathbf{x}(t_f)$ at time $t_f$. 
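Before the error and its derivatives come into play, each simulation step is plain Euler integration. Here is a dependency-free sketch of one step, with gradP standing in for the AD-computed gradient of the potential used in the code below:

// One Euler step: acceleration is the negative gradient of the potential,
// then velocity and position are updated in turn
let eulerStep (dt: float) (gradP: float[] -> float[]) (x: float[], v: float[]) =
    let a = gradP x |> Array.map (fun g -> -g)
    let v' = Array.map2 (fun vi ai -> vi + dt * ai) v a
    let x' = Array.map2 (fun xi vi -> xi + dt * vi) x v'
    x', v'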
When the particle hits the $x$-axis, we calculate an error $E(w) = x_0 (t_f)^2$, the squared horizontal distance of the particle from the origin. We then minimize this error using Newton's method, which finds the optimal value of $w$ so that the particle eventually hits the $x$-axis at the origin. $$$ w^{(i+1)} = w^{(i)} - \frac{E'(w^{(i)})}{E''(w^{(i)})} In other words, the code calculating the trajectory of the particle internally computes the gradient of the electric potential $p(\mathbf{x}; w)$, and, at the same time, the final position of the trajectory $\mathbf{x}(t_f)$ is used to compute an error, and the gradient and Hessian of this error are computed during the optimization procedure. Here's how it goes. *) open Hype open DiffSharp.AD.Float32 let dt = D 0.1f let x0 = toDV [0.; 8.] let v0 = toDV [0.75; 0.] let p w (x:DV) = (1.f / DV.norm (x - toDV [D 10.f + w * D 0.f; D 10.f - w])) + (1.f / DV.norm (x - toDV [10.; 0.])) let trajectory (w:D) = (x0, v0) |> Seq.unfold (fun (x, v) -> let a = -grad (p w) x let v = v + dt * a let x = x + dt * v Some(x, (x, v))) |> Seq.takeWhile (fun x -> x.[1] > D 0.f) let error (w:DV) = let xf = trajectory w.[0] |> Seq.last xf.[0] * xf.[0] let w, l, whist, lhist = Optimize.Minimize(error, toDV [0.], {Params.Default with Method = Newton; LearningRate = Constant (D 1.f) ValidationInterval = 1; Epochs = 10}) (**
[25/12/2015 23:53:10] --- Minimization started
[25/12/2015 23:53:10] Parameters     : 1
[25/12/2015 23:53:10] Iterations     : 10
[25/12/2015 23:53:10] Valid. interval: 1
[25/12/2015 23:53:10] Method         : Exact Newton
[25/12/2015 23:53:10] Learning rate  : Constant a = D 1.0f
[25/12/2015 23:53:10] Momentum       : None
[25/12/2015 23:53:10] Gradient clip. : None
[25/12/2015 23:53:10] Early stopping : None
[25/12/2015 23:53:10] Improv. thresh.: D 0.995000005f
[25/12/2015 23:53:10] Return best    : true
[25/12/2015 23:53:10]  1/10 | D  2.535113e+000 [- ]
[25/12/2015 23:53:10]  2/10 | D  7.528733e-002 [↓▼]
[25/12/2015 23:53:10]  3/10 | D  1.592970e-002 [↓▼]
[25/12/2015 23:53:10]  4/10 | D  4.178338e-003 [↓▼]
[25/12/2015 23:53:10]  5/10 | D  1.382800e-008 [↓▼]
[25/12/2015 23:53:11]  6/10 | D  3.274181e-011 [↓▼]
[25/12/2015 23:53:11]  7/10 | D  1.151079e-012 [↓▼]
[25/12/2015 23:53:11]  8/10 | D  1.151079e-012 [- ]
[25/12/2015 23:53:11]  9/10 | D  1.151079e-012 [- ]
[25/12/2015 23:53:11] 10/10 | D  3.274181e-011 [↑ ]
[25/12/2015 23:53:11] Duration       : 00:00:00.9201285
[25/12/2015 23:53:11] Value initial  : D  2.535113e+000
[25/12/2015 23:53:11] Value final    : D  1.151079e-012 (Best)
[25/12/2015 23:53:11] Value change   : D -2.535113e+000 (-100.00 %)
[25/12/2015 23:53:11] Value chg. / s : D -2.755173e+000
[25/12/2015 23:53:11] Iter. / s      : 10.86804723
[25/12/2015 23:53:11] Iter. / min    : 652.0828341
[25/12/2015 23:53:11] --- Minimization finished

val whist : DV [] =
  [|DV [|0.0f|]; DV [|0.20767726f|]; DV [|0.17457059f|]; DV [|0.190040559f|];
    DV [|0.182180524f|]; DV [|0.182166189f|]; DV [|0.182166889f|];
    DV [|0.182166755f|]; DV [|0.182166621f|]; DV [|0.182166487f|]|]
val w : DV = DV [|0.182166889f|]
val lhist : D [] =
  [|D 2.5351131f; D 2.5351131f; D 0.0752873272f; D 0.0159297027f;
    D 0.00417833822f; D 1.38279992e-08f; D 3.27418093e-11f; D 1.15107923e-12f;
    D 1.15107923e-12f; D 1.15107923e-12f|]
val l : D = D 1.15107923e-12f
*) (*** hide ***) open RProvider open RProvider.graphics open RProvider.grDevices R.plot_new (namedParams [ ]) let t = trajectory (whist.[1].[0]) let tx, ty = t |> Seq.toArray |> Array.map (fun v -> v.[0] |> float32 |> float, v.[1] |> float32 |> float) |> Array.unzip namedParams[ "x", box tx "y", box ty "pch", box 1 "xlab", box "" "ylab", box "" "col", box "darkblue" "type", box "l" "lty", box 4 "width", box 700 "height", box 500 ] |> R.lines |> ignore (**
[Chart: simulated particle trajectory in the plane]

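The Newton update used above has the one-dimensional form $w^{(i+1)} = w^{(i)} - E'(w^{(i)})/E''(w^{(i)})$. A dependency-free sketch with hand-coded derivatives for a hypothetical toy objective $E(w) = (w - 2)^4$ (in the example, both derivatives come from nested AD through the whole simulation):

// Newton iteration on a scalar, given the first and second derivatives
let newton (e': float -> float) (e'': float -> float) w0 steps =
    let mutable w = w0
    for _ in 1 .. steps do
        w <- w - e' w / e'' w
    w
// newton (fun w -> 4.0 * (w - 2.0) ** 3.0) (fun w -> 12.0 * (w - 2.0) ** 2.0) 0.0 20
// approaches the minimum at w = 2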
Optimization parameters ----------------------- As another example, let's optimize the Beale function $$$ f(\mathbf{x}) = (1.5 - x_1 + x_1 x_2)^2 + (2.25 - x_1 + x_1 x_2^2)^2 + (2.625 - x_1 + x_1 x_2^3)^2 starting from $\mathbf{x} = (1, 1.5)$, using RMSProp. The optimum is at $(3, 0.5)$. *) let beale (x:DV) = (1.5f - x.[0] + (x.[0] * x.[1])) ** 2.f + (2.25f - x.[0] + x.[0] * x.[1] ** 2.f) ** 2.f + (2.625f - x.[0] + x.[0] * x.[1] ** 3.f) ** 2.f let wopt, lopt, whist, lhist = Optimize.Minimize(beale, toDV [1.; 1.5], {Params.Default with Epochs = 3000; LearningRate = RMSProp (D 0.01f, D 0.9f)}) (**
[12/11/2015 01:22:59] --- Minimization started
[12/11/2015 01:22:59] Parameters     : 2
[12/11/2015 01:22:59] Iterations     : 3000
[12/11/2015 01:22:59] Valid. interval: 10
[12/11/2015 01:22:59] Method         : Gradient descent
[12/11/2015 01:22:59] Learning rate  : RMSProp a0 = D 0.00999999978f, k = D 0.899999976f
[12/11/2015 01:22:59] Momentum       : None
[12/11/2015 01:22:59] Gradient clip. : None
[12/11/2015 01:22:59] Early stopping : None
[12/11/2015 01:22:59] Improv. thresh.: D 0.995000005f
[12/11/2015 01:22:59] Return best    : true
[12/11/2015 01:22:59]    1/3000 | D  4.125000e+001 [- ]
[12/11/2015 01:22:59]   11/3000 | D  2.655878e+001 [↓▼]
[12/11/2015 01:22:59]   21/3000 | D  2.154373e+001 [↓▼]
[12/11/2015 01:22:59]   31/3000 | D  1.841705e+001 [↓▼]
[12/11/2015 01:22:59]   41/3000 | D  1.624916e+001 [↓▼]
[12/11/2015 01:22:59]   51/3000 | D  1.465973e+001 [↓▼]
[12/11/2015 01:22:59]   61/3000 | D  1.334291e+001 [↓▼]
...
[12/11/2015 01:22:59] 2921/3000 | D  9.084024e-004 [- ]
[12/11/2015 01:22:59] 2931/3000 | D  9.084024e-004 [- ]
[12/11/2015 01:22:59] 2941/3000 | D  9.084024e-004 [- ]
[12/11/2015 01:22:59] 2951/3000 | D  9.084024e-004 [- ]
[12/11/2015 01:22:59] 2961/3000 | D  9.084024e-004 [- ]
[12/11/2015 01:22:59] 2971/3000 | D  9.084024e-004 [- ]
[12/11/2015 01:22:59] 2981/3000 | D  9.084024e-004 [- ]
[12/11/2015 01:22:59] 2991/3000 | D  9.084024e-004 [- ]
[12/11/2015 01:22:59] Duration       : 00:00:00.3142646
[12/11/2015 01:22:59] Value initial  : D  4.125000e+001
[12/11/2015 01:22:59] Value final    : D  8.948371e-004 (Best)
[12/11/2015 01:22:59] Value change   : D -4.124910e+001 (-100.00 %)
[12/11/2015 01:22:59] Value chg. / s : D -1.312560e+002
[12/11/2015 01:22:59] Iter. / s      : 9546.09587
[12/11/2015 01:22:59] Iter. / min    : 572765.7522
[12/11/2015 01:22:59] --- Minimization finished

val wopt : DV = DV [|2.99909306f; 0.50039643f|]
*) (*** hide ***) open RProvider open RProvider.graphics open RProvider.grDevices R.plot_new (namedParams [ ]) let ll = lhist |> Array.map (float32>>float) namedParams[ "x", box ll "pch", box 19 "col", box "darkblue" "type", box "o" "xlab", box "Iteration" "ylab", box "Function value" "width", box 700 "height", box 500 ] |> R.plot|> ignore (**
[Chart: Beale function value vs. iteration]

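For reference, the RMSProp rule selected above keeps a decayed moving average of squared gradients and divides each step by its square root. A sketch for a single scalar parameter; a0 and k are the (initial value, decay rate) pair of **LearningRate.RMSProp**, and eps is the usual small stabilizing constant (an assumption, not shown on this page):

// One RMSProp update: returns the new weight and the new mean-square state
let rmspropStep (a0: float) (k: float) (eps: float) (msq: float) (g: float) (w: float) =
    let msq' = k * msq + (1.0 - k) * g * g
    w - a0 * g / (sqrt msq' + eps), msq'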
*) (*** hide ***) let contourplot3d (f:DV->D) (xmin, xmax) (ymin, ymax) = let res = 100 let xstep = ((xmax - xmin) / float res) let ystep = ((ymax - ymin) / float res) let x = [|xmin .. xstep .. xmax|] let y = [|ymin .. ystep .. ymax|] let z = Array2D.init x.Length y.Length (fun i j -> f (toDV [x.[i]; y.[j]])) |> Array2D.map (float32>>float) namedParams [ "x", box x "y", box y "z", box z "labels", box "" "levels", box [|0..5..200|]] |> R.contour contourplot3d beale (-4.5,4.5) (-4.5,4.5) let xx, yy = whist |> Array.map (fun v -> v.[0] |> float32 |> float, v.[1] |> float32 |> float) |> Array.unzip namedParams[ "x", box xx "y", box yy "col", box "blue"] |> R.lines namedParams[ "x", box (xx |>Array.last) "y", box (yy |> Array.last) "pch", box 16 "col", box "blue"] |> R.points (**
[Chart: Beale function contours with the optimization path]

Each instantiation of gradient-based optimization is controlled through a collection of parameters, using the **Hype.Params** type. If you do not supply any parameters to optimization, the default parameter set **Params.Default** is used. The default parameters look like this: *) module Params = let Default = {Epochs = 100 LearningRate = LearningRate.DefaultRMSProp Momentum = NoMomentum Loss = L2Loss Regularization = Regularization.DefaultL2Reg GradientClipping = NoClip Method = GD Batch = Full EarlyStopping = NoEarly ImprovementThreshold = D 0.995f Silent = false ReturnBest = true ValidationInterval = 10 LoggingFunction = fun _ _ _ -> ()} (** If you want to change only a specific element of the parameter type, you can do so by extending the **Params.Default** value and overwriting only the parts you need to change, such as this: *) let p = {Params.Default with Epochs = 5000 LearningRate = LearningRate.AdaGrad (D 0.001f) Momentum = Nesterov (D 0.9f)} (** ### Optimization method *) type Method = | GD // Gradient descent | CG // Conjugate gradient | CD // Conjugate descent | NonlinearCG // Nonlinear conjugate gradient | DaiYuanCG // Dai & Yuan conjugate gradient | NewtonCG // Newton conjugate gradient | Newton // Exact Newton (** ### Learning rate *) type LearningRate = | Constant of D // Constant | Decay of D * D // 1 / t decay, a = a0 / (1 + kt). Initial value, decay rate | ExpDecay of D * D // Exponential decay, a = a0 * Exp(-kt). Initial value, decay rate | Schedule of DV // Scheduled learning rate vector, its length overrides Params.Epochs | Backtrack of D * D * D // Backtracking line search. Initial value, c, rho | StrongWolfe of D * D * D // Strong Wolfe line search. lmax, c1, c2 | AdaGrad of D // Adagrad. Initial value | RMSProp of D * D // RMSProp. Initial value, decay rate static member DefaultConstant = Constant (D 0.001f) static member DefaultDecay = Decay (D 1.f, D 0.1f) static member DefaultExpDecay = ExpDecay (D 1.f, D 0.1f) static member DefaultBacktrack = Backtrack (D 1.f, D 0.0001f, D 0.5f) static member DefaultStrongWolfe = StrongWolfe (D 1.f, D 0.0001f, D 0.5f) static member DefaultAdaGrad = AdaGrad (D 0.001f) static member DefaultRMSProp = RMSProp (D 0.001f, D 0.9f) (** ### Momentum *) type Momentum = | Momentum of D // Default momentum | Nesterov of D // Nesterov momentum | NoMomentum static member DefaultMomentum = Momentum (D 0.9f) static member DefaultNesterov = Nesterov (D 0.9f) (** ### Gradient clipping *) type GradientClipping = | NormClip of D // Norm clipping | NoClip static member DefaultNormClip = NormClip (D 1.f) (** Finally, looking at the [API reference](reference/index.html) and the [source code](https://github.com/hypelib/Hype/blob/master/src/Hype/Optimize.fs) of the optimization module can give you a better idea of the optimization algorithms currently implemented. *) ================================================ FILE: docs/input/RecurrentNets.fsx ================================================ (*** hide ***) #r "../../src/Hype/bin/Release/netstandard2.0/DiffSharp.dll" #r "../../src/Hype/bin/Release/netstandard2.0/Hype.dll" #I "../../packages/R.NET.Community/lib/net40/" #I "../../packages/R.NET.Community.FSharp/lib/net40/" #I "../../packages/RProvider" #load "RProvider.fsx" fsi.ShowDeclarationValues <- false (** Recurrent neural networks ========================= In this example we build a recurrent neural network (RNN) for a language modeling task and train it with a short passage of text for a quick demonstration. 
Hype currently has three RNN models implemented as **Hype.Neural** layers, which can be combined freely with the other layer types explained, for example, on the [neural networks](feedforwardnets.html) page. **Hype.Neural.Recurrent** implements the "vanilla" RNN layer, **Hype.Neural.LSTM** implements the LSTM layer, and **Hype.Neural.GRU** implements the gated recurrent unit (GRU) layer. ### Language modeling RNNs are well suited for constructing [language models](https://en.wikipedia.org/wiki/Language_model), where we need to predict the probability of a word (or token) given the history of the tokens that came before it. Here, we will use an LSTM-based RNN to construct a word-level language model from a short passage of text, for a basic demonstration of usage. This model can be scaled to larger problems. State-of-the-art models of this type can require considerable computing resources and training time. The text is from the beginning of Virgil's Aeneid, Book I. *) let text = "I sing of arms and the man, he who, exiled by fate, first came from the coast of Troy to Italy, and to Lavinian shores – hurled about endlessly by land and sea, by the will of the gods, by cruel Juno’s remorseless anger, long suffering also in war, until he founded a city and brought his gods to Latium: from that the Latin people came, the lords of Alba Longa, the walls of noble Rome. Muse, tell me the cause: how was she offended in her divinity, how was she grieved, the Queen of Heaven, to drive a man, noted for virtue, to endure such dangers, to face so many trials? Can there be such anger in the minds of the gods?" (** Hype provides a simple **Hype.NLP.Language** type for tokenizing text. You can look at the [API reference](reference/index.html) and the [source code](https://github.com/hypelib/Hype/blob/master/src/Hype/NLP.fs) for a better understanding of its usage. *) open Hype open Hype.Neural open Hype.NLP open DiffSharp.AD.Float32 open DiffSharp.Util let lang = Language(text) lang.Tokens |> printfn "%A" lang.Length |> printfn "%A" (** These are the tokens extracted from the text, including some of the punctuation marks. When we are sampling from the RNN language model, we will make use of the "." token for signaling the end of a sentence. The punctuation marks are configurable when you are constructing the **Language** instance. If they are not provided, a default set is used.
[|","; "."; ":"; "?"; "Alba"; "Can"; "Heaven"; "I"; "Italy"; "Juno’s"; "Latin";
  "Latium"; "Lavinian"; "Longa"; "Muse"; "Queen"; "Rome"; "Troy"; "a"; "about";
  "also"; "and"; "anger"; "arms"; "be"; "brought"; "by"; "came"; "cause"; "city";
  "coast"; "cruel"; "dangers"; "divinity"; "drive"; "endlessly"; "endure";
  "exiled"; "face"; "fate"; "first"; "for"; "founded"; "from"; "gods"; "grieved";
  "he"; "her"; "his"; "how"; "hurled"; "in"; "land"; "long"; "lords"; "man";
  "many"; "me"; "minds"; "noble"; "noted"; "of"; "offended"; "people";
  "remorseless"; "sea"; "she"; "shores"; "sing"; "so"; "such"; "suffering";
  "tell"; "that"; "the"; "there"; "to"; "trials"; "until"; "virtue"; "walls";
  "war"; "was"; "who"; "will"; "–"|]
  
  86
There are 86 tokens in this language instance. Now let's transform the full text to a dataset, using the **Language** instance holding these tokens. The text will be encoded in a matrix where each column is a representation of each word as a _one-hot_ vector. *) let text' = lang.EncodeOneHot(text) text'.Visualize() |> printfn "%s" (**
DM : 86 x 145
Out of these 145 words, we will construct a dataset where the inputs are the first 144 words and the target outputs are the last 144 words, that is, the same sequence shifted by one word. This means that, for each word, we want the output (the prediction) to be the following word in our text passage. *) let data = Dataset(text'.[*, 0..(text'.Cols - 2)], text'.[*, 1..(text'.Cols - 1)]) (**
val data : Dataset = Hype.Dataset
   X: 86 x 144
   Y: 86 x 144
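The encoding just used amounts to the following, sketched without Hype types; the index function mapping a token to its row is assumed:

// Token i in a vocabulary of size dim becomes a one-hot column; a text of
// n tokens becomes dim x n such columns
let oneHot dim i = Array.init dim (fun j -> if j = i then 1.0f else 0.0f)
let encodeOneHot (index: string -> int) dim (tokens: string[]) =
    tokens |> Array.map (index >> oneHot dim)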
RNNs, and especially the LSTM variety that we will use, can make predictions that take long-term dependencies and contextual information into account. When the language model is trained with a large enough text corpus and the network has enough capacity, state-of-the-art RNN language models are able to learn complex grammatical relations. For our quick demonstration, we use a linear word embedding layer of 20 units, an LSTM of 100 units, and a final linear layer of 86 units (the size of our vocabulary) followed by **softmax** activation. *) let dim = lang.Length // Vocabulary size, here 86 let n = FeedForward() n.Add(Linear(dim, 20)) n.Add(LSTM(20, 100)) n.Add(Linear(100, dim)) n.Add(DM.mapCols softmax) (** You can also easily stack multiple RNNs on top of each other. *) let n = FeedForward() n.Add(Linear(dim, 20)) n.Add(LSTM(20, 100)) n.Add(LSTM(100, 100)) n.Add(Linear(100, dim)) n.Add(DM.mapCols softmax) (** We will observe the performance of our RNN during training by sampling random sentences from the language model. Remember that the final output of the network, through the softmax activation, is a vector of word probabilities. When we are sampling, we start with a word, supply this to the network, and use the resulting probabilities at the output to sample from the vocabulary, where words with higher probability are more likely to be selected. We then continue by giving the network the last sampled word, repeating this until we hit an "end of sentence" token (we use "." here) or reach the maximum sentence length. This is how we would sample a sentence starting with a specific word. *) n.Reset() for i = 0 to 5 do lang.Sample(n.Run, "I", [|"."|], 30) // Use "." as the stop token, limit maximum sentence length to 30. |> printfn "%s" (** Because the model is not trained, we get sequences of random words from the vocabulary.
I be: she dangers Latium endlessly gods remorseless divinity tell and his offended lords trials? about war trials and anger shores so anger Alba a Alba sing her
I? came exiled – suffering shores anger came Latium people sing sing remorseless who brought war walls endlessly anger me founded his.
I – will long of in offended cruel until Queen Italy who anger lords Queen in Longa Muse who people about suffering Italy also grieved cruel hurled who me about
I endlessly city first by face, a Heaven me hurled sea such long noted she noted many sea city anger I noted remorseless cause Queen to remorseless Italy coast
I sea noted noble me minds long sing cause people in walls Italy by Longa first, for grieved sea many walls Troy came was endlessly of in Latium Latium
I and Latin of many suffering Alba Latium war.
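The sampling step itself is a draw from a categorical distribution over the vocabulary. Here is a minimal sketch of such a draw in plain F# (not Hype's internal implementation, which lives in **Language.Sample**):

    [lang=fsharp]
    // Pick the first index where the cumulative probability passes a uniform draw.
    let rng = System.Random()
    let sampleIndex (probs : float[]) =
        let r = rng.NextDouble()
        let mutable acc = 0.0
        let mutable i = 0
        while i < probs.Length - 1 && acc + probs.[i] < r do
            acc <- acc + probs.[i]
            i <- i + 1
        i

    // Words with higher probability are selected more often:
    sampleIndex [|0.1; 0.7; 0.2|] // returns 1 about 70% of the time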
We set up a training cycle where we run one epoch of training followed by sampling one sentence starting with the word "I". In each epoch, we run through the whole training dataset. With a larger training corpus, we could also run the training with minibatches by stating this in the parameter set (commented out below). Like the sample sentences above, at the beginning of training, we see mostly random orderings of words. As the training progresses, the cross-entropy loss for our dataset decreases and the sentences start exhibiting meaningful word patterns.
*)

for i = 0 to 1000 do
    let par = {Params.Default with
                //Batch = Minibatch 10
                LearningRate = LearningRate.RMSProp(D 0.01f, D 0.9f)
                Loss = CrossEntropyOnSoftmax
                Epochs = 1
                Silent = true // Suppress the regular printing of training progress
                ReturnBest = false}
    let loss, _ = Layer.Train(n, data, par)
    printfn "Epoch: %*i | Loss: %O | Sample: %s" 3 i loss (lang.Sample(n.Run, "I", [|"."|], 30))

(**
Here is a selection of sentences demonstrating the progress of training.
Epoch:   0 | Loss: D  4.478101e+000 | Sample: I Queen drive she Alba endlessly Queen the by how tell his from grieved war her there drive people – lords coast he.
Epoch:  10 | Loss: D  4.102071e+000 | Sample: I people to,, Rome how the he of – sing fate, Muse, by,, Muse the of man Queen Latin and in her cause:
Epoch:  30 | Loss: D  3.438288e+000 | Sample: I walls long to first dangers she her, to founded to virtue sea first Can dangers a founded about Can Queen lords from sea by remorseless founded endlessly Latium
Epoch:  40 | Loss: D  2.007577e+000 | Sample: I Alba gods Alba Rome, the walls Alba Muse Rome anger me the the of the gods to who man me first founded offended endlessly until also grieved long
Epoch:  50 | Loss: D  9.753818e-001 | Sample: I sing people cruel: me the of Rome.
Epoch:  60 | Loss: D  3.944587e-001 | Sample: I sing sing Troy to so hurled endlessly by land sea, by to – hurled about by the of arms, by Juno’s such anger long also in her
Epoch:  70 | Loss: D  2.131431e-001 | Sample: I sing of and the of Longa, by Juno’s anger was in her of Heaven, to a city brought his gods to a gods to Lavinian hurled to
Epoch:  80 | Loss: D  1.895453e-001 | Sample: I sing, by will the of Rome.
Epoch:  90 | Loss: D  1.799535e-001 | Sample: I sing? there Muse the of the of the of arms by the: how she offended in the of? a, he shores hurled by land to
Epoch: 100 | Loss: D  1.733837e-001 | Sample: I sing arms the of Alba gods who, by Juno’s Rome such anger the of the of arms and, by, by from the coast Rome.
Epoch: 110 | Loss: D  1.682917e-001 | Sample: I sing Troy by, by from the of arms and, by, by from came, by Juno’s anger long in the of the of arms cruel Muse
Epoch: 120 | Loss: D  1.639529e-001 | Sample: I sing arms the of Rome.
Epoch: 130 | Loss: D  1.600647e-001 | Sample: I sing arms and, by Juno’s remorseless there and the of the of arms and, by Alba coast Troy to a – his gods by of the of
Epoch: 140 | Loss: D  1.564835e-001 | Sample: I sing arms by the of Rome.
Epoch: 150 | Loss: D  1.531392e-001 | Sample: I sing arms cruel, exiled by coast, he a city in the of the of arms.
Epoch: 160 | Loss: D  1.499920e-001 | Sample: I sing arms cruel man, by the trials arms to shores hurled endlessly by the of gods Italy, me the of Rome.
Epoch: 200 | Loss: D  1.390327e-001 | Sample: I sing arms and, by Juno’s such of the of the of arms Italy, by from the sing arms walls of the of Rome.
Epoch: 230 | Loss: D  1.322940e-001 | Sample: I sing arms the man he, tell from the of arms Italy, by fate, by the of Troy Italy, by fate first from the of the
Epoch: 260 | Loss: D  1.264137e-001 | Sample: I sing brought Muse Muse the of Heaven, by shores remorseless there he in the of arms cruel, by fate, he from the gods to Italy,
Epoch: 420 | Loss: D  1.131158e-001 | Sample: I sing of arms the of Heaven, by Juno’s remorseless hurled such in the of arms.
Epoch: 680 | Loss: D  9.938217e-002 | Sample: I of arms the man he, exiled fate, he virtue, to a? Can be such in the of the of of the of arms.
Epoch: 923 | Loss: D  9.283429e-002 | Sample: I sing of arms and the man he, by fate came from the of to Italy, by the, by Juno’s anger of Rome.
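After training, you can of course seed the sampler with any other word from the vocabulary, using the same **Sample** arguments as above; for example:

    [lang=fsharp]
    // Sample a sentence starting with "Muse", stopping at "." or after 30 words.
    lang.Sample(n.Run, "Muse", [|"."|], 30) |> printfn "%s"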
*)

================================================
FILE: docs/input/Regression.fsx
================================================
(*** hide ***)
#r "../../src/Hype/bin/Release/netstandard2.0/DiffSharp.dll"
#r "../../src/Hype/bin/Release/netstandard2.0/Hype.dll"
#I "../../packages/R.NET.Community/lib/net40/"
#I "../../packages/R.NET.Community.FSharp/lib/net40/"
#I "../../packages/RProvider"
#load "RProvider.fsx"
fsi.ShowDeclarationValues <- true

(**
Regression
==========

In this example we implement a logistic-regression-based binary classifier and train it to distinguish between the [MNIST](http://yann.lecun.com/exdb/mnist/) digits of 0 and 1.

### Loading the data

First, let's start by loading the MNIST training and testing data and arranging these into training, validation, and testing sets.
*)

open Hype
open Hype.Neural
open DiffSharp.AD.Float32
open DiffSharp.Util

let MNIST = Dataset(Util.LoadMNISTPixels("C:/datasets/MNIST/train-images.idx3-ubyte", 60000),
                    Util.LoadMNISTLabels("C:/datasets/MNIST/train-labels.idx1-ubyte", 60000)
                    |> toDV |> DM.ofDV 1).NormalizeX()

let MNISTtrain = MNIST.[..58999]
let MNISTvalid = MNIST.[59000..]

let MNISTtest = Dataset(Util.LoadMNISTPixels("C:/datasets/MNIST/t10k-images.idx3-ubyte", 10000),
                        Util.LoadMNISTLabels("C:/datasets/MNIST/t10k-labels.idx1-ubyte", 10000)
                        |> toDV |> DM.ofDV 1).NormalizeX()

(**
We shuffle the columns of the datasets and filter them to only keep the digits of 0 and 1.
*)

let MNISTtrain01 = MNISTtrain.Shuffle().Filter(fun (x, y) -> y.[0] <= D 1.f)
let MNISTvalid01 = MNISTvalid.Shuffle().Filter(fun (x, y) -> y.[0] <= D 1.f)
let MNISTtest01 = MNISTtest.Shuffle().Filter(fun (x, y) -> y.[0] <= D 1.f)

(**
val MNISTtrain01 : Dataset = Hype.Dataset
   X: 784 x 12465
   Y: 1 x 12465
val MNISTvalid01 : Dataset = Hype.Dataset
   X: 784 x 200
   Y: 1 x 200
val MNISTtest01 : Dataset = Hype.Dataset
   X: 784 x 2115
   Y: 1 x 2115
We can visualize individual digits from the dataset.
*)

MNISTtrain.X.[*,9] |> DV.visualizeAsDM 28 |> printfn "%s"
MNISTtrain.Y.[*,9]

(**
    [lang=cs]
    DM : 28 x 28
    (ASCII rendering of the digit "4", elided)

    val it : DV = DV [|4.0f|]

We can also visualize a series of digits in grid layout.
*)

MNISTtrain.[..5].VisualizeXColsAsImageGrid(28) |> printfn "%s"

(**
    [lang=cs]
    Hype.Dataset
       X: 784 x 6
       Y: 1 x 6
    X's columns reshaped to (28 x 28), presented in a (2 x 3) grid:
    DM : 56 x 84
    (ASCII rendering of the digit grid, elided)

*)

MNISTtrain01.[..5].VisualizeXColsAsImageGrid(28) |> printfn "%s"

(**
    [lang=cs]
    Hype.Dataset
       X: 784 x 6
       Y: 1 x 6
    X's columns reshaped to (28 x 28), presented in a (2 x 3) grid:
    DM : 56 x 84
    (ASCII rendering of the digit grid, elided)

### Defining the model

Let's now create our logistic regression model. We implement this using the **Hype.Neural** module, as a linear layer with $28 \times 28 = 784$ inputs and one output. The output of the layer is passed through the sigmoid function.
*)

let n = Neural.FeedForward()
n.Add(Linear(28 * 28, 1))
n.Add(sigmoid)

(**
We can visualize the initial state of the linear model weights before the training. For information about weight initialization parameters, please see the [neural networks example](feedforwardnets.html).
*)

let l = (n.[0] :?> Linear)
l.VisualizeWRowsAsImageGrid(28) |> printfn "%s"

(**
    [lang=cs]
    Hype.Neural.Linear
       784 -> 1
       Learnable parameters: 785
       Init: Standard
    W's rows reshaped to (28 x 28), presented in a (1 x 1) grid:
    DM : 28 x 28
    (ASCII rendering of the randomly initialized weights, elided)

    b:
    DV : 1

### Training

Let's train the model for 10 epochs (full passes through the training data), with a minibatch size of 100, using the training and validation sets we've defined. The validation set will make sure that we're not overfitting the model.
*)

let p = {Params.Default with Epochs = 10; Batch = Minibatch 100; EarlyStopping = EarlyStopping.DefaultEarly}
n.Train(MNISTtrain01, MNISTvalid01, p)

(**
[12/11/2015 20:21:12] --- Training started
[12/11/2015 20:21:12] Parameters     : 785
[12/11/2015 20:21:12] Iterations     : 1240
[12/11/2015 20:21:12] Epochs         : 10
[12/11/2015 20:21:12] Batches        : Minibatches of 100 (124 per epoch)
[12/11/2015 20:21:12] Training data  : 12465
[12/11/2015 20:21:12] Validation data: 200
[12/11/2015 20:21:12] Valid. interval: 10
[12/11/2015 20:21:12] Method         : Gradient descent
[12/11/2015 20:21:12] Learning rate  : RMSProp a0 = D 0.00100000005f, k = D 0.899999976f
[12/11/2015 20:21:12] Momentum       : None
[12/11/2015 20:21:12] Loss           : L2 norm
[12/11/2015 20:21:12] Regularizer    : L2 lambda = D 9.99999975e-05f
[12/11/2015 20:21:12] Gradient clip. : None
[12/11/2015 20:21:12] Early stopping : Stagnation thresh. = 750, overfit. thresh. = 10
[12/11/2015 20:21:12] Improv. thresh.: D 0.995000005f
[12/11/2015 20:21:12] Return best    : true
[12/11/2015 20:21:12]  1/10 | Batch   1/124 | D  4.748471e-001 [- ] | Valid D  4.866381e-001 [- ] | Stag:  0 Ovfit: 0
[12/11/2015 20:21:12]  1/10 | Batch  11/124 | D  2.772053e-001 [↓▼] | Valid D  3.013612e-001 [↓▼] | Stag:  0 Ovfit: 0
[12/11/2015 20:21:12]  1/10 | Batch  21/124 | D  2.178165e-001 [↓▼] | Valid D  2.304372e-001 [↓▼] | Stag:  0 Ovfit: 0
[12/11/2015 20:21:12]  1/10 | Batch  31/124 | D  2.009703e-001 [↓▼] | Valid D  1.799015e-001 [↓▼] | Stag:  0 Ovfit: 0
[12/11/2015 20:21:12]  1/10 | Batch  41/124 | D  1.352896e-001 [↓▼] | Valid D  1.405802e-001 [↓▼] | Stag:  0 Ovfit: 0
[12/11/2015 20:21:12]  1/10 | Batch  51/124 | D  1.182899e-001 [↓▼] | Valid D  1.108390e-001 [↓▼] | Stag:  0 Ovfit: 0
[12/11/2015 20:21:12]  1/10 | Batch  61/124 | D  1.124191e-001 [↓▼] | Valid D  8.995526e-002 [↓▼] | Stag:  0 Ovfit: 0
[12/11/2015 20:21:12]  1/10 | Batch  71/124 | D  8.975799e-002 [↓▼] | Valid D  7.361954e-002 [↓▼] | Stag:  0 Ovfit: 0
[12/11/2015 20:21:12]  1/10 | Batch  81/124 | D  5.031444e-002 [↓▼] | Valid D  5.941865e-002 [↓▼] | Stag:  0 Ovfit: 0
[12/11/2015 20:21:12]  1/10 | Batch  91/124 | D  5.063754e-002 [↑ ] | Valid D  4.927430e-002 [↓▼] | Stag:  0 Ovfit: 0
[12/11/2015 20:21:12]  1/10 | Batch 101/124 | D  3.842642e-002 [↓▼] | Valid D  4.095582e-002 [↓▼] | Stag:  0 Ovfit: 0
[12/11/2015 20:21:12]  1/10 | Batch 111/124 | D  4.326219e-002 [↑ ] | Valid D  3.452797e-002 [↓▼] | Stag:  0 Ovfit: 0
[12/11/2015 20:21:12]  1/10 | Batch 121/124 | D  2.585407e-002 [↓▼] | Valid D  2.788338e-002 [↓▼] | Stag:  0 Ovfit: 0
[12/11/2015 20:21:12]  2/10 | Batch   1/124 | D  3.069563e-002 [↑ ] | Valid D  2.663207e-002 [↓▼] | Stag:  0 Ovfit: 0
[12/11/2015 20:21:12]  2/10 | Batch  11/124 | D  1.765305e-002 [↓▼] | Valid D  2.332163e-002 [↓▼] | Stag:  0 Ovfit: 0
[12/11/2015 20:21:12]  2/10 | Batch  21/124 | D  2.314118e-002 [↑ ] | Valid D  1.902804e-002 [↓▼] | Stag:  0 Ovfit: 0
[12/11/2015 20:21:12]  2/10 | Batch  31/124 | D  3.177435e-002 [↑ ] | Valid D  1.691620e-002 [↓▼] | Stag:  0 Ovfit: 0
[12/11/2015 20:21:12]  2/10 | Batch  41/124 | D  2.219648e-002 [↓ ] | Valid D  1.455527e-002 [↓▼] | Stag:  0 Ovfit: 0
[12/11/2015 20:21:12]  2/10 | Batch  51/124 | D  1.205402e-002 [↓▼] | Valid D  1.240637e-002 [↓▼] | Stag:  0 Ovfit: 0
[12/11/2015 20:21:12]  2/10 | Batch  61/124 | D  3.891717e-002 [↑ ] | Valid D  1.189688e-002 [↓▼] | Stag:  0 Ovfit: 0
[12/11/2015 20:21:12]  2/10 | Batch  71/124 | D  2.114762e-002 [↓ ] | Valid D  1.083007e-002 [↓▼] | Stag:  0 Ovfit: 0
[12/11/2015 20:21:12]  2/10 | Batch  81/124 | D  5.075417e-003 [↓▼] | Valid D  9.630994e-003 [↓▼] | Stag:  0 Ovfit: 0
[12/11/2015 20:21:12]  2/10 | Batch  91/124 | D  1.343214e-002 [↑ ] | Valid D  8.666289e-003 [↓▼] | Stag:  0 Ovfit: 0
[12/11/2015 20:21:13]  2/10 | Batch 101/124 | D  6.054885e-003 [↓ ] | Valid D  8.039203e-003 [↓▼] | Stag:  0 Ovfit: 0
[12/11/2015 20:21:13]  2/10 | Batch 111/124 | D  1.964125e-002 [↑ ] | Valid D  7.339509e-003 [↓▼] | Stag:  0 Ovfit: 0
[12/11/2015 20:21:13]  2/10 | Batch 121/124 | D  4.401092e-003 [↓▼] | Valid D  6.376633e-003 [↓▼] | Stag:  0 Ovfit: 0
[12/11/2015 20:21:13]  3/10 | Batch   1/124 | D  7.068173e-003 [↑ ] | Valid D  6.426438e-003 [↑ ] | Stag: 10 Ovfit: 0
[12/11/2015 20:21:13]  3/10 | Batch  11/124 | D  3.763680e-003 [↓▼] | Valid D  6.076077e-003 [↓▼] | Stag:  0 Ovfit: 0
[12/11/2015 20:21:13]  3/10 | Batch  21/124 | D  9.855231e-003 [↑ ] | Valid D  5.091224e-003 [↓▼] | Stag:  0 Ovfit: 0
[12/11/2015 20:21:13]  3/10 | Batch  31/124 | D  1.263964e-002 [↑ ] | Valid D  4.641499e-003 [↓▼] | Stag:  0 Ovfit: 0
[12/11/2015 20:21:13]  3/10 | Batch  41/124 | D  1.205439e-002 [↓ ] | Valid D  4.599225e-003 [↓▼] | Stag:  0 Ovfit: 0
[12/11/2015 20:21:13]  3/10 | Batch  51/124 | D  2.941387e-003 [↓▼] | Valid D  4.381890e-003 [↓▼] | Stag:  0 Ovfit: 0
[12/11/2015 20:21:13]  3/10 | Batch  61/124 | D  2.546543e-002 [↑ ] | Valid D  4.439059e-003 [↑ ] | Stag: 10 Ovfit: 0
[12/11/2015 20:21:13]  3/10 | Batch  71/124 | D  9.878366e-003 [↓ ] | Valid D  4.358966e-003 [↓▼] | Stag:  0 Ovfit: 0
[12/11/2015 20:21:13]  3/10 | Batch  81/124 | D  1.868963e-003 [↓▼] | Valid D  3.960044e-003 [↓▼] | Stag:  0 Ovfit: 0
[12/11/2015 20:21:13]  3/10 | Batch  91/124 | D  7.171181e-003 [↑ ] | Valid D  3.634899e-003 [↓▼] | Stag:  0 Ovfit: 0
[12/11/2015 20:21:13]  3/10 | Batch 101/124 | D  2.681098e-003 [↓ ] | Valid D  3.636524e-003 [↑ ] | Stag: 10 Ovfit: 0
[12/11/2015 20:21:13]  3/10 | Batch 111/124 | D  1.502046e-002 [↑ ] | Valid D  3.393996e-003 [↓▼] | Stag:  0 Ovfit: 0
[12/11/2015 20:21:13]  3/10 | Batch 121/124 | D  2.381395e-003 [↓ ] | Valid D  3.178693e-003 [↓▼] | Stag:  0 Ovfit: 0
[12/11/2015 20:21:13]  4/10 | Batch   1/124 | D  3.185510e-003 [↑ ] | Valid D  3.240891e-003 [↑ ] | Stag: 10 Ovfit: 0
[12/11/2015 20:21:13]  4/10 | Batch  11/124 | D  2.029225e-003 [↓ ] | Valid D  3.163968e-003 [↓ ] | Stag: 20 Ovfit: 0
[12/11/2015 20:21:13]  4/10 | Batch  21/124 | D  6.450378e-003 [↑ ] | Valid D  2.772849e-003 [↓▼] | Stag:  0 Ovfit: 0
[12/11/2015 20:21:13]  4/10 | Batch  31/124 | D  7.448227e-003 [↑ ] | Valid D  2.572560e-003 [↓▼] | Stag:  0 Ovfit: 0
[12/11/2015 20:21:13]  4/10 | Batch  41/124 | D  9.700718e-003 [↑ ] | Valid D  2.693694e-003 [↑ ] | Stag: 10 Ovfit: 0
[12/11/2015 20:21:13]  4/10 | Batch  51/124 | D  1.799919e-003 [↓▼] | Valid D  2.737873e-003 [↑ ] | Stag: 20 Ovfit: 1
[12/11/2015 20:21:13]  4/10 | Batch  61/124 | D  1.919956e-002 [↑ ] | Valid D  2.778393e-003 [↑ ] | Stag: 30 Ovfit: 3
[12/11/2015 20:21:13]  4/10 | Batch  71/124 | D  5.462923e-003 [↓ ] | Valid D  2.870561e-003 [↑ ] | Stag: 40 Ovfit: 3
[12/11/2015 20:21:13]  4/10 | Batch  81/124 | D  1.455469e-003 [↓▼] | Valid D  2.632472e-003 [↓ ] | Stag: 50 Ovfit: 4
[12/11/2015 20:21:14]  4/10 | Batch  91/124 | D  5.270801e-003 [↑ ] | Valid D  2.455564e-003 [↓▼] | Stag:  0 Ovfit: 0
[12/11/2015 20:21:14]  4/10 | Batch 101/124 | D  2.057914e-003 [↓ ] | Valid D  2.511977e-003 [↑ ] | Stag: 10 Ovfit: 0
[12/11/2015 20:21:14]  4/10 | Batch 111/124 | D  1.314815e-002 [↑ ] | Valid D  2.393763e-003 [↓▼] | Stag:  0 Ovfit: 0
[12/11/2015 20:21:14]  4/10 | Batch 121/124 | D  2.033168e-003 [↓ ] | Valid D  2.358985e-003 [↓▼] | Stag:  0 Ovfit: 0
[12/11/2015 20:21:14]  5/10 | Batch   1/124 | D  2.199435e-003 [↑ ] | Valid D  2.389120e-003 [↑ ] | Stag: 10 Ovfit: 0
[12/11/2015 20:21:14]  5/10 | Batch  11/124 | D  1.668178e-003 [↓ ] | Valid D  2.356529e-003 [↓ ] | Stag: 20 Ovfit: 0
[12/11/2015 20:21:14]  5/10 | Batch  21/124 | D  5.649061e-003 [↑ ] | Valid D  2.151499e-003 [↓▼] | Stag:  0 Ovfit: 0
[12/11/2015 20:21:14]  5/10 | Batch  31/124 | D  5.264180e-003 [↓ ] | Valid D  2.038927e-003 [↓▼] | Stag:  0 Ovfit: 0
[12/11/2015 20:21:14]  5/10 | Batch  41/124 | D  8.416546e-003 [↑ ] | Valid D  2.145057e-003 [↑ ] | Stag: 10 Ovfit: 0
[12/11/2015 20:21:14]  5/10 | Batch  51/124 | D  1.564733e-003 [↓ ] | Valid D  2.208556e-003 [↑ ] | Stag: 20 Ovfit: 0
[12/11/2015 20:21:14]  5/10 | Batch  61/124 | D  1.581773e-002 [↑ ] | Valid D  2.233998e-003 [↑ ] | Stag: 30 Ovfit: 0
[12/11/2015 20:21:14]  5/10 | Batch  71/124 | D  3.898179e-003 [↓ ] | Valid D  2.347554e-003 [↑ ] | Stag: 40 Ovfit: 0
[12/11/2015 20:21:14]  5/10 | Batch  81/124 | D  1.395002e-003 [↓▼] | Valid D  2.182974e-003 [↓ ] | Stag: 50 Ovfit: 1
[12/11/2015 20:21:14]  5/10 | Batch  91/124 | D  4.450763e-003 [↑ ] | Valid D  2.069927e-003 [↓ ] | Stag: 60 Ovfit: 1
[12/11/2015 20:21:14]  5/10 | Batch 101/124 | D  1.927794e-003 [↓ ] | Valid D  2.129479e-003 [↑ ] | Stag: 70 Ovfit: 1
[12/11/2015 20:21:14]  5/10 | Batch 111/124 | D  1.238949e-002 [↑ ] | Valid D  2.059099e-003 [↓ ] | Stag: 80 Ovfit: 1
[12/11/2015 20:21:14]  5/10 | Batch 121/124 | D  1.969593e-003 [↓ ] | Valid D  2.072177e-003 [↑ ] | Stag: 90 Ovfit: 1
[12/11/2015 20:21:14]  6/10 | Batch   1/124 | D  1.885590e-003 [↓ ] | Valid D  2.087292e-003 [↑ ] | Stag:100 Ovfit: 1
[12/11/2015 20:21:14]  6/10 | Batch  11/124 | D  1.577425e-003 [↓ ] | Valid D  2.074389e-003 [↓ ] | Stag:110 Ovfit: 1
[12/11/2015 20:21:14]  6/10 | Batch  21/124 | D  5.410788e-003 [↑ ] | Valid D  1.943973e-003 [↓▼] | Stag:  0 Ovfit: 0
[12/11/2015 20:21:14]  6/10 | Batch  31/124 | D  4.188792e-003 [↓ ] | Valid D  1.863442e-003 [↓▼] | Stag:  0 Ovfit: 0
[12/11/2015 20:21:14]  6/10 | Batch  41/124 | D  7.516511e-003 [↑ ] | Valid D  1.951990e-003 [↑ ] | Stag: 10 Ovfit: 0
[12/11/2015 20:21:14]  6/10 | Batch  51/124 | D  1.510475e-003 [↓ ] | Valid D  2.003860e-003 [↑ ] | Stag: 20 Ovfit: 0
[12/11/2015 20:21:14]  6/10 | Batch  61/124 | D  1.375423e-002 [↑ ] | Valid D  2.020531e-003 [↑ ] | Stag: 30 Ovfit: 0
[12/11/2015 20:21:14]  6/10 | Batch  71/124 | D  3.260145e-003 [↓ ] | Valid D  2.129138e-003 [↑ ] | Stag: 40 Ovfit: 0
[12/11/2015 20:21:15]  6/10 | Batch  81/124 | D  1.402565e-003 [↓ ] | Valid D  2.002138e-003 [↓ ] | Stag: 50 Ovfit: 0
[12/11/2015 20:21:15]  6/10 | Batch  91/124 | D  3.999386e-003 [↑ ] | Valid D  1.920336e-003 [↓ ] | Stag: 60 Ovfit: 0
[12/11/2015 20:21:15]  6/10 | Batch 101/124 | D  1.929424e-003 [↓ ] | Valid D  1.976652e-003 [↑ ] | Stag: 70 Ovfit: 0
[12/11/2015 20:21:15]  6/10 | Batch 111/124 | D  1.205915e-002 [↑ ] | Valid D  1.926643e-003 [↓ ] | Stag: 80 Ovfit: 0
[12/11/2015 20:21:15]  6/10 | Batch 121/124 | D  1.978536e-003 [↓ ] | Valid D  1.951888e-003 [↑ ] | Stag: 90 Ovfit: 0
[12/11/2015 20:21:15]  7/10 | Batch   1/124 | D  1.769614e-003 [↓ ] | Valid D  1.959661e-003 [↑ ] | Stag:100 Ovfit: 0
[12/11/2015 20:21:15]  7/10 | Batch  11/124 | D  1.555518e-003 [↓ ] | Valid D  1.955613e-003 [↓ ] | Stag:110 Ovfit: 0
[12/11/2015 20:21:15]  7/10 | Batch  21/124 | D  5.217655e-003 [↑ ] | Valid D  1.861573e-003 [↓ ] | Stag:120 Ovfit: 0
[12/11/2015 20:21:15]  7/10 | Batch  31/124 | D  3.625835e-003 [↓ ] | Valid D  1.796666e-003 [↓▼] | Stag:  0 Ovfit: 0
[12/11/2015 20:21:15]  7/10 | Batch  41/124 | D  6.929778e-003 [↑ ] | Valid D  1.872346e-003 [↑ ] | Stag: 10 Ovfit: 0
[12/11/2015 20:21:15]  7/10 | Batch  51/124 | D  1.502809e-003 [↓ ] | Valid D  1.913079e-003 [↑ ] | Stag: 20 Ovfit: 0
[12/11/2015 20:21:15]  7/10 | Batch  61/124 | D  1.241405e-002 [↑ ] | Valid D  1.924762e-003 [↑ ] | Stag: 30 Ovfit: 0
[12/11/2015 20:21:15]  7/10 | Batch  71/124 | D  2.962820e-003 [↓ ] | Valid D  2.024504e-003 [↑ ] | Stag: 40 Ovfit: 0
[12/11/2015 20:21:15]  7/10 | Batch  81/124 | D  1.421725e-003 [↓ ] | Valid D  1.919308e-003 [↓ ] | Stag: 50 Ovfit: 0
[12/11/2015 20:21:15]  7/10 | Batch  91/124 | D  3.717377e-003 [↑ ] | Valid D  1.854433e-003 [↓ ] | Stag: 60 Ovfit: 0
[12/11/2015 20:21:15]  7/10 | Batch 101/124 | D  1.973184e-003 [↓ ] | Valid D  1.907719e-003 [↑ ] | Stag: 70 Ovfit: 0
[12/11/2015 20:21:15]  7/10 | Batch 111/124 | D  1.190252e-002 [↑ ] | Valid D  1.867085e-003 [↓ ] | Stag: 80 Ovfit: 0
[12/11/2015 20:21:15]  7/10 | Batch 121/124 | D  2.006255e-003 [↓ ] | Valid D  1.894716e-003 [↑ ] | Stag: 90 Ovfit: 0
[12/11/2015 20:21:15]  8/10 | Batch   1/124 | D  1.721533e-003 [↓ ] | Valid D  1.898627e-003 [↑ ] | Stag:100 Ovfit: 0
[12/11/2015 20:21:15]  8/10 | Batch  11/124 | D  1.553262e-003 [↓ ] | Valid D  1.897926e-003 [↓ ] | Stag:110 Ovfit: 0
[12/11/2015 20:21:15]  8/10 | Batch  21/124 | D  5.004487e-003 [↑ ] | Valid D  1.823838e-003 [↓ ] | Stag:120 Ovfit: 0
[12/11/2015 20:21:15]  8/10 | Batch  31/124 | D  3.308986e-003 [↓ ] | Valid D  1.768821e-003 [↓▼] | Stag:  0 Ovfit: 0
[12/11/2015 20:21:15]  8/10 | Batch  41/124 | D  6.563510e-003 [↑ ] | Valid D  1.835302e-003 [↑ ] | Stag: 10 Ovfit: 0
[12/11/2015 20:21:15]  8/10 | Batch  51/124 | D  1.507999e-003 [↓ ] | Valid D  1.868091e-003 [↑ ] | Stag: 20 Ovfit: 0
[12/11/2015 20:21:15]  8/10 | Batch  61/124 | D  1.148601e-002 [↑ ] | Valid D  1.876653e-003 [↑ ] | Stag: 30 Ovfit: 0
[12/11/2015 20:21:16]  8/10 | Batch  71/124 | D  2.807777e-003 [↓ ] | Valid D  1.968064e-003 [↑ ] | Stag: 40 Ovfit: 0
[12/11/2015 20:21:16]  8/10 | Batch  81/124 | D  1.440011e-003 [↓ ] | Valid D  1.876611e-003 [↓ ] | Stag: 50 Ovfit: 0
[12/11/2015 20:21:16]  8/10 | Batch  91/124 | D  3.522004e-003 [↑ ] | Valid D  1.821817e-003 [↓ ] | Stag: 60 Ovfit: 0
[12/11/2015 20:21:16]  8/10 | Batch 101/124 | D  2.031282e-003 [↓ ] | Valid D  1.872902e-003 [↑ ] | Stag: 70 Ovfit: 0
[12/11/2015 20:21:16]  8/10 | Batch 111/124 | D  1.182362e-002 [↑ ] | Valid D  1.836957e-003 [↓ ] | Stag: 80 Ovfit: 0
[12/11/2015 20:21:16]  8/10 | Batch 121/124 | D  2.035742e-003 [↓ ] | Valid D  1.864137e-003 [↑ ] | Stag: 90 Ovfit: 0
[12/11/2015 20:21:16]  9/10 | Batch   1/124 | D  1.699795e-003 [↓ ] | Valid D  1.865989e-003 [↑ ] | Stag:100 Ovfit: 0
[12/11/2015 20:21:16]  9/10 | Batch  11/124 | D  1.556397e-003 [↓ ] | Valid D  1.866347e-003 [↑ ] | Stag:110 Ovfit: 0
[12/11/2015 20:21:16]  9/10 | Batch  21/124 | D  4.788828e-003 [↑ ] | Valid D  1.804229e-003 [↓ ] | Stag:120 Ovfit: 0
[12/11/2015 20:21:16]  9/10 | Batch  31/124 | D  3.119682e-003 [↓ ] | Valid D  1.756223e-003 [↓▼] | Stag:  0 Ovfit: 0
[12/11/2015 20:21:16]  9/10 | Batch  41/124 | D  6.336636e-003 [↑ ] | Valid D  1.816257e-003 [↑ ] | Stag: 10 Ovfit: 0
[12/11/2015 20:21:16]  9/10 | Batch  51/124 | D  1.516153e-003 [↓ ] | Valid D  1.843593e-003 [↑ ] | Stag: 20 Ovfit: 0
[12/11/2015 20:21:16]  9/10 | Batch  61/124 | D  1.080968e-002 [↑ ] | Valid D  1.850113e-003 [↑ ] | Stag: 30 Ovfit: 0
[12/11/2015 20:21:16]  9/10 | Batch  71/124 | D  2.720124e-003 [↓ ] | Valid D  1.934669e-003 [↑ ] | Stag: 40 Ovfit: 0
[12/11/2015 20:21:16]  9/10 | Batch  81/124 | D  1.455176e-003 [↓ ] | Valid D  1.852409e-003 [↓ ] | Stag: 50 Ovfit: 0
[12/11/2015 20:21:16]  9/10 | Batch  91/124 | D  3.375944e-003 [↑ ] | Valid D  1.804057e-003 [↓ ] | Stag: 60 Ovfit: 0
[12/11/2015 20:21:16]  9/10 | Batch 101/124 | D  2.093168e-003 [↓ ] | Valid D  1.853583e-003 [↑ ] | Stag: 70 Ovfit: 0
[12/11/2015 20:21:16]  9/10 | Batch 111/124 | D  1.178356e-002 [↑ ] | Valid D  1.820183e-003 [↓ ] | Stag: 80 Ovfit: 0
[12/11/2015 20:21:16]  9/10 | Batch 121/124 | D  2.061530e-003 [↓ ] | Valid D  1.846045e-003 [↑ ] | Stag: 90 Ovfit: 0
[12/11/2015 20:21:16] 10/10 | Batch   1/124 | D  1.689459e-003 [↓ ] | Valid D  1.846794e-003 [↑ ] | Stag:100 Ovfit: 0
[12/11/2015 20:21:16] 10/10 | Batch  11/124 | D  1.560583e-003 [↓ ] | Valid D  1.847311e-003 [↑ ] | Stag:110 Ovfit: 0
[12/11/2015 20:21:16] 10/10 | Batch  21/124 | D  4.588457e-003 [↑ ] | Valid D  1.792883e-003 [↓ ] | Stag:120 Ovfit: 0
[12/11/2015 20:21:16] 10/10 | Batch  31/124 | D  3.001853e-003 [↓ ] | Valid D  1.750141e-003 [↓ ] | Stag:130 Ovfit: 0
[12/11/2015 20:21:16] 10/10 | Batch  41/124 | D  6.195725e-003 [↑ ] | Valid D  1.805622e-003 [↑ ] | Stag:140 Ovfit: 0
[12/11/2015 20:21:16] 10/10 | Batch  51/124 | D  1.524289e-003 [↓ ] | Valid D  1.829196e-003 [↑ ] | Stag:150 Ovfit: 0
[12/11/2015 20:21:17] 10/10 | Batch  61/124 | D  1.029841e-002 [↑ ] | Valid D  1.834366e-003 [↑ ] | Stag:160 Ovfit: 0
[12/11/2015 20:21:17] 10/10 | Batch  71/124 | D  2.667856e-003 [↓ ] | Valid D  1.913492e-003 [↑ ] | Stag:170 Ovfit: 0
[12/11/2015 20:21:17] 10/10 | Batch  81/124 | D  1.467351e-003 [↓ ] | Valid D  1.837669e-003 [↓ ] | Stag:180 Ovfit: 0
[12/11/2015 20:21:17] 10/10 | Batch  91/124 | D  3.261143e-003 [↑ ] | Valid D  1.793646e-003 [↓ ] | Stag:190 Ovfit: 0
[12/11/2015 20:21:17] 10/10 | Batch 101/124 | D  2.153974e-003 [↓ ] | Valid D  1.842048e-003 [↑ ] | Stag:200 Ovfit: 0
[12/11/2015 20:21:17] 10/10 | Batch 111/124 | D  1.176465e-002 [↑ ] | Valid D  1.810117e-003 [↓ ] | Stag:210 Ovfit: 0
[12/11/2015 20:21:17] 10/10 | Batch 121/124 | D  2.082179e-003 [↓ ] | Valid D  1.834467e-003 [↑ ] | Stag:220 Ovfit: 0
[12/11/2015 20:21:17] Duration       : 00:00:05.2093910
[12/11/2015 20:21:17] Loss initial   : D  4.748471e-001
[12/11/2015 20:21:17] Loss final     : D  1.395002e-003 (Best)
[12/11/2015 20:21:17] Loss change    : D -4.734521e-001 (-99.71 %)
[12/11/2015 20:21:17] Loss chg. / s  : D -9.088434e-002
[12/11/2015 20:21:17] Epochs / s     : 1.919610181
[12/11/2015 20:21:17] Epochs / min   : 115.1766109
[12/11/2015 20:21:17] --- Training finished

After five seconds of training, we can see that the characteristics of the problem domain (distinguishing between the digits of 0 and 1) are captured in the model weights.
*)

let l = (n.[0] :?> Linear)
l.VisualizeWRowsAsImageGrid(28) |> printfn "%s"

(**
    [lang=cs]
    Hype.Neural.Linear
       784 -> 1
       Learnable parameters: 785
       Init: Standard
    W's rows reshaped to (28 x 28), presented in a (1 x 1) grid:
    DM : 28 x 28
    (ASCII rendering of the trained weights, elided)

    b:
    DV : 1

### Classifier

You can create classifiers by instantiating types such as **LogisticClassifier** or **SoftmaxClassifier**, and passing a classification function of the form **DM->DM** in the constructor. Alternatively, you can directly pass the model we have just trained. Please see the [API reference](reference/index.html) and the [source code](https://github.com/hypelib/Hype/blob/master/src/Hype/Classifier.fs) for a better understanding of how classifiers are implemented.
*)

let cc = LogisticClassifier(n)

(**
Let's test the class predictions for 10 random elements from the MNIST test set, which, if you remember, we've filtered to have only 0s and 1s.
*)

let pred = cc.Classify(MNISTtest01.X.[*,0..9]);;
let real = MNISTtest01.Y.[*, 0..9] |> DM.toDV |> DV.toArray |> Array.map (float32>>int)

(**
val pred : int [] = [|1; 0; 1; 0; 1; 0; 0; 1; 1; 1|]
val real : int [] = [|1; 0; 1; 0; 1; 0; 0; 1; 1; 1|]
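As a quick sanity check, we can also compare these two arrays directly; a minimal sketch:

    [lang=fsharp]
    // Fraction of these ten samples where prediction and label disagree.
    Array.map2 (fun p r -> if p = r then 0.0 else 1.0) pred real
    |> Array.average // 0.0 here, i.e., all ten predictions are correct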
The classifier seems to be working well. We can compute the classification error for a given dataset.
*)

let error = cc.ClassificationError(MNISTtest01);;

(**
val error : float32 = 0.000472813234f
The classification error is 0.047%.

Finally, this is how you would classify single digits.
*)

let cls = cc.Classify(MNISTtest01.X.[*,0]);;

MNISTtest01.X.[*,0] |> DV.visualizeAsDM 28 |> printfn "%s"

(**
    [lang=cs]
    val cls : int = 1

    DM : 28 x 28
    (ASCII rendering of the digit "1", elided)

And this is how you would classify many digits efficiently at the same time, by running them through the model together as the columns of an input matrix.
*)

let clss = cc.Classify(MNISTtest01.X.[*,5..9]);;

MNISTtest01.[5..9].VisualizeXColsAsImageGrid(28) |> printfn "%s"

(**
    [lang=cs]
    val clss : int [] = [|0; 0; 1; 1; 1|]

    Hype.Dataset
       X: 784 x 5
       Y: 1 x 5
    X's columns reshaped to (28 x 28), presented in a (2 x 3) grid:
    DM : 56 x 84
    (ASCII rendering of the digit grid, elided)

*)

================================================
FILE: docs/input/Training.fsx
================================================
(*** hide ***)
#r "../../src/Hype/bin/Release/netstandard2.0/DiffSharp.dll"
#r "../../src/Hype/bin/Release/netstandard2.0/Hype.dll"
#I "../../packages/R.NET.Community/lib/net40/"
#I "../../packages/R.NET.Community.FSharp/lib/net40/"
#I "../../packages/RProvider"
#load "RProvider.fsx"
//fsi.ShowDeclarationValues <- false
System.Environment.CurrentDirectory <- __SOURCE_DIRECTORY__

(**
Training
========

In [optimization](optimization.html), we've seen how nested AD and gradient-based optimization work together. Training a model is the optimization of model parameters to minimize a loss function, or equivalently, to maximize the likelihood of a given set of data under the model parameters. In addition to the _optimization method_, _learning rate_, _momentum_, and _gradient clipping_ parameters we've seen, this introduces parameters for the _loss function_, _regularization_, _training batches_, and _validation and early stopping_. But let's start with the **Dataset** type, which we will use for keeping the training, validation, and test data for the training procedure.

Dataset
-------

For supervised training, data consists of pairs of input vectors $\mathbf{x}_i \in \mathbb{R}^{d_x}$ and output vectors $\mathbf{y}_i \in \mathbb{R}^{d_y}$. We represent data using the **Dataset** type, which is basically a pair of matrices

$$$
\begin{eqnarray*}
\mathbf{X} &\in& \mathbb{R}^{d_x \times n}\\
\mathbf{Y} &\in& \mathbb{R}^{d_y \times n}\\
\end{eqnarray*}

holding these vectors, where $n$ is the number of input–output pairs, $d_x$ is the number of input features, and $d_y$ is the number of output features. In other words, each of the $n$ columns of the matrix $\mathbf{X}$ is an input vector of length $d_x$ and each of the $n$ columns of matrix $\mathbf{Y}$ is the corresponding output vector of length $d_y$.
Keeping data in matrix form is essential for harnessing high-performance linear algebra engines tailored for your CPU or GPU. Hype, by default, uses a high-performance CPU backend using OpenBLAS for BLAS/LAPACK operations, and parallel implementations of non-BLAS operations such as elementwise functions.
*)

open Hype
open DiffSharp.AD.Float32

let x = toDM [[0; 0; 1; 1]
              [0; 1; 0; 1]]
let y = toDM [[0; 1; 1; 0]]

let XORdata = Dataset(x, y)

(**
Hype provides several utility functions for loading data into matrices from images, delimited text files (e.g., CSV), or commonly used dataset files such as MNIST.
*)

let MNIST = Dataset(Util.LoadMNISTPixels("train-images.idx3-ubyte", 60000),
                    Util.LoadMNISTLabels("train-labels.idx1-ubyte", 60000)
                    |> toDV |> DM.ofDV 1).NormalizeX()

let MNISTtest = Dataset(Util.LoadMNISTPixels("t10k-images.idx3-ubyte", 10000),
                        Util.LoadMNISTLabels("t10k-labels.idx1-ubyte", 10000)
                        |> toDV |> DM.ofDV 1).NormalizeX()

(**
You can see the [API reference](reference/index.html) and the [source code](https://github.com/hypelib/Hype/blob/master/src/Hype/Hype.fs) for various ways of constructing Datasets.

Training parameters
-------------------

Let's load the housing prices dataset from the [Stanford UFLDL Tutorial](http://ufldl.stanford.edu/tutorial/supervised/LinearRegression/) and divide it into input and output pairs. We will later train a simple linear regression model, to demonstrate the use of training parameters.
*)

let h = Util.LoadDelimited("housing.data") |> DM.Transpose
h.ToString() |> printfn "%s"

(**
DM : 14 x 506
(14 feature rows × 506 columns of the housing dataset; full matrix printout elided)
5.94      6.21       6.4      6.13      6.11       6.4      6.25      5.36       5.8      8.78      3.56      4.96      3.86      4.97      6.68      7.02      6.22      5.88      4.91      4.14      7.31      6.65      6.79      6.38      6.22      6.97      6.55      5.54      5.52      4.37      5.28      4.65         5      4.88      5.39      5.71      6.05      5.04      6.19      5.89      6.47      6.41      5.75      5.45      5.85      5.99      6.34       6.4      5.35      5.53      5.68      4.14      5.61      5.62      6.85      5.76      6.66      4.63      5.16      4.52      6.43      6.78       5.3      5.96      6.82      6.41      6.01      5.65       6.1      5.57       5.9      5.84       6.2      6.19      6.38      6.35      6.83      6.43      6.44      6.21      6.63      6.46      6.15      5.94      5.63      5.82      6.41      6.22      6.49      5.85      6.46      6.34      6.25      6.19      6.42      6.75      6.66       6.3      7.39      6.73      6.53      5.98      5.94       6.3      6.08       6.7      6.38      6.32      6.51      6.21      5.76      5.95         6      5.93      5.71      6.17      6.23      6.44      6.98      5.43      6.16      6.48       5.3      6.19      6.23      6.24      6.75      7.06      5.76      5.87      6.31      6.11      5.91      5.45      5.41      5.09      5.98      5.98      5.71      5.93      5.67      5.39      5.79      6.02      5.57      6.03      6.59      6.12      6.98      6.79      6.03 
     65.2      78.9      61.1      45.8      54.2      58.7      66.6      96.1       100      85.9      94.3      82.9        39      61.8      84.5      56.5      29.3      81.7      36.6      69.5      98.1      89.2      91.7       100      94.1      85.7      90.3      88.8      94.4      87.3      94.1       100        82        95      96.9      68.2      61.4      41.5      30.2      21.8      15.8       2.9       6.6       6.5        40      33.8      33.3      85.5      95.3        62      45.7        63      21.1      21.4      47.6      21.9      35.7      40.5      29.2      47.2      66.2      93.4      67.8      43.4      59.5      17.8      31.1      21.4      36.8        33       6.6      17.5       7.8       6.2         6        45      74.5      45.8      53.7      36.6      33.5      70.4      32.2      46.7        48      56.1      45.1      56.8      86.3      63.1      66.1      73.9      53.6      28.9      77.3      57.8      69.6        76      36.9      62.5      79.9      71.3      85.4      87.4        90      96.7      91.9      85.2      97.1      91.2      54.4      81.6      92.9      95.4      84.2      88.2      72.5      82.6      73.1      65.2      69.7      84.1      92.9        97      95.8      88.4      95.6        96      98.8      94.7      98.9      97.7      97.9      95.4      98.4      98.2      93.5      98.4      98.2      97.9      93.6       100       100       100      97.8       100       100      95.7      93.8      94.9      97.3       100        88      98.5        96      82.6        94      97.4       100       100      92.6      90.8      98.2      93.9      91.8        93      96.2      79.2      96.1      95.2      94.6      97.3      88.5      84.1      68.7      33.1      47.2      73.4      74.4      58.4      83.3      62.2      92.2      95.6      89.8      68.8      53.6      41.1      29.1      38.9      21.5      30.8      26.3       9.9      18.8        32      34.1      36.6      38.3      15.3      13.9      38.4      15.7      33.2      31.9      22.3      52.5      72.7      59.1       100      92.1      88.6      53.8      32.3       9.8      42.4        56      85.1      93.8      92.4      88.5      91.3      77.7      80.8      78.3        83      86.5      79.9        17      21.4      68.1      76.9      73.3      70.4      66.5      61.5      76.5      71.6      18.5      42.2      54.3      65.1      52.9       7.8      76.5      70.2      34.9      79.2      49.1      17.5        13       8.9       6.8       8.4        32      19.1      34.2      86.9       100       100      81.8      89.4      91.5      94.5      91.6      62.8      84.6        67      52.6      61.5      42.1      16.3      58.7      51.8      32.9      42.8        49      27.6      32.1      32.2      64.5      37.2      49.7      24.8      20.8      31.9      31.5      31.3      45.6      22.9      27.9      27.7      23.4      18.4      42.3      31.1        51        58      20.1        10      47.4      40.4      18.4      17.7      41.1      58.1      71.9      70.3      82.5      76.7      37.8      52.8      90.4      82.8      87.3      77.7      83.2      71.7      67.2      58.8      52.3      54.3      49.9      74.3      40.1      14.7      28.9      43.7      25.8      17.2      32.2      28.4      23.3      38.1      38.5      34.5      46.3      59.6      37.3      45.4      58.5      49.3      59.7      56.4      28.1      48.5      52.3      27.7      29.7      34.5      44.4      35.9      18.5      36.1      21.9      
19.5      97.4        91      83.4      81.3        88      91.1      96.2        89      82.9      87.9      91.4       100       100      96.8      97.5       100      89.6       100       100      97.9      93.3      98.8      96.2       100      91.9      99.1       100       100      91.2      98.1       100      89.5       100      98.9        97      82.5        97      92.6      94.7      98.8        96      98.9       100      77.8       100       100       100        96      85.4       100       100       100      97.9       100       100       100       100       100       100       100      90.8      89.1       100      76.5       100      95.3      87.6      85.1      70.6      95.4      59.7      78.7      78.1      95.6      86.1      94.3      74.8      87.9        95      94.6      93.3       100      87.9      93.9      92.4      97.2       100       100      96.6      94.8      96.4      96.6      98.7      98.3      92.6      98.2      91.8      99.3      94.1      86.5      87.9      80.3      83.7      84.4        90      88.4        83      89.9      65.4      48.2      84.7      94.5        71      56.7        84      90.7        75      67.6      95.4      97.4      93.6      97.3      96.7        88      64.7      74.9        77      40.3      41.9      51.9      79.8      53.2      92.7      98.3        98      98.8      83.5        54      42.6      28.8      72.9      70.6      65.3      73.5      79.7      69.1      76.7        91      89.3      80.8 
     4.09      4.97      4.97      6.06      6.06      6.06      5.56      5.95      6.08      6.59      6.35      6.23      5.45      4.71      4.46       4.5       4.5      4.26       3.8       3.8       3.8      4.01      3.98       4.1       4.4      4.45      4.68      4.45      4.45      4.24      4.23      4.18      3.99      3.79      3.76      3.36      3.38      3.93      3.85       5.4       5.4      5.72      5.72      5.72      5.72       5.1       5.1      5.69      5.87      6.09      6.81      6.81      6.81      6.81      7.32       8.7      9.19      8.32      7.81      6.93      7.23      6.82      7.23      7.98      9.22      6.61      6.61       6.5       6.5       6.5      5.29      5.29      5.29      5.29      4.25       4.5      4.05      4.09      5.01       4.5       5.4       5.4       5.4       5.4      4.78      4.44      4.43      3.75      3.42      3.41      3.09      3.09      3.67      3.67      3.62       3.5       3.5       3.5       3.5       3.5      2.78      2.86      2.71      2.71      2.42      2.11      2.21      2.12      2.43      2.55      2.78      2.68      2.35      2.55      2.26      2.46      2.73      2.75      2.48      2.76      2.26       2.2      2.09      1.94      2.01      1.99      1.76      1.79      1.81      1.98      2.12      2.27      2.33      2.47      2.35      2.11      1.97      1.85      1.67      1.67      1.61      1.44      1.32      1.41      1.35      1.42      1.52      1.46      1.53      1.53      1.62      1.59      1.61      1.62      1.75      1.75      1.74      1.88      1.76      1.77       1.8      1.97      2.04      2.16      2.42      2.28      2.05      2.43       2.1      2.26      2.43      2.39       2.6      2.65       2.7      3.13      3.55      3.32      2.92      2.83      2.74       2.6       2.7      2.85      2.99      3.28       3.2      3.79      4.57      4.57      6.48      6.48      6.48      6.22      6.22      5.65      7.31      7.31      7.31      7.65      7.65      6.27      6.27      5.12      5.12      3.95      4.35      4.35      4.24      3.88      3.88      3.67      3.65      3.95      3.59      3.95      3.11      3.42      2.89      3.36      2.86      3.05      3.27      3.27      2.89      2.89      3.22      3.22      3.38      3.38      3.67      3.67      3.84      3.65      3.65      3.65      4.15      4.15      6.19      6.19      6.34      6.34      7.04      7.04      7.95      7.95      8.06      8.06      7.83      7.83       7.4       7.4      8.91      8.91      9.22      9.22      6.34       1.8      1.89      2.01      2.11      2.14      2.29      2.08      1.93      1.99      2.13      2.42      2.87      3.92      4.43      4.43      3.92      4.37      4.08      4.27      4.79      4.86      4.14       4.1      4.69      5.24      5.21      5.89      7.31      7.31      9.09      7.32      7.32      7.32      5.12      5.12      5.12       5.5       5.5      5.96      5.96      6.32      7.83      7.83      7.83      5.49      5.49      5.49      4.02      3.37       3.1      3.18      3.32       3.1      2.52      2.64      2.83      3.26       3.6      3.95         4      4.03      3.53         4      4.54      4.54      4.72      4.72      4.72      5.42      5.42      5.42      5.21      5.21      5.87      6.64      6.64      6.46      6.46      5.99      5.23      5.62      4.81      4.81      4.81      7.04      6.27      5.73      6.47      8.01      8.01      8.54      8.34      8.79      8.79      10.7      10.7      12.1      10.6      
10.6      2.12      2.51      2.72      2.51      2.52       2.3       2.1       1.9       1.9      1.61      1.75      1.51      1.33      1.36       1.2      1.17      1.13      1.17      1.14      1.32      1.34      1.36      1.39      1.39      1.42      1.52      1.58      1.53      1.44      1.43      1.47      1.52      1.59      1.73      1.93      2.17      1.77      1.79      1.78      1.73      1.68      1.63      1.49       1.5      1.59      1.57      1.64       1.7      1.61      1.43      1.18      1.29      1.45      1.47      1.41      1.53      1.55      1.59      1.66      1.83      1.82      1.65       1.8      1.79      1.86      1.87      1.95      2.02      2.06      1.91         2      1.86      1.94      1.97      2.05      2.09       2.2      2.32      2.22      2.12         2      1.91      1.82      1.82      1.87      2.07         2      1.98       1.9      1.99      2.07       2.2      2.26      2.19      2.32      2.36      2.37      2.45       2.5      2.44      2.58      2.78      2.78      2.72       2.6      2.57      2.73       2.8      2.96      3.07      2.87      2.54      2.91      2.82      3.03       3.1       2.9      2.53      2.43      2.21      2.31       2.1      2.17      1.95      3.42      3.33      3.41       4.1      3.72      3.99      3.55      3.15      1.82      1.76      1.82      1.87      2.11      2.38      2.38       2.8       2.8      2.89      2.41       2.4       2.5      2.48      2.29      2.17      2.39      2.51 
        1         2         2         3         3         3         5         5         5         5         5         5         5         4         4         4         4         4         4         4         4         4         4         4         4         4         4         4         4         4         4         4         4         4         4         5         5         5         5         3         3         3         3         3         3         3         3         3         3         3         4         4         4         4         3         5         2         5         8         8         8         8         8         8         3         4         4         4         4         4         4         4         4         4         5         5         5         5         5         5         4         4         4         4         3         3         3         3         2         2         2         2         4         4         4         2         2         2         2         2         5         5         5         5         5         5         5         5         5         5         5         6         6         6         6         6         6         6         6         6         2         2         2         2         2         2         2         4         4         4         4         4         4         4         4         4         4         4         4         4         4         4         5         5         5         5         5         5         5         5         5         5         5         5         5         5         5         5         5         5         5         5         5         5         5         5         5         5         5         5         5         5         5         5         5         5         5         5         5         3         3         3         3         3         3         3         3         5         5         5         5         5         5         1         1         4         2         2         2         3         3         2         2         4         4         4         4         4         4         4         4         4         4         4         4         4         5         5         5         5         8         8         8         8         8         8         8         8         8         8         8         8         8         8         8         8         8         8         6         6         6         6         6         6         7         7         7         7         7         7         7         7         7         7         1         1         3         5         5         5         5         5         5         5         5         5         5         5         5         3         3         3         3         3         4         4         4         4         4         5         5         5         5         1         1         1         1         6         6         6         4         4         4         4         4         4         4         4         5         5         5         7         7         7         7         7         7         7         4         4         4         4         4         4         4         4         4         4         4         4         5         5         5         5         5         5         5         5         4         4         4         1         1         5         5         5         5         5         5         5         5         1         1         5         5         3         3         4         4         1         1         4         4         5         4      
   4        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24        24         4         4         4         4         4         6         6         6         6         6         6         6         6         1         1         1         1         1 
      296       242       242       222       222       222       311       311       311       311       311       311       311       307       307       307       307       307       307       307       307       307       307       307       307       307       307       307       307       307       307       307       307       307       307       279       279       279       279       252       252       233       233       233       233       233       233       233       233       233       243       243       243       243       469       226       313       256       284       284       284       284       284       284       216       337       337       345       345       345       305       305       305       305       398       398       398       398       398       398       281       281       281       281       247       247       247       247       270       270       270       270       270       270       270       276       276       276       276       276       384       384       384       384       384       384       384       384       384       384       384       432       432       432       432       432       432       432       432       432       188       188       188       188       188       188       188       437       437       437       437       437       437       437       437       437       437       437       437       437       437       437       403       403       403       403       403       403       403       403       403       403       403       403       403       403       403       403       403       403       403       403       403       403       403       403       403       403       403       403       403       403       296       296       296       296       296       296       296       193       193       193       193       193       193       193       193       398       398       398       398       398       398       265       265       255       329       329       329       402       402       348       348       224       224       277       277       277       277       277       277       277       277       277       277       277       276       276       276       276       307       307       307       307       307       307       307       307       307       307       307       307       307       307       307       307       307       307       300       300       300       300       300       300       330       330       330       330       330       330       330       330       330       330       315       315       244       264       264       264       264       264       264       264       264       264       264       264       264       223       223       223       223       223       254       254       254       254       254       216       216       216       216       198       285       300       241       293       293       293       245       245       245       289       289       289       289       289       358       358       358       329       329       329       222       222       222       222       304       304       304       304       304       304       304       304       304       304       304       304       287       287       287       287       287       287       287       287       430       430       430       304       304       224       224       224       224       224       224       224       224       284       422       370       370       352       352       351       280       335       335       411       411       187       334      
 334       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       666       711       711       711       711       711       391       391       391       391       391       391       391       391       273       273       273       273       273 
     15.3      17.8      17.8      18.7      18.7      18.7      15.2      15.2      15.2      15.2      15.2      15.2      15.2        21        21        21        21        21        21        21        21        21        21        21        21        21        21        21        21        21        21        21        21        21        21      19.2      19.2      19.2      19.2      18.3      18.3      17.9      17.9      17.9      17.9      17.9      17.9      17.9      17.9      17.9      16.8      16.8      16.8      16.8      21.1      17.9      17.3      15.1      19.7      19.7      19.7      19.7      19.7      19.7      18.6      16.1      16.1      18.9      18.9      18.9      19.2      19.2      19.2      19.2      18.7      18.7      18.7      18.7      18.7      18.7        19        19        19        19      18.5      18.5      18.5      18.5      17.8      17.8      17.8      17.8      18.2      18.2      18.2        18        18        18        18        18      20.9      20.9      20.9      20.9      20.9      20.9      20.9      20.9      20.9      20.9      20.9      17.8      17.8      17.8      17.8      17.8      17.8      17.8      17.8      17.8      19.1      19.1      19.1      19.1      19.1      19.1      19.1      21.2      21.2      21.2      21.2      21.2      21.2      21.2      21.2      21.2      21.2      21.2      21.2      21.2      21.2      21.2      14.7      14.7      14.7      14.7      14.7      14.7      14.7      14.7      14.7      14.7      14.7      14.7      14.7      14.7      14.7      14.7      14.7      14.7      14.7      14.7      14.7      14.7      14.7      14.7      14.7      14.7      14.7      14.7      14.7      14.7      16.6      16.6      16.6      16.6      16.6      16.6      16.6      17.8      17.8      17.8      17.8      17.8      17.8      17.8      17.8      15.2      15.2      15.2      15.2      15.2      15.2      15.6      15.6      14.4      12.6      12.6      12.6        17        17      14.7      14.7      14.7      14.7      18.6      18.6      18.6      18.6      18.6      18.6      18.6      18.6      18.6      18.6      18.6      16.4      16.4      16.4      16.4      17.4      17.4      17.4      17.4      17.4      17.4      17.4      17.4      17.4      17.4      17.4      17.4      17.4      17.4      17.4      17.4      17.4      17.4      16.6      16.6      16.6      16.6      16.6      16.6      19.1      19.1      19.1      19.1      19.1      19.1      19.1      19.1      19.1      19.1      16.4      16.4      15.9        13        13        13        13        13        13        13        13        13        13        13        13      18.6      18.6      18.6      18.6      18.6      17.6      17.6      17.6      17.6      17.6      14.9      14.9      14.9      14.9      13.6      15.3      15.3      18.2      16.6      16.6      16.6      19.2      19.2      19.2        16        16        16        16        16      14.8      14.8      14.8      16.1      16.1      16.1      18.4      18.4      18.4      18.4      18.4      18.4      18.4      18.4      18.4      18.4      18.4      18.4      18.4      18.4      18.4      18.4      19.6      19.6      19.6      19.6      19.6      19.6      19.6      19.6      16.9      16.9      16.9      16.9      16.9      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      15.5      15.9      17.6      17.6      18.8      18.8      17.9        17      19.7      19.7      18.3      18.3        17        22      
  22      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.2      20.1      20.1      20.1      20.1      20.1      19.2      19.2      19.2      19.2      19.2      19.2      19.2      19.2        21        21        21        21        21 
      397       397       393       395       397       394       396       397       387       387       393       397       391       397       380       396       387       387       289       391       377       393       397       395       394       303       377       306       388       380       360       377       233       359       248       397       378       397       393       396       396       385       383       394       389       397       397       393       397       397       396       394       397       397       397       396       397       393       391       397       395       378       397       396       393       397       397       396       397       397       384       377       391       377       395       383       374       387       386       396       397       396       397       391       397       392       396       395       397       396       392       394       395       396       397       358       392       397       394       397       395       396      70.8       394       393       394       396       388       395       391       393       396       395       397       389       345       393       395       339       392       389       378       378       370       379       385       359       392       397       397       395       397       386       389       263       395       378       394       392       397       388       397       397       397       397       173       169       392       357       352       373       342       343       262       321        88      88.6       363       354       364       339       374       390       388       395       240       369       228       297       330       292       348       397       396       393       391       393       396       391       397       396       397       394       397       391       387       393       394       383       397       378       390       390       393       377       394       397       354       392       397       384       394       395       393       391       397       395       389       381       397       393       395       391       386       349       394       393       393       397       394       392       395       390       397       385       382       387       372       378       380       378       376       386       379       360       377       388       390       379       384       391       395       373       375       372       389       390       376       375       394       396       377       386       397       393       395       386       390       383       392       393       388       387       393       388       392       384       385       390       391       389       397       395       391       397       397       389       393       397       397       387       392       377       396       395       395       342       397       397       372       397       397       397       397       397       397       393       397       368       372       391       396       384       390       394       393       397       397       397       396       350       397       396       393       396       396       391       397       395       396       397       397       397       391       397       394       397       397       382       375       369       394       362       390       389       397       397       395       396       397       397       395       390       397       388       386       365       392       391       390       397       371       392       384       383      
 376       378       391       395       391       375       351       381       353       355       355       316       131       376       375       392       366       348       397       397       397       363       397       397       394       397       397       397       397       286       397       397       397       373       397       394       378       397       397       397       392       397       393       397       338       397       397       376       397       329       385       370       332       315       179       2.6      35.1      28.8       211      88.3      27.3      21.6       127      16.5      48.5       319       320       292      2.52      3.65      7.68      24.7      18.8      96.7      60.7      83.5      81.3        98       100       101       110      27.5      9.32        69       397       391       386       396       387       241      43.1       318       389       397       304      0.32       355       385       376      6.68      50.9      10.5       3.5       272       397       255       391       397       394       397       334        22       331       369       397       397       395       393       375       353       303       396       349       380       383       397       393       395       393       371       389       393       388       395       344       318       390       397       397       397       393       397       397       397       396       397       392       397       397       393       397 
     4.98      9.14      4.03      2.94      5.33      5.21      12.4      19.2      29.9      17.1      20.5      13.3      15.7      8.26      10.3      8.47      6.58      14.7      11.7      11.3        21      13.8      18.7      19.9      16.3      16.5      14.8      17.3      12.8        12      22.6        13      27.7      18.4      20.3      9.68      11.4      8.77      10.1      4.32      1.98      4.84      5.81      7.44      9.55      10.2      14.2      18.8      30.8      16.2      13.5      9.43      5.28      8.43      14.8      4.81      5.77      3.95      6.86      9.22      13.2      14.4      6.73       9.5      8.05      4.67      10.2       8.1      13.1      8.79      6.72      9.88      5.52      7.54      6.78      8.94        12      10.3      12.3       9.1      5.29      7.22      6.72      7.51      9.62      6.53      12.9      8.44       5.5       5.7      8.81       8.2      8.16      6.21      10.6      6.65      11.3      4.21      3.57      6.19      9.42      7.67      10.6      13.4      12.3      16.5      18.7      14.1      12.3      15.6        13      10.2      16.2      17.1      10.5      15.8        12      10.3      15.4      13.6      14.4      14.3      17.9      25.4      17.6      14.8      27.3      17.2      15.4      18.3      12.6      12.3      11.1        15      17.3        17      16.9      14.6      21.3      18.5      24.2      34.4      26.8      26.4      29.3      27.8      16.7      29.5      28.3      21.5      14.1      13.3      12.1      15.8      15.1        15      16.1      4.59      6.43      7.39       5.5      1.73      1.92      3.32      11.6      9.81       3.7      12.1      11.1      11.3      14.4        12      14.7      9.04      9.64      5.33      10.1      6.29      6.92      5.04      7.56      9.45      4.82      5.68        14      13.2      4.45      6.68      4.56      5.39       5.1      4.69      2.87      5.03      4.38      2.97      4.08      8.61      6.62      4.56      4.45      7.43      3.11      3.81      2.88      10.9        11      18.1      14.7      23.1      17.3        24        16      9.38      29.6      9.47      13.5      9.69      17.9      10.5      9.71      21.5      9.93       7.6      4.14      4.63      3.13      6.36      3.92      3.76      11.7      5.25      2.47      3.95      8.05      10.9      9.54      4.73      6.36      7.37      11.4      12.4      11.2      5.19      12.5      18.5      9.16      10.2      9.52      6.56       5.9      3.59      3.53      3.54      6.57      9.25      3.11      5.12      7.79       6.9      9.59      7.26      5.91      11.3       8.1      10.5      14.8      7.44      3.16      13.7        13      6.59      7.73      6.58      3.53      2.98      6.05      4.16      7.19      4.85      3.76      4.59      3.01      3.16      7.85      8.23      12.9      7.14       7.6      9.51      3.33      3.56       4.7      8.58      10.4      6.27      7.39      15.8      4.97      4.74      6.07       9.5      8.67      4.86      6.93      8.93      6.47      7.53      4.54      9.97      12.6      5.98      11.7       7.9      9.28      11.5      18.3      15.9      10.4      12.7       7.2      6.87       7.7      11.7      6.12      5.08      6.15      12.8      9.97      7.34      9.09      12.4      7.83      5.68      6.75      8.01       9.8      10.6      8.51      9.74      9.29      5.49      8.65      7.18      4.61      10.5      12.7      6.36      5.99      5.89      5.98      5.49      7.79       4.5      8.05      
5.57      17.6      13.3      11.5      12.7      7.79      14.2      10.2      14.6      5.29      7.12        14      13.3      3.26      3.73      2.96      9.53      8.88      34.8        38      13.4      23.2      21.2      23.7      21.8      17.2      21.1      23.6      24.6      30.6      30.8      28.3        32      30.6      20.9      17.1      18.8      25.7      15.2      16.4      17.1      19.4      19.9      30.6        30      26.8      20.3      20.3      19.8      27.4        23      23.3      12.1      26.4      19.8      10.1      21.2      34.4      20.1        37      29.1      25.8      26.6      20.6      22.7        15      15.7      14.1      23.3      17.2      24.4      15.7      14.5      21.5      24.1      17.6      19.7        12      16.2      15.2      23.3      18.1      26.5        34      22.9      22.1      19.5      16.6      18.9      23.8        24      17.8      16.4      18.1      19.3      17.4      17.7      17.3      16.7      18.7      18.1        19      16.9      16.2      14.7      16.4      14.7        14      10.3      13.2      14.1      17.2      21.3      18.1      14.8      16.3      12.9      14.4      11.7      18.1      24.1      18.7      24.9        18      13.1      10.7      7.74      7.01      10.4      13.3      10.6        15      11.5      18.1        24      29.7      18.1      13.4        12      13.6      17.6      21.1      14.1      12.9      15.1      14.3      9.67      9.08      5.64      6.48      7.88 
       24      21.6      34.7      33.4      36.2      28.7      22.9      27.1      16.5      18.9        15      18.9      21.7      20.4      18.2      19.9      23.1      17.5      20.2      18.2      13.6      19.6      15.2      14.5      15.6      13.9      16.6      14.8      18.4        21      12.7      14.5      13.2      13.1      13.5      18.9        20        21      24.7      30.8      34.9      26.6      25.3      24.7      21.2      19.3        20      16.6      14.4      19.4      19.7      20.5        25      23.4      18.9      35.4      24.7      31.6      23.3      19.6      18.7        16      22.2        25        33      23.5      19.4        22      17.4      20.9      24.2      21.7      22.8      23.4      24.1      21.4        20      20.8      21.2      20.3        28      23.9      24.8      22.9      23.9      26.6      22.5      22.2      23.6      28.7      22.6        22      22.9        25      20.6      28.4      21.4      38.7      43.8      33.2      27.5      26.5      18.6      19.3      20.1      19.5      19.5      20.4      19.8      19.4      21.7      22.8      18.8      18.7      18.5      18.3      21.2      19.2      20.4      19.3        22      20.3      20.5      17.3      18.8      21.4      15.7      16.2        18      14.3      19.2      19.6        23      18.4      15.6      18.1      17.4      17.1      13.3      17.8        14      14.4      13.4      15.6      11.8      13.8      15.6      14.6      17.8      15.4      21.5      19.6      15.3      19.4        17      15.6      13.1      41.3      24.3      23.3        27        50        50        50      22.7        25        50      23.8      23.8      22.3      17.4      19.1      23.1      23.6      22.6      29.4      23.2      24.6      29.9      37.2      39.8      36.2      37.9      32.5      26.4      29.6        50        32      29.8      34.9        37      30.5      36.4      31.1      29.1        50      33.3      30.3      34.6      34.9      32.9      24.1      42.3      48.5        50      22.6      24.4      22.5      24.4        20      21.7      19.3      22.4      28.1      23.7        25      23.3      28.7      21.5        23      26.7      21.7      27.5      30.1      44.8        50      37.6      31.6      46.7      31.5      24.3      31.7      41.7      48.3        29        24      25.1      31.5      23.7      23.3        22      20.1      22.2      23.7      17.6      18.5      24.3      20.5      24.5      26.2      24.4      24.8      29.6      42.8      21.9      20.9        44        50        36      30.1      33.8      43.1      48.8        31      36.5      22.8      30.7        50      43.5      20.7      21.1      25.2      24.4      35.2      32.4        32      33.2      33.1      29.1      35.1      45.4      35.4        46        50      32.2        22      20.1      23.2      22.3      24.8      28.5      37.3      27.9      23.9      21.7      28.6      27.1      20.3      22.5        29      24.8        22      26.4      33.1      36.1      28.4      33.4      28.2      22.8      20.3      16.1      22.1      19.4      21.6      23.8      16.2      17.8      19.8      23.1        21      23.8      23.1      20.4      18.5        25      24.6        23      22.2      19.3      22.6      19.8      17.1      19.4      22.2      20.7      21.1      19.5      18.5      20.6        19      18.7      32.7      16.5      23.9      31.2      17.5      17.2      23.1      24.5      26.6      22.9      24.1      18.6      30.1      18.2      
20.6      17.8      21.7      22.7      22.6        25      19.9      20.8      16.8      21.9      27.5      21.9      23.1        50        50        50        50        50      13.8      13.8        15      13.9      13.3      13.1      10.2      10.4      10.9      11.3      12.3       8.8       7.2      10.5       7.4      10.2      11.5      15.1      23.2       9.7      13.8      12.7      13.1      12.5       8.5         5       6.3       5.6       7.2      12.1       8.3       8.5         5      11.9      27.9      17.2      27.5        15      17.2      17.9      16.3         7       7.2       7.5      10.4       8.8       8.4      16.7      14.2      20.8      13.4      11.7       8.3      10.2      10.9        11       9.5      14.5      14.1      16.1      14.3      11.7      13.4       9.6       8.7       8.4      12.8      10.5      17.1      18.4      15.4      10.8      11.8      14.9      12.6      14.1        13      13.4      15.2      16.1      17.8      14.9      14.1      12.7      13.5      14.9        20      16.4      17.7      19.5      20.2      21.4      19.9        19      19.1      19.1      20.1      19.9      19.6      23.2      29.8      13.8      13.3      16.7        12      14.6      21.4        23      23.7        25      21.8      20.6      21.2      19.1      20.6      15.2         7       8.1      13.6      20.1      21.8      24.5      23.1      19.7      18.3      21.2      17.5      16.8      22.4      20.6      23.9        22      11.9 
The data has 14 rows and 506 columns, where each column holds the 14 features of one house. The values in the last row represent the price of each house, and we will train a model to predict this value given the remaining 13 features. We also add a row of ones to the input matrix that will account for the bias (intercept) of our model and simplify the implementation.
*)

let hx = h.[0..12, *]
let hy = h.[13..13, *]

let housing = Dataset(hx, hy).AppendBiasRowX()

(**
Our linear regression model is of the form

$$$
  h_{\mathbf{w}} (\mathbf{x}) = \sum_j w_j x_j = \mathbf{w}^{T} \mathbf{x},

which represents a family of linear functions parameterized by the vector $\mathbf{w}$.
*)

let model (w:DV) (x:DV) = w * x

(**
For training the model, we minimize the loss function

$$$
  J(\mathbf{w}) = \frac{1}{2} \sum_{i=1}^{n} \left( h_{\mathbf{w}} (\mathbf{x}^{(i)}) - y^{(i)} \right)^2 = \frac{1}{2} \sum_{i=1}^{n} \left( \mathbf{w}^{T} \mathbf{x}^{(i)} - y^{(i)} \right)^2,

where the $\mathbf{x}^{(i)}$ are vectors holding the 13 input features plus the bias input (the constant 1) and the $y^{(i)}$ are the target values (which are here scalar).
*)

let wopt, lopt, whist, lhist =
    Optimize.Train(model, Rnd.UniformDV(14), housing,
        {Params.Default with
            Epochs = 1000
            Loss = Loss.Quadratic})

let trainedmodel = model wopt

(*** hide ***)
open RProvider
open RProvider.graphics
open RProvider.grDevices

let px, py =
    housing.Y.[0, *]
    |> DV.toArray
    |> Array.mapi (fun i v -> float i, (v |> float32 |> float))
    |> Array.unzip

let ppx, ppy =
    housing.X
    |> DM.mapCols (fun v -> toDV [trainedmodel v])
    |> DM.toDV
    |> DV.toArray
    |> Array.mapi (fun i v -> float i, (v |> float32 |> float))
    |> Array.unzip

let ll = lhist |> Array.map (float32 >> float)

namedParams [
    "x", box px
    "y", box py
    "pch", box 19
    "col", box "darkblue"
    "type", box "p"
    "xlab", box "House number"
    "ylab", box "Price"
    "width", box 700
    "height", box 500]
|> R.plot |> ignore

namedParams [
    "x", box ppx
    "y", box ppy
    "pch", box 19
    "col", box "red"
    "type", box "p"
    "width", box 700
    "height", box 500]
|> R.points |> ignore

(**
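The quadratic loss this training run minimizes can also be written out directly against the definitions above. The following is a minimal sketch for illustration, not Hype's internal implementation; it reuses only operations that already appear in this script (`DM.mapCols`, `DM.toDV`, and the `DV * DV` dot product):

    // J(w): half the sum of squared residuals over a dataset
    let quadraticLoss (w:DV) (d:Dataset) =
        let pred = d.X |> DM.mapCols (fun x -> toDV [model w x]) |> DM.toDV
        let residuals = pred - d.Y.[0, *]
        D 0.5f * (residuals * residuals) // DV * DV is a dot product: sum of squares
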
[12/11/2015 14:41:04] --- Training started
[12/11/2015 14:41:04] Parameters     : 14
[12/11/2015 14:41:04] Iterations     : 1000
[12/11/2015 14:41:04] Epochs         : 1000
[12/11/2015 14:41:04] Batches        : Full (1 per epoch)
[12/11/2015 14:41:04] Training data  : 506
[12/11/2015 14:41:04] Validation data: None
[12/11/2015 14:41:04] Valid. interval: 10
[12/11/2015 14:41:04] Method         : Gradient descent
[12/11/2015 14:41:04] Learning rate  : RMSProp a0 = D 0.00100000005f, k = D 0.899999976f
[12/11/2015 14:41:04] Momentum       : None
[12/11/2015 14:41:04] Loss           : L2 norm
[12/11/2015 14:41:04] Regularizer    : L2 lambda = D 9.99999975e-05f
[12/11/2015 14:41:04] Gradient clip. : None
[12/11/2015 14:41:04] Early stopping : None
[12/11/2015 14:41:04] Improv. thresh.: D 0.995000005f
[12/11/2015 14:41:04] Return best    : true
[12/11/2015 14:41:04]    1/1000 | Batch 1/1 | D  5.281104e+002 [- ]
[12/11/2015 14:41:04]    2/1000 | Batch 1/1 | D  5.252324e+002 [↓▼]
[12/11/2015 14:41:04]    3/1000 | Batch 1/1 | D  5.231447e+002 [↓ ]
[12/11/2015 14:41:04]    4/1000 | Batch 1/1 | D  5.213967e+002 [↓▼]
[12/11/2015 14:41:04]    5/1000 | Batch 1/1 | D  5.198447e+002 [↓ ]
[12/11/2015 14:41:04]    6/1000 | Batch 1/1 | D  5.184225e+002 [↓▼]
[12/11/2015 14:41:04]    7/1000 | Batch 1/1 | D  5.170928e+002 [↓ ]
...
[12/11/2015 14:41:27]  994/1000 | Batch 1/1 | D  6.404338e+000 [↓ ]
[12/11/2015 14:41:27]  995/1000 | Batch 1/1 | D  6.392090e+000 [↓ ]
[12/11/2015 14:41:27]  996/1000 | Batch 1/1 | D  6.377205e+000 [↓▼]
[12/11/2015 14:41:28]  997/1000 | Batch 1/1 | D  6.363370e+000 [↓ ]
[12/11/2015 14:41:28]  998/1000 | Batch 1/1 | D  6.351198e+000 [↓ ]
[12/11/2015 14:41:28]  999/1000 | Batch 1/1 | D  6.344284e+000 [↓▼]
[12/11/2015 14:41:28] 1000/1000 | Batch 1/1 | D  6.334455e+000 [↓ ]
[12/11/2015 14:41:28] Duration       : 00:00:23.3076639
[12/11/2015 14:41:28] Loss initial   : D  5.281104e+002
[12/11/2015 14:41:28] Loss final     : D  6.344284e+000 (Best)
[12/11/2015 14:41:28] Loss change    : D -5.217661e+002 (-98.80 %)
[12/11/2015 14:41:28] Loss chg. / s  : D -2.238603e+001
[12/11/2015 14:41:28] Epochs / s     : 42.90434272
[12/11/2015 14:41:28] Epochs / min   : 2574.260563
[12/11/2015 14:41:28] --- Training finished

val trainedmodel : (DV -> D)
The following is a plot of the prices in the dataset, where the blue points represent the real prices and the red points are the values predicted by the trained linear model.
[Chart: real prices (blue) and predicted prices (red) for each house]
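Since `trainedmodel` is just a function from an input vector to a price, it can also be queried pointwise. A small usage sketch, assuming DiffSharp's column slicing `.[*, 0]` mirrors the row slicing used above (the house index is arbitrary):

    // housing.X already includes the appended bias row,
    // so a column can be fed to the model as-is
    let firstHouse = housing.X.[*, 0]        // 14-element input vector
    let firstPrice = trainedmodel firstHouse // predicted price : D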

### Loss function
*)

type Loss =
    | L1Loss                // L1 norm, least absolute deviations
    | L2Loss                // L2 norm
    | Quadratic             // L2 norm squared, least squares
    | CrossEntropyOnLinear  // Cross entropy after linear layer
    | CrossEntropyOnSoftmax // Cross entropy after softmax layer

(**
### Regularization
*)

type Regularization =
    | L1Reg of D // L1 regularization
    | L2Reg of D // L2 regularization
    | NoReg
    static member DefaultL1Reg = L1Reg (D 0.0001f)
    static member DefaultL2Reg = L2Reg (D 0.0001f)

(**
### Batch
*)

type Batch =
    | Full
    | Minibatch of int // Minibatch of given size
    | Stochastic       // Minibatch with size 1, SGD

(**
### Validation and early stopping
*)

type EarlyStopping =
    | Early of int * int // Stagnation patience, overfitting patience
    | NoEarly
    static member DefaultEarly = Early (750, 10)

(**
Training proceeds by minimizing the loss function through adjustment of the model parameters. Continuing this optimization for longer than necessary causes overfitting, where the model strives to approximate the training data ever more precisely. Overfitting reduces the model's generalization ability and its performance on new data in the field.

To prevent overfitting, the data is divided into training and validation sets: while the model is being optimized against the loss computed on the training data, its performance on the validation data is also monitored. Generally, in the initial stages of training, the loss for both the training and the validation data will decrease. Eventually the validation loss will asymptotically approach a minimum and, beyond a certain stage, start to increase even while the training loss keeps decreasing. This signals a good time to stop training, to avoid overfitting the model to the training data.

Hype does this via the **EarlyStopping** parameter, where you can specify a stagnation patience (the number of acceptable iterations with non-decreasing training loss) and an overfitting patience (the number of acceptable iterations where the training loss decreases without an accompanying decrease in the validation loss).

Let's divide the housing dataset into training and validation sets and train the model using early stopping.
*)

let housingtrain = housing.[..399] // The first 400 data points
let housingvalid = housing.[400..] // The remaining 106 data points

(**
val housingtrain : Dataset = Hype.Dataset
   X: 14 x 400
   Y: 1 x 400
val housingvalid : Dataset = Hype.Dataset
   X: 14 x 106
   Y: 1 x 106
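
These configuration types come together in the `Params` record passed to `Optimize.Train`, as in the call below. A fuller configuration, sketched for illustration only: the `Batch` and `Regularizer` field names are assumptions inferred from the defaults echoed in the training logs, and the values are illustrative, not tuned.

    let myparams =
        {Params.Default with
            Epochs = 1000
            Loss = Loss.Quadratic
            Batch = Minibatch 32                      // assumption: field name Batch
            Regularizer = Regularization.DefaultL2Reg // assumption: field name Regularizer
            EarlyStopping = EarlyStopping.DefaultEarly}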
*)

let wopt, lopt, whist, lhist =
    Optimize.Train(model, Rnd.UniformDV(14), housingtrain, housingvalid,
        {Params.Default with
            Epochs = 1000
            EarlyStopping = Hype.EarlyStopping.Early(750, 10)})

(**
[12/11/2015 15:09:15] --- Training started
[12/11/2015 15:09:15] Parameters     : 14
[12/11/2015 15:09:15] Iterations     : 1000
[12/11/2015 15:09:15] Epochs         : 1000
[12/11/2015 15:09:15] Batches        : Full (1 per epoch)
[12/11/2015 15:09:15] Training data  : 400
[12/11/2015 15:09:15] Validation data: 106
[12/11/2015 15:09:15] Valid. interval: 10
[12/11/2015 15:09:15] Method         : Gradient descent
[12/11/2015 15:09:15] Learning rate  : RMSProp a0 = D 0.00100000005f, k = D 0.899999976f
[12/11/2015 15:09:15] Momentum       : None
[12/11/2015 15:09:15] Loss           : L2 norm
[12/11/2015 15:09:15] Regularizer    : L2 lambda = D 9.99999975e-05f
[12/11/2015 15:09:15] Gradient clip. : None
[12/11/2015 15:09:15] Early stopping : Stagnation thresh. = 750, overfit. thresh. = 10
[12/11/2015 15:09:15] Improv. thresh.: D 0.995000005f
[12/11/2015 15:09:15] Return best    : true
[12/11/2015 15:09:15]    1/1000 | Batch 1/1 | D  3.221269e+002 [- ] | Valid D  3.322605e+002 [- ] | Stag:  0 Ovfit: 0
[12/11/2015 15:09:15]    2/1000 | Batch 1/1 | D  3.193867e+002 [↓▼] | Valid D  3.288632e+002 [↓▼] | Stag:  0 Ovfit: 0
[12/11/2015 15:09:15]    3/1000 | Batch 1/1 | D  3.173987e+002 [↓▼] | Valid D  3.263986e+002 [↓▼] | Stag:  0 Ovfit: 0
[12/11/2015 15:09:15]    4/1000 | Batch 1/1 | D  3.157341e+002 [↓▼] | Valid D  3.243348e+002 [↓▼] | Stag:  0 Ovfit: 0
[12/11/2015 15:09:15]    5/1000 | Batch 1/1 | D  3.142565e+002 [↓ ] | Valid D  3.225029e+002 [↓▼] | Stag:  0 Ovfit: 0
[12/11/2015 15:09:15]    6/1000 | Batch 1/1 | D  3.129025e+002 [↓▼] | Valid D  3.208241e+002 [↓▼] | Stag:  0 Ovfit: 0
[12/11/2015 15:09:15]    7/1000 | Batch 1/1 | D  3.116365e+002 [↓ ] | Valid D  3.192545e+002 [↓ ] | Stag: 10 Ovfit: 0
[12/11/2015 15:09:15]    8/1000 | Batch 1/1 | D  3.104370e+002 [↓▼] | Valid D  3.177671e+002 [↓▼] | Stag:  0 Ovfit: 0
[12/11/2015 15:09:15]    9/1000 | Batch 1/1 | D  3.092885e+002 [↓ ] | Valid D  3.163436e+002 [↓ ] | Stag: 10 Ovfit: 0
[12/11/2015 15:09:15]   10/1000 | Batch 1/1 | D  3.081814e+002 [↓▼] | Valid D  3.149709e+002 [↓▼] | Stag:  0 Ovfit: 0
[12/11/2015 15:09:15]   11/1000 | Batch 1/1 | D  3.071076e+002 [↓ ] | Valid D  3.136398e+002 [↓ ] | Stag: 10 Ovfit: 0
[12/11/2015 15:09:15]   12/1000 | Batch 1/1 | D  3.060618e+002 [↓▼] | Valid D  3.123428e+002 [↓▼] | Stag:  0 Ovfit: 0
[12/11/2015 15:09:15]   13/1000 | Batch 1/1 | D  3.050388e+002 [↓ ] | Valid D  3.110746e+002 [↓ ] | Stag: 10 Ovfit: 0
...
[12/11/2015 15:09:21]  318/1000 | Batch 1/1 | D  4.250416e+001 [↓▼] | Valid D  3.382476e+001 [↓▼] | Stag:  0 Ovfit: 0
[12/11/2015 15:09:21]  319/1000 | Batch 1/1 | D  4.178834e+001 [↓▼] | Valid D  3.371201e+001 [↓ ] | Stag: 10 Ovfit: 0
[12/11/2015 15:09:21]  320/1000 | Batch 1/1 | D  4.109373e+001 [↓▼] | Valid D  3.361367e+001 [↓▼] | Stag:  0 Ovfit: 0
[12/11/2015 15:09:21]  321/1000 | Batch 1/1 | D  4.040976e+001 [↓▼] | Valid D  3.362166e+001 [↑ ] | Stag: 10 Ovfit: 0
[12/11/2015 15:09:21]  322/1000 | Batch 1/1 | D  3.973472e+001 [↓▼] | Valid D  3.368684e+001 [↑ ] | Stag: 20 Ovfit: 1
[12/11/2015 15:09:21]  323/1000 | Batch 1/1 | D  3.907929e+001 [↓▼] | Valid D  3.382304e+001 [↑ ] | Stag: 30 Ovfit: 2
[12/11/2015 15:09:21]  324/1000 | Batch 1/1 | D  3.845267e+001 [↓▼] | Valid D  3.398524e+001 [↑ ] | Stag: 40 Ovfit: 3
[12/11/2015 15:09:21]  325/1000 | Batch 1/1 | D  3.783842e+001 [↓▼] | Valid D  3.418199e+001 [↑ ] | Stag: 50 Ovfit: 4
[12/11/2015 15:09:21]  326/1000 | Batch 1/1 | D  3.721857e+001 [↓▼] | Valid D  3.450164e+001 [↑ ] | Stag: 60 Ovfit: 5
[12/11/2015 15:09:21]  327/1000 | Batch 1/1 | D  3.659464e+001 [↓▼] | Valid D  3.499456e+001 [↑ ] | Stag: 70 Ovfit: 6
[12/11/2015 15:09:21]  328/1000 | Batch 1/1 | D  3.598552e+001 [↓▼] | Valid D  3.556280e+001 [↑ ] | Stag: 80 Ovfit: 7
[12/11/2015 15:09:21]  329/1000 | Batch 1/1 | D  3.538885e+001 [↓▼] | Valid D  3.616002e+001 [↑ ] | Stag: 90 Ovfit: 8
[12/11/2015 15:09:21]  330/1000 | Batch 1/1 | D  3.481464e+001 [↓▼] | Valid D  3.678414e+001 [↑ ] | Stag:100 Ovfit: 9
[12/11/2015 15:09:21] *** EARLY STOPPING TRIGGERED: Overfitting ***
[12/11/2015 15:09:21]  331/1000 | Batch 1/1 | D  3.426452e+001 [↓▼] | Valid D  3.741238e+001 [↑ ] | Stag:110 Ovfit:10
[12/11/2015 15:09:21] Duration       : 00:00:05.9617220
[12/11/2015 15:09:21] Loss initial   : D  3.221269e+002
[12/11/2015 15:09:21] Loss final     : D  3.373809e+001 (Best)
[12/11/2015 15:09:21] Loss change    : D -2.883888e+002 (-89.53 %)
[12/11/2015 15:09:21] Loss chg. / s  : D -4.837340e+001
[12/11/2015 15:09:21] Epochs / s     : 55.52087132
[12/11/2015 15:09:21] Epochs / min   : 3331.252279
[12/11/2015 15:09:21] --- Training finished
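*)

(**
`Optimize.Train` returns the optimized weights (`wopt`), the corresponding loss (`lopt`), and the weight and loss histories (`whist`, `lhist`). Note the `Return best` setting in the log header: the final loss above is marked (Best), meaning the returned weights are the best ones encountered during the run rather than those of the last iteration. A minimal sketch of using the result, assuming `model : DV -> DV -> D` as defined earlier in this document:
*)

let housingmodel : DV -> D = model wopt // The trained predictor, ready for new feature vectors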
================================================ FILE: docs/input/download.fsx ================================================

(**
Download
========

Hype is tested on Linux and Windows. You can download the source code or the binaries of the [latest release on GitHub](https://github.com/hypelib/Hype/releases). You can also install the library as a package through [NuGet](https://www.nuget.org/packages/Hype), by running
Install-Package Hype
in the package manager console. Hype only supports the 64 bit platform, so please make sure that you set "x64" as the platform target. For detailed instructions, please see the installation instructions of [DiffSharp](http://diffsharp.github.io/DiffSharp/), on which Hype depends. *) ================================================ FILE: docs/input/files/misc/style.css ================================================ @import url(https://fonts.googleapis.com/css?family=Droid+Sans|Droid+Sans+Mono|Open+Sans:400,600,700); /*-------------------------------------------------------------------------- Formatting for F# code snippets /*--------------------------------------------------------------------------*/ /* strings --- and stlyes for other string related formats */ span.s { color:#E0E268; } /* printf formatters */ span.pf { color:#E0C57F; } /* escaped chars */ span.e { color:#EA8675; } /* identifiers --- and styles for more specific identifier types */ span.i { color:#d1d1d1; } /* type or module */ span.t { color:#43AEC6; } /* function */ span.f { color:#e1e1e1; } /* DU case or active pattern */ span.p { color:#4ec9b0; } /* keywords */ span.k { color:#FAB11D; } /* comment */ span.c { color:#808080; } /* operators */ span.o { color:#af75c1; } /* numbers */ span.n { color:#96C71D; } /* line number */ span.l { color:#80b0b0; } /* mutable var or ref cell */ span.v { color:#d1d1d1; font-weight: bold; } /* inactive code */ span.inactive { color:#808080; } /* preprocessor */ span.prep { color:#af75c1; } /* fsi output */ span.fsi { color:#808080; } /* omitted */ span.omitted { background:#3c4e52; border-radius:5px; color:#808080; padding:0px 0px 1px 0px; } /* tool tip */ div.tip { background:#475b5f; border-radius:4px; font:11pt 'Droid Sans', arial, sans-serif; padding:6px 8px 6px 8px; display:none; color:#d1d1d1; } table.pre pre { padding:0px; margin:0px; border:none; } table.pre, pre.fssnip, pre { line-height:13pt; border:1px solid #191919; border-collapse:separate; white-space:pre; font: 9pt 'Droid Sans Mono',consolas,monospace; width:90%; margin:10px 20px 20px 20px; background-color:#212d30; padding:10px; border-radius:5px; color:#d1d1d1; } table.pre pre { padding:0px; margin:0px; border-radius:0px; width: 100%; } table.pre td { padding:0px; white-space:normal; margin:0px; } table.pre td.lines { width:30px; } /*-------------------------------------------------------------------------- Formatting for page & standard document content /*--------------------------------------------------------------------------*/ body { font-family: 'Open Sans', 'Segoe UI', serif; padding-top: 0px; padding-bottom: 40px; color: #dbdbdb; background-color: #191919; } pre { word-wrap: inherit; } .nav-pills > li > a { background-color: #191919; font-size: 11pt; height: 32px; padding: 7px 15px; } .nav-pills > li > a:hover { color : #000000; background-color: #3781c3; font-size: 11pt; } .alert-info { color: #ebebeb; background-color: #1f1f1f; border-color: #191919; } .hype { color: #fff; } .nav-label { font-size: 11pt; height: 34px; padding: 7px 15px; margin: 20px 0px 0px 0px; color: #dbdbdb; } a:link { color: #3781c3; } a:visited { color: #3275b0; } a:hover { color: #7fb0db; } a:active { color: #35ad67; } h1 { margin-bottom: 30px; } /* Make table headings and td.title bold */ td.title, thead { font-weight:bold; } /*-------------------------------------------------------------------------- Formatting for API reference /*--------------------------------------------------------------------------*/ .type-list .type-name, 
.module-list .module-name { width:25%; font-weight:bold; } .member-list .member-name { width:35%; } #main .xmldoc h2 { font-size:14pt; margin:10px 0px 0px 0px; } #main .xmldoc h3 { font-size:12pt; margin:10px 0px 0px 0px; } .github-link { float:right; text-decoration:none; } .github-link img { border-style:none; margin-left:10px; } .github-link .hover { display:none; } .github-link:hover .hover { display:block; } .github-link .normal { display: block; } .github-link:hover .normal { display: none; } code { color: white; background-color: #2f2f2f; font-size: 100%; } /*-------------------------------------------------------------------------- Links /*--------------------------------------------------------------------------*/ h1 a, h1 a:hover, h1 a:focus, h2 a, h2 a:hover, h2 a:focus, h3 a, h3 a:hover, h3 a:focus, h4 a, h4 a:hover, h4 a:focus, h5 a, h5 a:hover, h5 a:focus, h6 a, h6 a:hover, h6 a:focus { color : inherit; text-decoration : inherit; outline:none } /*-------------------------------------------------------------------------- Additional formatting for the homepage /*--------------------------------------------------------------------------*/ #nuget { margin-top:20px; font-size: 11pt; padding:20px; background: #e0e0e0; color: #3c3c3c; } #nuget pre { font-size:11pt; -moz-border-radius: 0px; -webkit-border-radius: 0px; border-radius: 0px; background: #3c3c3c; border-style:none; color: #e0e0e0; margin-top:15px; } ================================================ FILE: docs/input/files/misc/style_light.css ================================================ @import url(https://fonts.googleapis.com/css?family=Droid+Sans|Droid+Sans+Mono|Open+Sans:400,600,700); /*-------------------------------------------------------------------------- Formatting for F# code snippets /*--------------------------------------------------------------------------*/ /* identifier */ span.i { color:#000000; } /* string */ span.s { color:#a31515; } /* keywords */ span.k { color:#0000ff; } /* comment */ span.c { color:#008000; } /* operators */ span.o { color:#000000; } /* numbers */ span.n { color:#000000; } /* line number */ span.l { color:#96c2cd; } /* type or module */ span.t { color:#2b91af; } /* function */ span.f { color:#0000a0; } /* DU case or active pattern */ span.p { color:#800080; } /* mutable var or ref cell */ span.v { color:#000000; font-weight: bold; } /* printf formatters */ span.pf { color:#2b91af; } /* escaped chars */ span.e { color:#ff0080; } /* mutable var or ref cell */ /* inactive code */ span.inactive { color:#808080; } /* preprocessor */ span.prep { color:#0000ff; } /* fsi output */ span.fsi { color:#808080; } /* omitted */ span.omitted { background:#3c4e52; border-radius:5px; color:#808080; padding:0px 0px 1px 0px; } /* tool tip */ div.tip { background:#e5e5e5; border-radius:4px; font:9pt 'Droid Sans', arial, sans-serif; padding:6px 8px 6px 8px; display:none; color:#000000; } table.pre pre { padding:0px; margin:0px; border:none; } table.pre, pre.fssnip, pre { line-height:13pt; border:1px solid #d8d8d8; border-collapse:separate; white-space:pre; font: 10pt consolas,monospace; width:90%; margin:10px 20px 20px 20px; background-color:#fdfdfd; padding:10px; border-radius:5px; color:#000000; } table.pre pre { padding:0px; margin:0px; border-radius:0px; width: 100%; } table.pre td { padding:0px; white-space:normal; margin:0px; } table.pre td.lines { width:30px; } /*-------------------------------------------------------------------------- Formatting for page & standard document content 
/*--------------------------------------------------------------------------*/ body { font-family: 'Open Sans', serif; padding-top: 0px; padding-bottom: 40px; } pre { word-wrap: inherit; } /* Format the heading - nicer spacing etc. */ .masthead { overflow: hidden; } .masthead .muted a { text-decoration:none; color:#999999; } .masthead ul, .masthead li { margin-bottom:0px; } .masthead .nav li { margin-top: 15px; font-size:110%; } .masthead h3 { margin-bottom:5px; font-size:170%; } hr { margin:0px 0px 20px 0px; } /* Make table headings and td.title bold */ td.title, thead { font-weight:bold; } /* Format the right-side menu */ #menu { margin-top:50px; font-size:11pt; padding-left:20px; } #menu .nav-header { font-size:12pt; color:#606060; margin-top:20px; } #menu li { line-height:25px; } /* Change font sizes for headings etc. */ #main h1 { font-size: 26pt; margin:10px 0px 15px 0px; font-weight:400; } #main h2 { font-size: 20pt; margin:20px 0px 0px 0px; font-weight:400; } #main h3 { font-size: 14pt; margin:15px 0px 0px 0px; font-weight:600; } #main p { font-size: 11pt; margin:5px 0px 15px 0px; } #main ul { font-size: 11pt; margin-top:10px; } #main li { font-size: 11pt; margin: 5px 0px 5px 0px; } #main strong { font-weight:700; } /*-------------------------------------------------------------------------- Formatting for API reference /*--------------------------------------------------------------------------*/ .type-list .type-name, .module-list .module-name { width:25%; font-weight:bold; } .member-list .member-name { width:35%; } #main .xmldoc h2 { font-size:14pt; margin:10px 0px 0px 0px; } #main .xmldoc h3 { font-size:12pt; margin:10px 0px 0px 0px; } .github-link { float:right; text-decoration:none; } .github-link img { border-style:none; margin-left:10px; } .github-link .hover { display:none; } .github-link:hover .hover { display:block; } .github-link .normal { display: block; } .github-link:hover .normal { display: none; } /*-------------------------------------------------------------------------- Links /*--------------------------------------------------------------------------*/ h1 a, h1 a:hover, h1 a:focus, h2 a, h2 a:hover, h2 a:focus, h3 a, h3 a:hover, h3 a:focus, h4 a, h4 a:hover, h4 a:focus, h5 a, h5 a:hover, h5 a:focus, h6 a, h6 a:hover, h6 a:focus { color : inherit; text-decoration : inherit; outline:none } /*-------------------------------------------------------------------------- Additional formatting for the homepage /*--------------------------------------------------------------------------*/ #nuget { margin-top:20px; font-size: 11pt; padding:20px; } #nuget pre { font-size:11pt; -moz-border-radius: 0px; -webkit-border-radius: 0px; border-radius: 0px; background: #404040; border-style:none; color: #e0e0e0; margin-top:15px; } ================================================ FILE: docs/input/files/misc/tips.js ================================================ var currentTip = null; var currentTipElement = null; function hideTip(evt, name, unique) { var el = document.getElementById(name); el.style.display = "none"; currentTip = null; } function findPos(obj) { // no idea why, but it behaves differently in webbrowser component if (window.location.search == "?inapp") return [obj.offsetLeft + 10, obj.offsetTop + 30]; var curleft = 0; var curtop = obj.offsetHeight; while (obj) { curleft += obj.offsetLeft; curtop += obj.offsetTop; obj = obj.offsetParent; }; return [curleft, curtop]; } function hideUsingEsc(e) { if (!e) { e = event; } hideTip(e, currentTipElement, currentTip); } 
function showTip(evt, name, unique, owner) { document.onkeydown = hideUsingEsc; if (currentTip == unique) return; currentTip = unique; currentTipElement = name; var pos = findPos(owner ? owner : (evt.srcElement ? evt.srcElement : evt.target)); var posx = pos[0]; var posy = pos[1]; var el = document.getElementById(name); var parent = (document.documentElement == null) ? document.body : document.documentElement; el.style.position = "absolute"; el.style.left = posx + "px"; el.style.top = posy + "px"; el.style.display = "block"; } ================================================ FILE: docs/input/housing.data ================================================ 0.00632 18.00 2.310 0 0.5380 6.5750 65.20 4.0900 1 296.0 15.30 396.90 4.98 24.00 0.02731 0.00 7.070 0 0.4690 6.4210 78.90 4.9671 2 242.0 17.80 396.90 9.14 21.60 0.02729 0.00 7.070 0 0.4690 7.1850 61.10 4.9671 2 242.0 17.80 392.83 4.03 34.70 0.03237 0.00 2.180 0 0.4580 6.9980 45.80 6.0622 3 222.0 18.70 394.63 2.94 33.40 0.06905 0.00 2.180 0 0.4580 7.1470 54.20 6.0622 3 222.0 18.70 396.90 5.33 36.20 0.02985 0.00 2.180 0 0.4580 6.4300 58.70 6.0622 3 222.0 18.70 394.12 5.21 28.70 0.08829 12.50 7.870 0 0.5240 6.0120 66.60 5.5605 5 311.0 15.20 395.60 12.43 22.90 0.14455 12.50 7.870 0 0.5240 6.1720 96.10 5.9505 5 311.0 15.20 396.90 19.15 27.10 0.21124 12.50 7.870 0 0.5240 5.6310 100.00 6.0821 5 311.0 15.20 386.63 29.93 16.50 0.17004 12.50 7.870 0 0.5240 6.0040 85.90 6.5921 5 311.0 15.20 386.71 17.10 18.90 0.22489 12.50 7.870 0 0.5240 6.3770 94.30 6.3467 5 311.0 15.20 392.52 20.45 15.00 0.11747 12.50 7.870 0 0.5240 6.0090 82.90 6.2267 5 311.0 15.20 396.90 13.27 18.90 0.09378 12.50 7.870 0 0.5240 5.8890 39.00 5.4509 5 311.0 15.20 390.50 15.71 21.70 0.62976 0.00 8.140 0 0.5380 5.9490 61.80 4.7075 4 307.0 21.00 396.90 8.26 20.40 0.63796 0.00 8.140 0 0.5380 6.0960 84.50 4.4619 4 307.0 21.00 380.02 10.26 18.20 0.62739 0.00 8.140 0 0.5380 5.8340 56.50 4.4986 4 307.0 21.00 395.62 8.47 19.90 1.05393 0.00 8.140 0 0.5380 5.9350 29.30 4.4986 4 307.0 21.00 386.85 6.58 23.10 0.78420 0.00 8.140 0 0.5380 5.9900 81.70 4.2579 4 307.0 21.00 386.75 14.67 17.50 0.80271 0.00 8.140 0 0.5380 5.4560 36.60 3.7965 4 307.0 21.00 288.99 11.69 20.20 0.72580 0.00 8.140 0 0.5380 5.7270 69.50 3.7965 4 307.0 21.00 390.95 11.28 18.20 1.25179 0.00 8.140 0 0.5380 5.5700 98.10 3.7979 4 307.0 21.00 376.57 21.02 13.60 0.85204 0.00 8.140 0 0.5380 5.9650 89.20 4.0123 4 307.0 21.00 392.53 13.83 19.60 1.23247 0.00 8.140 0 0.5380 6.1420 91.70 3.9769 4 307.0 21.00 396.90 18.72 15.20 0.98843 0.00 8.140 0 0.5380 5.8130 100.00 4.0952 4 307.0 21.00 394.54 19.88 14.50 0.75026 0.00 8.140 0 0.5380 5.9240 94.10 4.3996 4 307.0 21.00 394.33 16.30 15.60 0.84054 0.00 8.140 0 0.5380 5.5990 85.70 4.4546 4 307.0 21.00 303.42 16.51 13.90 0.67191 0.00 8.140 0 0.5380 5.8130 90.30 4.6820 4 307.0 21.00 376.88 14.81 16.60 0.95577 0.00 8.140 0 0.5380 6.0470 88.80 4.4534 4 307.0 21.00 306.38 17.28 14.80 0.77299 0.00 8.140 0 0.5380 6.4950 94.40 4.4547 4 307.0 21.00 387.94 12.80 18.40 1.00245 0.00 8.140 0 0.5380 6.6740 87.30 4.2390 4 307.0 21.00 380.23 11.98 21.00 1.13081 0.00 8.140 0 0.5380 5.7130 94.10 4.2330 4 307.0 21.00 360.17 22.60 12.70 1.35472 0.00 8.140 0 0.5380 6.0720 100.00 4.1750 4 307.0 21.00 376.73 13.04 14.50 1.38799 0.00 8.140 0 0.5380 5.9500 82.00 3.9900 4 307.0 21.00 232.60 27.71 13.20 1.15172 0.00 8.140 0 0.5380 5.7010 95.00 3.7872 4 307.0 21.00 358.77 18.35 13.10 1.61282 0.00 8.140 0 0.5380 6.0960 96.90 3.7598 4 307.0 21.00 248.31 20.34 13.50 0.06417 0.00 5.960 0 0.4990 5.9330 68.20 3.3603 5 279.0 
19.20 396.90 9.68 18.90 0.09744 0.00 5.960 0 0.4990 5.8410 61.40 3.3779 5 279.0 19.20 377.56 11.41 20.00 0.08014 0.00 5.960 0 0.4990 5.8500 41.50 3.9342 5 279.0 19.20 396.90 8.77 21.00 0.17505 0.00 5.960 0 0.4990 5.9660 30.20 3.8473 5 279.0 19.20 393.43 10.13 24.70 0.02763 75.00 2.950 0 0.4280 6.5950 21.80 5.4011 3 252.0 18.30 395.63 4.32 30.80 0.03359 75.00 2.950 0 0.4280 7.0240 15.80 5.4011 3 252.0 18.30 395.62 1.98 34.90 0.12744 0.00 6.910 0 0.4480 6.7700 2.90 5.7209 3 233.0 17.90 385.41 4.84 26.60 0.14150 0.00 6.910 0 0.4480 6.1690 6.60 5.7209 3 233.0 17.90 383.37 5.81 25.30 0.15936 0.00 6.910 0 0.4480 6.2110 6.50 5.7209 3 233.0 17.90 394.46 7.44 24.70 0.12269 0.00 6.910 0 0.4480 6.0690 40.00 5.7209 3 233.0 17.90 389.39 9.55 21.20 0.17142 0.00 6.910 0 0.4480 5.6820 33.80 5.1004 3 233.0 17.90 396.90 10.21 19.30 0.18836 0.00 6.910 0 0.4480 5.7860 33.30 5.1004 3 233.0 17.90 396.90 14.15 20.00 0.22927 0.00 6.910 0 0.4480 6.0300 85.50 5.6894 3 233.0 17.90 392.74 18.80 16.60 0.25387 0.00 6.910 0 0.4480 5.3990 95.30 5.8700 3 233.0 17.90 396.90 30.81 14.40 0.21977 0.00 6.910 0 0.4480 5.6020 62.00 6.0877 3 233.0 17.90 396.90 16.20 19.40 0.08873 21.00 5.640 0 0.4390 5.9630 45.70 6.8147 4 243.0 16.80 395.56 13.45 19.70 0.04337 21.00 5.640 0 0.4390 6.1150 63.00 6.8147 4 243.0 16.80 393.97 9.43 20.50 0.05360 21.00 5.640 0 0.4390 6.5110 21.10 6.8147 4 243.0 16.80 396.90 5.28 25.00 0.04981 21.00 5.640 0 0.4390 5.9980 21.40 6.8147 4 243.0 16.80 396.90 8.43 23.40 0.01360 75.00 4.000 0 0.4100 5.8880 47.60 7.3197 3 469.0 21.10 396.90 14.80 18.90 0.01311 90.00 1.220 0 0.4030 7.2490 21.90 8.6966 5 226.0 17.90 395.93 4.81 35.40 0.02055 85.00 0.740 0 0.4100 6.3830 35.70 9.1876 2 313.0 17.30 396.90 5.77 24.70 0.01432 100.00 1.320 0 0.4110 6.8160 40.50 8.3248 5 256.0 15.10 392.90 3.95 31.60 0.15445 25.00 5.130 0 0.4530 6.1450 29.20 7.8148 8 284.0 19.70 390.68 6.86 23.30 0.10328 25.00 5.130 0 0.4530 5.9270 47.20 6.9320 8 284.0 19.70 396.90 9.22 19.60 0.14932 25.00 5.130 0 0.4530 5.7410 66.20 7.2254 8 284.0 19.70 395.11 13.15 18.70 0.17171 25.00 5.130 0 0.4530 5.9660 93.40 6.8185 8 284.0 19.70 378.08 14.44 16.00 0.11027 25.00 5.130 0 0.4530 6.4560 67.80 7.2255 8 284.0 19.70 396.90 6.73 22.20 0.12650 25.00 5.130 0 0.4530 6.7620 43.40 7.9809 8 284.0 19.70 395.58 9.50 25.00 0.01951 17.50 1.380 0 0.4161 7.1040 59.50 9.2229 3 216.0 18.60 393.24 8.05 33.00 0.03584 80.00 3.370 0 0.3980 6.2900 17.80 6.6115 4 337.0 16.10 396.90 4.67 23.50 0.04379 80.00 3.370 0 0.3980 5.7870 31.10 6.6115 4 337.0 16.10 396.90 10.24 19.40 0.05789 12.50 6.070 0 0.4090 5.8780 21.40 6.4980 4 345.0 18.90 396.21 8.10 22.00 0.13554 12.50 6.070 0 0.4090 5.5940 36.80 6.4980 4 345.0 18.90 396.90 13.09 17.40 0.12816 12.50 6.070 0 0.4090 5.8850 33.00 6.4980 4 345.0 18.90 396.90 8.79 20.90 0.08826 0.00 10.810 0 0.4130 6.4170 6.60 5.2873 4 305.0 19.20 383.73 6.72 24.20 0.15876 0.00 10.810 0 0.4130 5.9610 17.50 5.2873 4 305.0 19.20 376.94 9.88 21.70 0.09164 0.00 10.810 0 0.4130 6.0650 7.80 5.2873 4 305.0 19.20 390.91 5.52 22.80 0.19539 0.00 10.810 0 0.4130 6.2450 6.20 5.2873 4 305.0 19.20 377.17 7.54 23.40 0.07896 0.00 12.830 0 0.4370 6.2730 6.00 4.2515 5 398.0 18.70 394.92 6.78 24.10 0.09512 0.00 12.830 0 0.4370 6.2860 45.00 4.5026 5 398.0 18.70 383.23 8.94 21.40 0.10153 0.00 12.830 0 0.4370 6.2790 74.50 4.0522 5 398.0 18.70 373.66 11.97 20.00 0.08707 0.00 12.830 0 0.4370 6.1400 45.80 4.0905 5 398.0 18.70 386.96 10.27 20.80 0.05646 0.00 12.830 0 0.4370 6.2320 53.70 5.0141 5 398.0 18.70 386.40 12.34 21.20 0.08387 0.00 12.830 0 0.4370 5.8740 36.60 4.5026 
5 398.0 18.70 396.06 9.10 20.30 0.04113 25.00 4.860 0 0.4260 6.7270 33.50 5.4007 4 281.0 19.00 396.90 5.29 28.00 0.04462 25.00 4.860 0 0.4260 6.6190 70.40 5.4007 4 281.0 19.00 395.63 7.22 23.90 0.03659 25.00 4.860 0 0.4260 6.3020 32.20 5.4007 4 281.0 19.00 396.90 6.72 24.80 0.03551 25.00 4.860 0 0.4260 6.1670 46.70 5.4007 4 281.0 19.00 390.64 7.51 22.90 0.05059 0.00 4.490 0 0.4490 6.3890 48.00 4.7794 3 247.0 18.50 396.90 9.62 23.90 0.05735 0.00 4.490 0 0.4490 6.6300 56.10 4.4377 3 247.0 18.50 392.30 6.53 26.60 0.05188 0.00 4.490 0 0.4490 6.0150 45.10 4.4272 3 247.0 18.50 395.99 12.86 22.50 0.07151 0.00 4.490 0 0.4490 6.1210 56.80 3.7476 3 247.0 18.50 395.15 8.44 22.20 0.05660 0.00 3.410 0 0.4890 7.0070 86.30 3.4217 2 270.0 17.80 396.90 5.50 23.60 0.05302 0.00 3.410 0 0.4890 7.0790 63.10 3.4145 2 270.0 17.80 396.06 5.70 28.70 0.04684 0.00 3.410 0 0.4890 6.4170 66.10 3.0923 2 270.0 17.80 392.18 8.81 22.60 0.03932 0.00 3.410 0 0.4890 6.4050 73.90 3.0921 2 270.0 17.80 393.55 8.20 22.00 0.04203 28.00 15.040 0 0.4640 6.4420 53.60 3.6659 4 270.0 18.20 395.01 8.16 22.90 0.02875 28.00 15.040 0 0.4640 6.2110 28.90 3.6659 4 270.0 18.20 396.33 6.21 25.00 0.04294 28.00 15.040 0 0.4640 6.2490 77.30 3.6150 4 270.0 18.20 396.90 10.59 20.60 0.12204 0.00 2.890 0 0.4450 6.6250 57.80 3.4952 2 276.0 18.00 357.98 6.65 28.40 0.11504 0.00 2.890 0 0.4450 6.1630 69.60 3.4952 2 276.0 18.00 391.83 11.34 21.40 0.12083 0.00 2.890 0 0.4450 8.0690 76.00 3.4952 2 276.0 18.00 396.90 4.21 38.70 0.08187 0.00 2.890 0 0.4450 7.8200 36.90 3.4952 2 276.0 18.00 393.53 3.57 43.80 0.06860 0.00 2.890 0 0.4450 7.4160 62.50 3.4952 2 276.0 18.00 396.90 6.19 33.20 0.14866 0.00 8.560 0 0.5200 6.7270 79.90 2.7778 5 384.0 20.90 394.76 9.42 27.50 0.11432 0.00 8.560 0 0.5200 6.7810 71.30 2.8561 5 384.0 20.90 395.58 7.67 26.50 0.22876 0.00 8.560 0 0.5200 6.4050 85.40 2.7147 5 384.0 20.90 70.80 10.63 18.60 0.21161 0.00 8.560 0 0.5200 6.1370 87.40 2.7147 5 384.0 20.90 394.47 13.44 19.30 0.13960 0.00 8.560 0 0.5200 6.1670 90.00 2.4210 5 384.0 20.90 392.69 12.33 20.10 0.13262 0.00 8.560 0 0.5200 5.8510 96.70 2.1069 5 384.0 20.90 394.05 16.47 19.50 0.17120 0.00 8.560 0 0.5200 5.8360 91.90 2.2110 5 384.0 20.90 395.67 18.66 19.50 0.13117 0.00 8.560 0 0.5200 6.1270 85.20 2.1224 5 384.0 20.90 387.69 14.09 20.40 0.12802 0.00 8.560 0 0.5200 6.4740 97.10 2.4329 5 384.0 20.90 395.24 12.27 19.80 0.26363 0.00 8.560 0 0.5200 6.2290 91.20 2.5451 5 384.0 20.90 391.23 15.55 19.40 0.10793 0.00 8.560 0 0.5200 6.1950 54.40 2.7778 5 384.0 20.90 393.49 13.00 21.70 0.10084 0.00 10.010 0 0.5470 6.7150 81.60 2.6775 6 432.0 17.80 395.59 10.16 22.80 0.12329 0.00 10.010 0 0.5470 5.9130 92.90 2.3534 6 432.0 17.80 394.95 16.21 18.80 0.22212 0.00 10.010 0 0.5470 6.0920 95.40 2.5480 6 432.0 17.80 396.90 17.09 18.70 0.14231 0.00 10.010 0 0.5470 6.2540 84.20 2.2565 6 432.0 17.80 388.74 10.45 18.50 0.17134 0.00 10.010 0 0.5470 5.9280 88.20 2.4631 6 432.0 17.80 344.91 15.76 18.30 0.13158 0.00 10.010 0 0.5470 6.1760 72.50 2.7301 6 432.0 17.80 393.30 12.04 21.20 0.15098 0.00 10.010 0 0.5470 6.0210 82.60 2.7474 6 432.0 17.80 394.51 10.30 19.20 0.13058 0.00 10.010 0 0.5470 5.8720 73.10 2.4775 6 432.0 17.80 338.63 15.37 20.40 0.14476 0.00 10.010 0 0.5470 5.7310 65.20 2.7592 6 432.0 17.80 391.50 13.61 19.30 0.06899 0.00 25.650 0 0.5810 5.8700 69.70 2.2577 2 188.0 19.10 389.15 14.37 22.00 0.07165 0.00 25.650 0 0.5810 6.0040 84.10 2.1974 2 188.0 19.10 377.67 14.27 20.30 0.09299 0.00 25.650 0 0.5810 5.9610 92.90 2.0869 2 188.0 19.10 378.09 17.93 20.50 0.15038 0.00 25.650 0 0.5810 5.8560 
97.00 1.9444 2 188.0 19.10 370.31 25.41 17.30 0.09849 0.00 25.650 0 0.5810 5.8790 95.80 2.0063 2 188.0 19.10 379.38 17.58 18.80 0.16902 0.00 25.650 0 0.5810 5.9860 88.40 1.9929 2 188.0 19.10 385.02 14.81 21.40 0.38735 0.00 25.650 0 0.5810 5.6130 95.60 1.7572 2 188.0 19.10 359.29 27.26 15.70 0.25915 0.00 21.890 0 0.6240 5.6930 96.00 1.7883 4 437.0 21.20 392.11 17.19 16.20 0.32543 0.00 21.890 0 0.6240 6.4310 98.80 1.8125 4 437.0 21.20 396.90 15.39 18.00 0.88125 0.00 21.890 0 0.6240 5.6370 94.70 1.9799 4 437.0 21.20 396.90 18.34 14.30 0.34006 0.00 21.890 0 0.6240 6.4580 98.90 2.1185 4 437.0 21.20 395.04 12.60 19.20 1.19294 0.00 21.890 0 0.6240 6.3260 97.70 2.2710 4 437.0 21.20 396.90 12.26 19.60 0.59005 0.00 21.890 0 0.6240 6.3720 97.90 2.3274 4 437.0 21.20 385.76 11.12 23.00 0.32982 0.00 21.890 0 0.6240 5.8220 95.40 2.4699 4 437.0 21.20 388.69 15.03 18.40 0.97617 0.00 21.890 0 0.6240 5.7570 98.40 2.3460 4 437.0 21.20 262.76 17.31 15.60 0.55778 0.00 21.890 0 0.6240 6.3350 98.20 2.1107 4 437.0 21.20 394.67 16.96 18.10 0.32264 0.00 21.890 0 0.6240 5.9420 93.50 1.9669 4 437.0 21.20 378.25 16.90 17.40 0.35233 0.00 21.890 0 0.6240 6.4540 98.40 1.8498 4 437.0 21.20 394.08 14.59 17.10 0.24980 0.00 21.890 0 0.6240 5.8570 98.20 1.6686 4 437.0 21.20 392.04 21.32 13.30 0.54452 0.00 21.890 0 0.6240 6.1510 97.90 1.6687 4 437.0 21.20 396.90 18.46 17.80 0.29090 0.00 21.890 0 0.6240 6.1740 93.60 1.6119 4 437.0 21.20 388.08 24.16 14.00 1.62864 0.00 21.890 0 0.6240 5.0190 100.00 1.4394 4 437.0 21.20 396.90 34.41 14.40 3.32105 0.00 19.580 1 0.8710 5.4030 100.00 1.3216 5 403.0 14.70 396.90 26.82 13.40 4.09740 0.00 19.580 0 0.8710 5.4680 100.00 1.4118 5 403.0 14.70 396.90 26.42 15.60 2.77974 0.00 19.580 0 0.8710 4.9030 97.80 1.3459 5 403.0 14.70 396.90 29.29 11.80 2.37934 0.00 19.580 0 0.8710 6.1300 100.00 1.4191 5 403.0 14.70 172.91 27.80 13.80 2.15505 0.00 19.580 0 0.8710 5.6280 100.00 1.5166 5 403.0 14.70 169.27 16.65 15.60 2.36862 0.00 19.580 0 0.8710 4.9260 95.70 1.4608 5 403.0 14.70 391.71 29.53 14.60 2.33099 0.00 19.580 0 0.8710 5.1860 93.80 1.5296 5 403.0 14.70 356.99 28.32 17.80 2.73397 0.00 19.580 0 0.8710 5.5970 94.90 1.5257 5 403.0 14.70 351.85 21.45 15.40 1.65660 0.00 19.580 0 0.8710 6.1220 97.30 1.6180 5 403.0 14.70 372.80 14.10 21.50 1.49632 0.00 19.580 0 0.8710 5.4040 100.00 1.5916 5 403.0 14.70 341.60 13.28 19.60 1.12658 0.00 19.580 1 0.8710 5.0120 88.00 1.6102 5 403.0 14.70 343.28 12.12 15.30 2.14918 0.00 19.580 0 0.8710 5.7090 98.50 1.6232 5 403.0 14.70 261.95 15.79 19.40 1.41385 0.00 19.580 1 0.8710 6.1290 96.00 1.7494 5 403.0 14.70 321.02 15.12 17.00 3.53501 0.00 19.580 1 0.8710 6.1520 82.60 1.7455 5 403.0 14.70 88.01 15.02 15.60 2.44668 0.00 19.580 0 0.8710 5.2720 94.00 1.7364 5 403.0 14.70 88.63 16.14 13.10 1.22358 0.00 19.580 0 0.6050 6.9430 97.40 1.8773 5 403.0 14.70 363.43 4.59 41.30 1.34284 0.00 19.580 0 0.6050 6.0660 100.00 1.7573 5 403.0 14.70 353.89 6.43 24.30 1.42502 0.00 19.580 0 0.8710 6.5100 100.00 1.7659 5 403.0 14.70 364.31 7.39 23.30 1.27346 0.00 19.580 1 0.6050 6.2500 92.60 1.7984 5 403.0 14.70 338.92 5.50 27.00 1.46336 0.00 19.580 0 0.6050 7.4890 90.80 1.9709 5 403.0 14.70 374.43 1.73 50.00 1.83377 0.00 19.580 1 0.6050 7.8020 98.20 2.0407 5 403.0 14.70 389.61 1.92 50.00 1.51902 0.00 19.580 1 0.6050 8.3750 93.90 2.1620 5 403.0 14.70 388.45 3.32 50.00 2.24236 0.00 19.580 0 0.6050 5.8540 91.80 2.4220 5 403.0 14.70 395.11 11.64 22.70 2.92400 0.00 19.580 0 0.6050 6.1010 93.00 2.2834 5 403.0 14.70 240.16 9.81 25.00 2.01019 0.00 19.580 0 0.6050 7.9290 96.20 2.0459 5 403.0 14.70 
369.30 3.70 50.00 1.80028 0.00 19.580 0 0.6050 5.8770 79.20 2.4259 5 403.0 14.70 227.61 12.14 23.80 2.30040 0.00 19.580 0 0.6050 6.3190 96.10 2.1000 5 403.0 14.70 297.09 11.10 23.80 2.44953 0.00 19.580 0 0.6050 6.4020 95.20 2.2625 5 403.0 14.70 330.04 11.32 22.30 1.20742 0.00 19.580 0 0.6050 5.8750 94.60 2.4259 5 403.0 14.70 292.29 14.43 17.40 2.31390 0.00 19.580 0 0.6050 5.8800 97.30 2.3887 5 403.0 14.70 348.13 12.03 19.10 0.13914 0.00 4.050 0 0.5100 5.5720 88.50 2.5961 5 296.0 16.60 396.90 14.69 23.10 0.09178 0.00 4.050 0 0.5100 6.4160 84.10 2.6463 5 296.0 16.60 395.50 9.04 23.60 0.08447 0.00 4.050 0 0.5100 5.8590 68.70 2.7019 5 296.0 16.60 393.23 9.64 22.60 0.06664 0.00 4.050 0 0.5100 6.5460 33.10 3.1323 5 296.0 16.60 390.96 5.33 29.40 0.07022 0.00 4.050 0 0.5100 6.0200 47.20 3.5549 5 296.0 16.60 393.23 10.11 23.20 0.05425 0.00 4.050 0 0.5100 6.3150 73.40 3.3175 5 296.0 16.60 395.60 6.29 24.60 0.06642 0.00 4.050 0 0.5100 6.8600 74.40 2.9153 5 296.0 16.60 391.27 6.92 29.90 0.05780 0.00 2.460 0 0.4880 6.9800 58.40 2.8290 3 193.0 17.80 396.90 5.04 37.20 0.06588 0.00 2.460 0 0.4880 7.7650 83.30 2.7410 3 193.0 17.80 395.56 7.56 39.80 0.06888 0.00 2.460 0 0.4880 6.1440 62.20 2.5979 3 193.0 17.80 396.90 9.45 36.20 0.09103 0.00 2.460 0 0.4880 7.1550 92.20 2.7006 3 193.0 17.80 394.12 4.82 37.90 0.10008 0.00 2.460 0 0.4880 6.5630 95.60 2.8470 3 193.0 17.80 396.90 5.68 32.50 0.08308 0.00 2.460 0 0.4880 5.6040 89.80 2.9879 3 193.0 17.80 391.00 13.98 26.40 0.06047 0.00 2.460 0 0.4880 6.1530 68.80 3.2797 3 193.0 17.80 387.11 13.15 29.60 0.05602 0.00 2.460 0 0.4880 7.8310 53.60 3.1992 3 193.0 17.80 392.63 4.45 50.00 0.07875 45.00 3.440 0 0.4370 6.7820 41.10 3.7886 5 398.0 15.20 393.87 6.68 32.00 0.12579 45.00 3.440 0 0.4370 6.5560 29.10 4.5667 5 398.0 15.20 382.84 4.56 29.80 0.08370 45.00 3.440 0 0.4370 7.1850 38.90 4.5667 5 398.0 15.20 396.90 5.39 34.90 0.09068 45.00 3.440 0 0.4370 6.9510 21.50 6.4798 5 398.0 15.20 377.68 5.10 37.00 0.06911 45.00 3.440 0 0.4370 6.7390 30.80 6.4798 5 398.0 15.20 389.71 4.69 30.50 0.08664 45.00 3.440 0 0.4370 7.1780 26.30 6.4798 5 398.0 15.20 390.49 2.87 36.40 0.02187 60.00 2.930 0 0.4010 6.8000 9.90 6.2196 1 265.0 15.60 393.37 5.03 31.10 0.01439 60.00 2.930 0 0.4010 6.6040 18.80 6.2196 1 265.0 15.60 376.70 4.38 29.10 0.01381 80.00 0.460 0 0.4220 7.8750 32.00 5.6484 4 255.0 14.40 394.23 2.97 50.00 0.04011 80.00 1.520 0 0.4040 7.2870 34.10 7.3090 2 329.0 12.60 396.90 4.08 33.30 0.04666 80.00 1.520 0 0.4040 7.1070 36.60 7.3090 2 329.0 12.60 354.31 8.61 30.30 0.03768 80.00 1.520 0 0.4040 7.2740 38.30 7.3090 2 329.0 12.60 392.20 6.62 34.60 0.03150 95.00 1.470 0 0.4030 6.9750 15.30 7.6534 3 402.0 17.00 396.90 4.56 34.90 0.01778 95.00 1.470 0 0.4030 7.1350 13.90 7.6534 3 402.0 17.00 384.30 4.45 32.90 0.03445 82.50 2.030 0 0.4150 6.1620 38.40 6.2700 2 348.0 14.70 393.77 7.43 24.10 0.02177 82.50 2.030 0 0.4150 7.6100 15.70 6.2700 2 348.0 14.70 395.38 3.11 42.30 0.03510 95.00 2.680 0 0.4161 7.8530 33.20 5.1180 4 224.0 14.70 392.78 3.81 48.50 0.02009 95.00 2.680 0 0.4161 8.0340 31.90 5.1180 4 224.0 14.70 390.55 2.88 50.00 0.13642 0.00 10.590 0 0.4890 5.8910 22.30 3.9454 4 277.0 18.60 396.90 10.87 22.60 0.22969 0.00 10.590 0 0.4890 6.3260 52.50 4.3549 4 277.0 18.60 394.87 10.97 24.40 0.25199 0.00 10.590 0 0.4890 5.7830 72.70 4.3549 4 277.0 18.60 389.43 18.06 22.50 0.13587 0.00 10.590 1 0.4890 6.0640 59.10 4.2392 4 277.0 18.60 381.32 14.66 24.40 0.43571 0.00 10.590 1 0.4890 5.3440 100.00 3.8750 4 277.0 18.60 396.90 23.09 20.00 0.17446 0.00 10.590 1 0.4890 5.9600 92.10 3.8771 4 
277.0 18.60 393.25 17.27 21.70 0.37578 0.00 10.590 1 0.4890 5.4040 88.60 3.6650 4 277.0 18.60 395.24 23.98 19.30 0.21719 0.00 10.590 1 0.4890 5.8070 53.80 3.6526 4 277.0 18.60 390.94 16.03 22.40 0.14052 0.00 10.590 0 0.4890 6.3750 32.30 3.9454 4 277.0 18.60 385.81 9.38 28.10 0.28955 0.00 10.590 0 0.4890 5.4120 9.80 3.5875 4 277.0 18.60 348.93 29.55 23.70 0.19802 0.00 10.590 0 0.4890 6.1820 42.40 3.9454 4 277.0 18.60 393.63 9.47 25.00 0.04560 0.00 13.890 1 0.5500 5.8880 56.00 3.1121 5 276.0 16.40 392.80 13.51 23.30 0.07013 0.00 13.890 0 0.5500 6.6420 85.10 3.4211 5 276.0 16.40 392.78 9.69 28.70 0.11069 0.00 13.890 1 0.5500 5.9510 93.80 2.8893 5 276.0 16.40 396.90 17.92 21.50 0.11425 0.00 13.890 1 0.5500 6.3730 92.40 3.3633 5 276.0 16.40 393.74 10.50 23.00 0.35809 0.00 6.200 1 0.5070 6.9510 88.50 2.8617 8 307.0 17.40 391.70 9.71 26.70 0.40771 0.00 6.200 1 0.5070 6.1640 91.30 3.0480 8 307.0 17.40 395.24 21.46 21.70 0.62356 0.00 6.200 1 0.5070 6.8790 77.70 3.2721 8 307.0 17.40 390.39 9.93 27.50 0.61470 0.00 6.200 0 0.5070 6.6180 80.80 3.2721 8 307.0 17.40 396.90 7.60 30.10 0.31533 0.00 6.200 0 0.5040 8.2660 78.30 2.8944 8 307.0 17.40 385.05 4.14 44.80 0.52693 0.00 6.200 0 0.5040 8.7250 83.00 2.8944 8 307.0 17.40 382.00 4.63 50.00 0.38214 0.00 6.200 0 0.5040 8.0400 86.50 3.2157 8 307.0 17.40 387.38 3.13 37.60 0.41238 0.00 6.200 0 0.5040 7.1630 79.90 3.2157 8 307.0 17.40 372.08 6.36 31.60 0.29819 0.00 6.200 0 0.5040 7.6860 17.00 3.3751 8 307.0 17.40 377.51 3.92 46.70 0.44178 0.00 6.200 0 0.5040 6.5520 21.40 3.3751 8 307.0 17.40 380.34 3.76 31.50 0.53700 0.00 6.200 0 0.5040 5.9810 68.10 3.6715 8 307.0 17.40 378.35 11.65 24.30 0.46296 0.00 6.200 0 0.5040 7.4120 76.90 3.6715 8 307.0 17.40 376.14 5.25 31.70 0.57529 0.00 6.200 0 0.5070 8.3370 73.30 3.8384 8 307.0 17.40 385.91 2.47 41.70 0.33147 0.00 6.200 0 0.5070 8.2470 70.40 3.6519 8 307.0 17.40 378.95 3.95 48.30 0.44791 0.00 6.200 1 0.5070 6.7260 66.50 3.6519 8 307.0 17.40 360.20 8.05 29.00 0.33045 0.00 6.200 0 0.5070 6.0860 61.50 3.6519 8 307.0 17.40 376.75 10.88 24.00 0.52058 0.00 6.200 1 0.5070 6.6310 76.50 4.1480 8 307.0 17.40 388.45 9.54 25.10 0.51183 0.00 6.200 0 0.5070 7.3580 71.60 4.1480 8 307.0 17.40 390.07 4.73 31.50 0.08244 30.00 4.930 0 0.4280 6.4810 18.50 6.1899 6 300.0 16.60 379.41 6.36 23.70 0.09252 30.00 4.930 0 0.4280 6.6060 42.20 6.1899 6 300.0 16.60 383.78 7.37 23.30 0.11329 30.00 4.930 0 0.4280 6.8970 54.30 6.3361 6 300.0 16.60 391.25 11.38 22.00 0.10612 30.00 4.930 0 0.4280 6.0950 65.10 6.3361 6 300.0 16.60 394.62 12.40 20.10 0.10290 30.00 4.930 0 0.4280 6.3580 52.90 7.0355 6 300.0 16.60 372.75 11.22 22.20 0.12757 30.00 4.930 0 0.4280 6.3930 7.80 7.0355 6 300.0 16.60 374.71 5.19 23.70 0.20608 22.00 5.860 0 0.4310 5.5930 76.50 7.9549 7 330.0 19.10 372.49 12.50 17.60 0.19133 22.00 5.860 0 0.4310 5.6050 70.20 7.9549 7 330.0 19.10 389.13 18.46 18.50 0.33983 22.00 5.860 0 0.4310 6.1080 34.90 8.0555 7 330.0 19.10 390.18 9.16 24.30 0.19657 22.00 5.860 0 0.4310 6.2260 79.20 8.0555 7 330.0 19.10 376.14 10.15 20.50 0.16439 22.00 5.860 0 0.4310 6.4330 49.10 7.8265 7 330.0 19.10 374.71 9.52 24.50 0.19073 22.00 5.860 0 0.4310 6.7180 17.50 7.8265 7 330.0 19.10 393.74 6.56 26.20 0.14030 22.00 5.860 0 0.4310 6.4870 13.00 7.3967 7 330.0 19.10 396.28 5.90 24.40 0.21409 22.00 5.860 0 0.4310 6.4380 8.90 7.3967 7 330.0 19.10 377.07 3.59 24.80 0.08221 22.00 5.860 0 0.4310 6.9570 6.80 8.9067 7 330.0 19.10 386.09 3.53 29.60 0.36894 22.00 5.860 0 0.4310 8.2590 8.40 8.9067 7 330.0 19.10 396.90 3.54 42.80 0.04819 80.00 3.640 0 0.3920 6.1080 32.00 9.2203 
1 315.0 16.40 392.89 6.57 21.90 0.03548 80.00 3.640 0 0.3920 5.8760 19.10 9.2203 1 315.0 16.40 395.18 9.25 20.90 0.01538 90.00 3.750 0 0.3940 7.4540 34.20 6.3361 3 244.0 15.90 386.34 3.11 44.00 0.61154 20.00 3.970 0 0.6470 8.7040 86.90 1.8010 5 264.0 13.00 389.70 5.12 50.00 0.66351 20.00 3.970 0 0.6470 7.3330 100.00 1.8946 5 264.0 13.00 383.29 7.79 36.00 0.65665 20.00 3.970 0 0.6470 6.8420 100.00 2.0107 5 264.0 13.00 391.93 6.90 30.10 0.54011 20.00 3.970 0 0.6470 7.2030 81.80 2.1121 5 264.0 13.00 392.80 9.59 33.80 0.53412 20.00 3.970 0 0.6470 7.5200 89.40 2.1398 5 264.0 13.00 388.37 7.26 43.10 0.52014 20.00 3.970 0 0.6470 8.3980 91.50 2.2885 5 264.0 13.00 386.86 5.91 48.80 0.82526 20.00 3.970 0 0.6470 7.3270 94.50 2.0788 5 264.0 13.00 393.42 11.25 31.00 0.55007 20.00 3.970 0 0.6470 7.2060 91.60 1.9301 5 264.0 13.00 387.89 8.10 36.50 0.76162 20.00 3.970 0 0.6470 5.5600 62.80 1.9865 5 264.0 13.00 392.40 10.45 22.80 0.78570 20.00 3.970 0 0.6470 7.0140 84.60 2.1329 5 264.0 13.00 384.07 14.79 30.70 0.57834 20.00 3.970 0 0.5750 8.2970 67.00 2.4216 5 264.0 13.00 384.54 7.44 50.00 0.54050 20.00 3.970 0 0.5750 7.4700 52.60 2.8720 5 264.0 13.00 390.30 3.16 43.50 0.09065 20.00 6.960 1 0.4640 5.9200 61.50 3.9175 3 223.0 18.60 391.34 13.65 20.70 0.29916 20.00 6.960 0 0.4640 5.8560 42.10 4.4290 3 223.0 18.60 388.65 13.00 21.10 0.16211 20.00 6.960 0 0.4640 6.2400 16.30 4.4290 3 223.0 18.60 396.90 6.59 25.20 0.11460 20.00 6.960 0 0.4640 6.5380 58.70 3.9175 3 223.0 18.60 394.96 7.73 24.40 0.22188 20.00 6.960 1 0.4640 7.6910 51.80 4.3665 3 223.0 18.60 390.77 6.58 35.20 0.05644 40.00 6.410 1 0.4470 6.7580 32.90 4.0776 4 254.0 17.60 396.90 3.53 32.40 0.09604 40.00 6.410 0 0.4470 6.8540 42.80 4.2673 4 254.0 17.60 396.90 2.98 32.00 0.10469 40.00 6.410 1 0.4470 7.2670 49.00 4.7872 4 254.0 17.60 389.25 6.05 33.20 0.06127 40.00 6.410 1 0.4470 6.8260 27.60 4.8628 4 254.0 17.60 393.45 4.16 33.10 0.07978 40.00 6.410 0 0.4470 6.4820 32.10 4.1403 4 254.0 17.60 396.90 7.19 29.10 0.21038 20.00 3.330 0 0.4429 6.8120 32.20 4.1007 5 216.0 14.90 396.90 4.85 35.10 0.03578 20.00 3.330 0 0.4429 7.8200 64.50 4.6947 5 216.0 14.90 387.31 3.76 45.40 0.03705 20.00 3.330 0 0.4429 6.9680 37.20 5.2447 5 216.0 14.90 392.23 4.59 35.40 0.06129 20.00 3.330 1 0.4429 7.6450 49.70 5.2119 5 216.0 14.90 377.07 3.01 46.00 0.01501 90.00 1.210 1 0.4010 7.9230 24.80 5.8850 1 198.0 13.60 395.52 3.16 50.00 0.00906 90.00 2.970 0 0.4000 7.0880 20.80 7.3073 1 285.0 15.30 394.72 7.85 32.20 0.01096 55.00 2.250 0 0.3890 6.4530 31.90 7.3073 1 300.0 15.30 394.72 8.23 22.00 0.01965 80.00 1.760 0 0.3850 6.2300 31.50 9.0892 1 241.0 18.20 341.60 12.93 20.10 0.03871 52.50 5.320 0 0.4050 6.2090 31.30 7.3172 6 293.0 16.60 396.90 7.14 23.20 0.04590 52.50 5.320 0 0.4050 6.3150 45.60 7.3172 6 293.0 16.60 396.90 7.60 22.30 0.04297 52.50 5.320 0 0.4050 6.5650 22.90 7.3172 6 293.0 16.60 371.72 9.51 24.80 0.03502 80.00 4.950 0 0.4110 6.8610 27.90 5.1167 4 245.0 19.20 396.90 3.33 28.50 0.07886 80.00 4.950 0 0.4110 7.1480 27.70 5.1167 4 245.0 19.20 396.90 3.56 37.30 0.03615 80.00 4.950 0 0.4110 6.6300 23.40 5.1167 4 245.0 19.20 396.90 4.70 27.90 0.08265 0.00 13.920 0 0.4370 6.1270 18.40 5.5027 4 289.0 16.00 396.90 8.58 23.90 0.08199 0.00 13.920 0 0.4370 6.0090 42.30 5.5027 4 289.0 16.00 396.90 10.40 21.70 0.12932 0.00 13.920 0 0.4370 6.6780 31.10 5.9604 4 289.0 16.00 396.90 6.27 28.60 0.05372 0.00 13.920 0 0.4370 6.5490 51.00 5.9604 4 289.0 16.00 392.85 7.39 27.10 0.14103 0.00 13.920 0 0.4370 5.7900 58.00 6.3200 4 289.0 16.00 396.90 15.84 20.30 0.06466 70.00 2.240 0 0.4000 
6.3450 20.10 7.8278 5 358.0 14.80 368.24 4.97 22.50 0.05561 70.00 2.240 0 0.4000 7.0410 10.00 7.8278 5 358.0 14.80 371.58 4.74 29.00 0.04417 70.00 2.240 0 0.4000 6.8710 47.40 7.8278 5 358.0 14.80 390.86 6.07 24.80 0.03537 34.00 6.090 0 0.4330 6.5900 40.40 5.4917 7 329.0 16.10 395.75 9.50 22.00 0.09266 34.00 6.090 0 0.4330 6.4950 18.40 5.4917 7 329.0 16.10 383.61 8.67 26.40 0.10000 34.00 6.090 0 0.4330 6.9820 17.70 5.4917 7 329.0 16.10 390.43 4.86 33.10 0.05515 33.00 2.180 0 0.4720 7.2360 41.10 4.0220 7 222.0 18.40 393.68 6.93 36.10 0.05479 33.00 2.180 0 0.4720 6.6160 58.10 3.3700 7 222.0 18.40 393.36 8.93 28.40 0.07503 33.00 2.180 0 0.4720 7.4200 71.90 3.0992 7 222.0 18.40 396.90 6.47 33.40 0.04932 33.00 2.180 0 0.4720 6.8490 70.30 3.1827 7 222.0 18.40 396.90 7.53 28.20 0.49298 0.00 9.900 0 0.5440 6.6350 82.50 3.3175 4 304.0 18.40 396.90 4.54 22.80 0.34940 0.00 9.900 0 0.5440 5.9720 76.70 3.1025 4 304.0 18.40 396.24 9.97 20.30 2.63548 0.00 9.900 0 0.5440 4.9730 37.80 2.5194 4 304.0 18.40 350.45 12.64 16.10 0.79041 0.00 9.900 0 0.5440 6.1220 52.80 2.6403 4 304.0 18.40 396.90 5.98 22.10 0.26169 0.00 9.900 0 0.5440 6.0230 90.40 2.8340 4 304.0 18.40 396.30 11.72 19.40 0.26938 0.00 9.900 0 0.5440 6.2660 82.80 3.2628 4 304.0 18.40 393.39 7.90 21.60 0.36920 0.00 9.900 0 0.5440 6.5670 87.30 3.6023 4 304.0 18.40 395.69 9.28 23.80 0.25356 0.00 9.900 0 0.5440 5.7050 77.70 3.9450 4 304.0 18.40 396.42 11.50 16.20 0.31827 0.00 9.900 0 0.5440 5.9140 83.20 3.9986 4 304.0 18.40 390.70 18.33 17.80 0.24522 0.00 9.900 0 0.5440 5.7820 71.70 4.0317 4 304.0 18.40 396.90 15.94 19.80 0.40202 0.00 9.900 0 0.5440 6.3820 67.20 3.5325 4 304.0 18.40 395.21 10.36 23.10 0.47547 0.00 9.900 0 0.5440 6.1130 58.80 4.0019 4 304.0 18.40 396.23 12.73 21.00 0.16760 0.00 7.380 0 0.4930 6.4260 52.30 4.5404 5 287.0 19.60 396.90 7.20 23.80 0.18159 0.00 7.380 0 0.4930 6.3760 54.30 4.5404 5 287.0 19.60 396.90 6.87 23.10 0.35114 0.00 7.380 0 0.4930 6.0410 49.90 4.7211 5 287.0 19.60 396.90 7.70 20.40 0.28392 0.00 7.380 0 0.4930 5.7080 74.30 4.7211 5 287.0 19.60 391.13 11.74 18.50 0.34109 0.00 7.380 0 0.4930 6.4150 40.10 4.7211 5 287.0 19.60 396.90 6.12 25.00 0.19186 0.00 7.380 0 0.4930 6.4310 14.70 5.4159 5 287.0 19.60 393.68 5.08 24.60 0.30347 0.00 7.380 0 0.4930 6.3120 28.90 5.4159 5 287.0 19.60 396.90 6.15 23.00 0.24103 0.00 7.380 0 0.4930 6.0830 43.70 5.4159 5 287.0 19.60 396.90 12.79 22.20 0.06617 0.00 3.240 0 0.4600 5.8680 25.80 5.2146 4 430.0 16.90 382.44 9.97 19.30 0.06724 0.00 3.240 0 0.4600 6.3330 17.20 5.2146 4 430.0 16.90 375.21 7.34 22.60 0.04544 0.00 3.240 0 0.4600 6.1440 32.20 5.8736 4 430.0 16.90 368.57 9.09 19.80 0.05023 35.00 6.060 0 0.4379 5.7060 28.40 6.6407 1 304.0 16.90 394.02 12.43 17.10 0.03466 35.00 6.060 0 0.4379 6.0310 23.30 6.6407 1 304.0 16.90 362.25 7.83 19.40 0.05083 0.00 5.190 0 0.5150 6.3160 38.10 6.4584 5 224.0 20.20 389.71 5.68 22.20 0.03738 0.00 5.190 0 0.5150 6.3100 38.50 6.4584 5 224.0 20.20 389.40 6.75 20.70 0.03961 0.00 5.190 0 0.5150 6.0370 34.50 5.9853 5 224.0 20.20 396.90 8.01 21.10 0.03427 0.00 5.190 0 0.5150 5.8690 46.30 5.2311 5 224.0 20.20 396.90 9.80 19.50 0.03041 0.00 5.190 0 0.5150 5.8950 59.60 5.6150 5 224.0 20.20 394.81 10.56 18.50 0.03306 0.00 5.190 0 0.5150 6.0590 37.30 4.8122 5 224.0 20.20 396.14 8.51 20.60 0.05497 0.00 5.190 0 0.5150 5.9850 45.40 4.8122 5 224.0 20.20 396.90 9.74 19.00 0.06151 0.00 5.190 0 0.5150 5.9680 58.50 4.8122 5 224.0 20.20 396.90 9.29 18.70 0.01301 35.00 1.520 0 0.4420 7.2410 49.30 7.0379 1 284.0 15.50 394.74 5.49 32.70 0.02498 0.00 1.890 0 0.5180 6.5400 
59.70 6.2669 1 422.0 15.90 389.96 8.65 16.50 0.02543 55.00 3.780 0 0.4840 6.6960 56.40 5.7321 5 370.0 17.60 396.90 7.18 23.90 0.03049 55.00 3.780 0 0.4840 6.8740 28.10 6.4654 5 370.0 17.60 387.97 4.61 31.20 0.03113 0.00 4.390 0 0.4420 6.0140 48.50 8.0136 3 352.0 18.80 385.64 10.53 17.50 0.06162 0.00 4.390 0 0.4420 5.8980 52.30 8.0136 3 352.0 18.80 364.61 12.67 17.20 0.01870 85.00 4.150 0 0.4290 6.5160 27.70 8.5353 4 351.0 17.90 392.43 6.36 23.10 0.01501 80.00 2.010 0 0.4350 6.6350 29.70 8.3440 4 280.0 17.00 390.94 5.99 24.50 0.02899 40.00 1.250 0 0.4290 6.9390 34.50 8.7921 1 335.0 19.70 389.85 5.89 26.60 0.06211 40.00 1.250 0 0.4290 6.4900 44.40 8.7921 1 335.0 19.70 396.90 5.98 22.90 0.07950 60.00 1.690 0 0.4110 6.5790 35.90 10.7103 4 411.0 18.30 370.78 5.49 24.10 0.07244 60.00 1.690 0 0.4110 5.8840 18.50 10.7103 4 411.0 18.30 392.33 7.79 18.60 0.01709 90.00 2.020 0 0.4100 6.7280 36.10 12.1265 5 187.0 17.00 384.46 4.50 30.10 0.04301 80.00 1.910 0 0.4130 5.6630 21.90 10.5857 4 334.0 22.00 382.80 8.05 18.20 0.10659 80.00 1.910 0 0.4130 5.9360 19.50 10.5857 4 334.0 22.00 376.04 5.57 20.60 8.98296 0.00 18.100 1 0.7700 6.2120 97.40 2.1222 24 666.0 20.20 377.73 17.60 17.80 3.84970 0.00 18.100 1 0.7700 6.3950 91.00 2.5052 24 666.0 20.20 391.34 13.27 21.70 5.20177 0.00 18.100 1 0.7700 6.1270 83.40 2.7227 24 666.0 20.20 395.43 11.48 22.70 4.26131 0.00 18.100 0 0.7700 6.1120 81.30 2.5091 24 666.0 20.20 390.74 12.67 22.60 4.54192 0.00 18.100 0 0.7700 6.3980 88.00 2.5182 24 666.0 20.20 374.56 7.79 25.00 3.83684 0.00 18.100 0 0.7700 6.2510 91.10 2.2955 24 666.0 20.20 350.65 14.19 19.90 3.67822 0.00 18.100 0 0.7700 5.3620 96.20 2.1036 24 666.0 20.20 380.79 10.19 20.80 4.22239 0.00 18.100 1 0.7700 5.8030 89.00 1.9047 24 666.0 20.20 353.04 14.64 16.80 3.47428 0.00 18.100 1 0.7180 8.7800 82.90 1.9047 24 666.0 20.20 354.55 5.29 21.90 4.55587 0.00 18.100 0 0.7180 3.5610 87.90 1.6132 24 666.0 20.20 354.70 7.12 27.50 3.69695 0.00 18.100 0 0.7180 4.9630 91.40 1.7523 24 666.0 20.20 316.03 14.00 21.90 3.52220 0.00 18.100 0 0.6310 3.8630 100.00 1.5106 24 666.0 20.20 131.42 13.33 23.10 4.89822 0.00 18.100 0 0.6310 4.9700 100.00 1.3325 24 666.0 20.20 375.52 3.26 50.00 5.66998 0.00 18.100 1 0.6310 6.6830 96.80 1.3567 24 666.0 20.20 375.33 3.73 50.00 6.53876 0.00 18.100 1 0.6310 7.0160 97.50 1.2024 24 666.0 20.20 392.05 2.96 50.00 9.23230 0.00 18.100 0 0.6310 6.2160 100.00 1.1691 24 666.0 20.20 366.15 9.53 50.00 8.26725 0.00 18.100 1 0.6680 5.8750 89.60 1.1296 24 666.0 20.20 347.88 8.88 50.00 1.10810 0.00 18.100 0 0.6680 4.9060 100.00 1.1742 24 666.0 20.20 396.90 34.77 13.80 8.49820 0.00 18.100 0 0.6680 4.1380 100.00 1.1370 24 666.0 20.20 396.90 37.97 13.80 9.60910 0.00 18.100 0 0.6710 7.3130 97.90 1.3163 24 666.0 20.20 396.90 13.44 15.00 5.28800 0.00 18.100 0 0.6710 6.6490 93.30 1.3449 24 666.0 20.20 363.02 23.24 13.90 9.82349 0.00 18.100 0 0.6710 6.7940 98.80 1.3580 24 666.0 20.20 396.90 21.24 13.30 3.64820 0.00 18.100 0 0.6710 6.3800 96.20 1.3861 24 666.0 20.20 396.90 23.69 13.10 7.86670 0.00 18.100 0 0.6710 6.2230 100.00 1.3861 24 666.0 20.20 393.74 21.78 10.20 8.97620 0.00 18.100 0 0.6710 6.9680 91.90 1.4165 24 666.0 20.20 396.90 17.21 10.40 5.87440 0.00 18.100 0 0.6710 6.5450 99.10 1.5192 24 666.0 20.20 396.90 21.08 10.90 9.18702 0.00 18.100 0 0.7000 5.5360 100.00 1.5804 24 666.0 20.20 396.90 23.60 11.30 7.99248 0.00 18.100 0 0.7000 5.5200 100.00 1.5331 24 666.0 20.20 396.90 24.56 12.30 0.08490 0.00 18.100 0 0.7000 4.3680 91.20 1.4395 24 666.0 20.20 285.83 30.63 8.80 6.81180 0.00 18.100 0 0.7000 5.2770 98.10 
1.4261 24 666.0 20.20 396.90 30.81 7.20 4.39380 0.00 18.100 0 0.7000 4.6520 100.00 1.4672 24 666.0 20.20 396.90 28.28 10.50 2.59710 0.00 18.100 0 0.7000 5.0000 89.50 1.5184 24 666.0 20.20 396.90 31.99 7.40 4.33370 0.00 18.100 0 0.7000 4.8800 100.00 1.5895 24 666.0 20.20 372.92 30.62 10.20 8.15174 0.00 18.100 0 0.7000 5.3900 98.90 1.7281 24 666.0 20.20 396.90 20.85 11.50 6.96215 0.00 18.100 0 0.7000 5.7130 97.00 1.9265 24 666.0 20.20 394.43 17.11 15.10 5.29305 0.00 18.100 0 0.7000 6.0510 82.50 2.1678 24 666.0 20.20 378.38 18.76 23.20 1.57790 0.00 18.100 0 0.7000 5.0360 97.00 1.7700 24 666.0 20.20 396.90 25.68 9.70 8.64476 0.00 18.100 0 0.6930 6.1930 92.60 1.7912 24 666.0 20.20 396.90 15.17 13.80 3.35980 0.00 18.100 0 0.6930 5.8870 94.70 1.7821 24 666.0 20.20 396.90 16.35 12.70 8.71675 0.00 18.100 0 0.6930 6.4710 98.80 1.7257 24 666.0 20.20 391.98 17.12 13.10 5.87205 0.00 18.100 0 0.6930 6.4050 96.00 1.6768 24 666.0 20.20 396.90 19.37 12.50 7.67202 0.00 18.100 0 0.6930 5.7470 98.90 1.6334 24 666.0 20.20 393.10 19.92 8.50 8.35180 0.00 18.100 0 0.6930 5.4530 100.00 1.4896 24 666.0 20.20 396.90 30.59 5.00 9.91655 0.00 18.100 0 0.6930 5.8520 77.80 1.5004 24 666.0 20.20 338.16 29.97 6.30 5.04610 0.00 18.100 0 0.6930 5.9870 100.00 1.5888 24 666.0 20.20 396.90 26.77 5.60 4.23620 0.00 18.100 0 0.6930 6.3430 100.00 1.5741 24 666.0 20.20 396.90 20.32 7.20 9.59571 0.00 18.100 0 0.6930 6.4040 100.00 1.6390 24 666.0 20.20 376.11 20.31 12.10 4.80170 0.00 18.100 0 0.6930 5.3490 96.00 1.7028 24 666.0 20.20 396.90 19.77 8.30 1.52920 0.00 18.100 0 0.6930 5.5310 85.40 1.6074 24 666.0 20.20 329.46 27.38 8.50 7.92080 0.00 18.100 0 0.6930 5.6830 100.00 1.4254 24 666.0 20.20 384.97 22.98 5.00 0.71620 0.00 18.100 0 0.6590 4.1380 100.00 1.1781 24 666.0 20.20 370.22 23.34 11.90 1.95110 0.00 18.100 0 0.6590 5.6080 100.00 1.2852 24 666.0 20.20 332.09 12.13 27.90 7.40389 0.00 18.100 0 0.5970 5.6170 97.90 1.4547 24 666.0 20.20 314.64 26.40 17.20 4.43830 0.00 18.100 0 0.5970 6.8520 100.00 1.4655 24 666.0 20.20 179.36 19.78 27.50 1.13580 0.00 18.100 0 0.5970 5.7570 100.00 1.4130 24 666.0 20.20 2.60 10.11 15.00 4.05070 0.00 18.100 0 0.5970 6.6570 100.00 1.5275 24 666.0 20.20 35.05 21.22 17.20 8.81100 0.00 18.100 0 0.5970 4.6280 100.00 1.5539 24 666.0 20.20 28.79 34.37 17.90 8.65580 0.00 18.100 0 0.5970 5.1550 100.00 1.5894 24 666.0 20.20 210.97 20.08 16.30 5.74610 0.00 18.100 0 0.6930 4.5190 100.00 1.6582 24 666.0 20.20 88.27 36.98 7.00 8.08460 0.00 18.100 0 0.6790 6.4340 100.00 1.8347 24 666.0 20.20 27.25 29.05 7.20 0.83420 0.00 18.100 0 0.6790 6.7820 90.80 1.8195 24 666.0 20.20 21.57 25.79 7.50 5.94060 0.00 18.100 0 0.6790 5.3040 89.10 1.6475 24 666.0 20.20 127.36 26.64 10.40 3.53410 0.00 18.100 0 0.6790 5.9570 100.00 1.8026 24 666.0 20.20 16.45 20.62 8.80 1.81230 0.00 18.100 0 0.7180 6.8240 76.50 1.7940 24 666.0 20.20 48.45 22.74 8.40 1.08740 0.00 18.100 0 0.7180 6.4110 100.00 1.8589 24 666.0 20.20 318.75 15.02 16.70 7.02259 0.00 18.100 0 0.7180 6.0060 95.30 1.8746 24 666.0 20.20 319.98 15.70 14.20 2.04820 0.00 18.100 0 0.6140 5.6480 87.60 1.9512 24 666.0 20.20 291.55 14.10 20.80 7.05042 0.00 18.100 0 0.6140 6.1030 85.10 2.0218 24 666.0 20.20 2.52 23.29 13.40 8.79212 0.00 18.100 0 0.5840 5.5650 70.60 2.0635 24 666.0 20.20 3.65 17.16 11.70 5.86030 0.00 18.100 0 0.6790 5.8960 95.40 1.9096 24 666.0 20.20 7.68 24.39 8.30 2.24720 0.00 18.100 0 0.5840 5.8370 59.70 1.9976 24 666.0 20.20 24.65 15.69 10.20 7.66190 0.00 18.100 0 0.6790 6.2020 78.70 1.8629 24 666.0 20.20 18.82 14.52 10.90 7.36711 0.00 18.100 0 0.6790 6.1930 78.10 
1.9356 24 666.0 20.20 96.73 21.52 11.00 9.33889 0.00 18.100 0 0.6790 6.3800 95.60 1.9682 24 666.0 20.20 60.72 24.08 9.50 8.49213 0.00 18.100 0 0.5840 6.3480 86.10 2.0527 24 666.0 20.20 83.45 17.64 14.50 0.06230 0.00 18.100 0 0.5840 6.8330 94.30 2.0882 24 666.0 20.20 81.33 19.69 14.10 6.44405 0.00 18.100 0 0.5840 6.4250 74.80 2.2004 24 666.0 20.20 97.95 12.03 16.10 5.58107 0.00 18.100 0 0.7130 6.4360 87.90 2.3158 24 666.0 20.20 100.19 16.22 14.30 3.91340 0.00 18.100 0 0.7130 6.2080 95.00 2.2222 24 666.0 20.20 100.63 15.17 11.70 1.16040 0.00 18.100 0 0.7400 6.6290 94.60 2.1247 24 666.0 20.20 109.85 23.27 13.40 4.42080 0.00 18.100 0 0.7400 6.4610 93.30 2.0026 24 666.0 20.20 27.49 18.05 9.60 5.17720 0.00 18.100 0 0.7400 6.1520 100.00 1.9142 24 666.0 20.20 9.32 26.45 8.70 3.67810 0.00 18.100 0 0.7400 5.9350 87.90 1.8206 24 666.0 20.20 68.95 34.02 8.40 9.39063 0.00 18.100 0 0.7400 5.6270 93.90 1.8172 24 666.0 20.20 396.90 22.88 12.80 2.05110 0.00 18.100 0 0.7400 5.8180 92.40 1.8662 24 666.0 20.20 391.45 22.11 10.50 9.72418 0.00 18.100 0 0.7400 6.4060 97.20 2.0651 24 666.0 20.20 385.96 19.52 17.10 5.66637 0.00 18.100 0 0.7400 6.2190 100.00 2.0048 24 666.0 20.20 395.69 16.59 18.40 9.96654 0.00 18.100 0 0.7400 6.4850 100.00 1.9784 24 666.0 20.20 386.73 18.85 15.40 2.80230 0.00 18.100 0 0.7400 5.8540 96.60 1.8956 24 666.0 20.20 240.52 23.79 10.80 0.67180 0.00 18.100 0 0.7400 6.4590 94.80 1.9879 24 666.0 20.20 43.06 23.98 11.80 6.28807 0.00 18.100 0 0.7400 6.3410 96.40 2.0720 24 666.0 20.20 318.01 17.79 14.90 9.92485 0.00 18.100 0 0.7400 6.2510 96.60 2.1980 24 666.0 20.20 388.52 16.44 12.60 9.32909 0.00 18.100 0 0.7130 6.1850 98.70 2.2616 24 666.0 20.20 396.90 18.13 14.10 7.52601 0.00 18.100 0 0.7130 6.4170 98.30 2.1850 24 666.0 20.20 304.21 19.31 13.00 6.71772 0.00 18.100 0 0.7130 6.7490 92.60 2.3236 24 666.0 20.20 0.32 17.44 13.40 5.44114 0.00 18.100 0 0.7130 6.6550 98.20 2.3552 24 666.0 20.20 355.29 17.73 15.20 5.09017 0.00 18.100 0 0.7130 6.2970 91.80 2.3682 24 666.0 20.20 385.09 17.27 16.10 8.24809 0.00 18.100 0 0.7130 7.3930 99.30 2.4527 24 666.0 20.20 375.87 16.74 17.80 9.51363 0.00 18.100 0 0.7130 6.7280 94.10 2.4961 24 666.0 20.20 6.68 18.71 14.90 4.75237 0.00 18.100 0 0.7130 6.5250 86.50 2.4358 24 666.0 20.20 50.92 18.13 14.10 4.66883 0.00 18.100 0 0.7130 5.9760 87.90 2.5806 24 666.0 20.20 10.48 19.01 12.70 8.20058 0.00 18.100 0 0.7130 5.9360 80.30 2.7792 24 666.0 20.20 3.50 16.94 13.50 7.75223 0.00 18.100 0 0.7130 6.3010 83.70 2.7831 24 666.0 20.20 272.21 16.23 14.90 6.80117 0.00 18.100 0 0.7130 6.0810 84.40 2.7175 24 666.0 20.20 396.90 14.70 20.00 4.81213 0.00 18.100 0 0.7130 6.7010 90.00 2.5975 24 666.0 20.20 255.23 16.42 16.40 3.69311 0.00 18.100 0 0.7130 6.3760 88.40 2.5671 24 666.0 20.20 391.43 14.65 17.70 6.65492 0.00 18.100 0 0.7130 6.3170 83.00 2.7344 24 666.0 20.20 396.90 13.99 19.50 5.82115 0.00 18.100 0 0.7130 6.5130 89.90 2.8016 24 666.0 20.20 393.82 10.29 20.20 7.83932 0.00 18.100 0 0.6550 6.2090 65.40 2.9634 24 666.0 20.20 396.90 13.22 21.40 3.16360 0.00 18.100 0 0.6550 5.7590 48.20 3.0665 24 666.0 20.20 334.40 14.13 19.90 3.77498 0.00 18.100 0 0.6550 5.9520 84.70 2.8715 24 666.0 20.20 22.01 17.15 19.00 4.42228 0.00 18.100 0 0.5840 6.0030 94.50 2.5403 24 666.0 20.20 331.29 21.32 19.10 5.57570 0.00 18.100 0 0.5800 5.9260 71.00 2.9084 24 666.0 20.20 368.74 18.13 19.10 3.07510 0.00 18.100 0 0.5800 5.7130 56.70 2.8237 24 666.0 20.20 396.90 14.76 20.10 4.34879 0.00 18.100 0 0.5800 6.1670 84.00 3.0334 24 666.0 20.20 396.90 16.29 19.90 4.03841 0.00 18.100 0 0.5320 6.2290 90.70 
3.0993 24 666.0 20.20 395.33 12.87 19.60 3.56868 0.00 18.100 0 0.5800 6.4370 75.00 2.8965 24 666.0 20.20 393.37 14.36 23.20 4.64689 0.00 18.100 0 0.6140 6.9800 67.60 2.5329 24 666.0 20.20 374.68 11.66 29.80 8.05579 0.00 18.100 0 0.5840 5.4270 95.40 2.4298 24 666.0 20.20 352.58 18.14 13.80 6.39312 0.00 18.100 0 0.5840 6.1620 97.40 2.2060 24 666.0 20.20 302.76 24.10 13.30 4.87141 0.00 18.100 0 0.6140 6.4840 93.60 2.3053 24 666.0 20.20 396.21 18.68 16.70 5.02340 0.00 18.100 0 0.6140 5.3040 97.30 2.1007 24 666.0 20.20 349.48 24.91 12.00 0.23300 0.00 18.100 0 0.6140 6.1850 96.70 2.1705 24 666.0 20.20 379.70 18.03 14.60 4.33370 0.00 18.100 0 0.6140 6.2290 88.00 1.9512 24 666.0 20.20 383.32 13.11 21.40 5.82401 0.00 18.100 0 0.5320 6.2420 64.70 3.4242 24 666.0 20.20 396.90 10.74 23.00 5.70818 0.00 18.100 0 0.5320 6.7500 74.90 3.3317 24 666.0 20.20 393.07 7.74 23.70 5.73116 0.00 18.100 0 0.5320 7.0610 77.00 3.4106 24 666.0 20.20 395.28 7.01 25.00 2.81838 0.00 18.100 0 0.5320 5.7620 40.30 4.0983 24 666.0 20.20 392.92 10.42 21.80 2.37857 0.00 18.100 0 0.5830 5.8710 41.90 3.7240 24 666.0 20.20 370.73 13.34 20.60 3.67367 0.00 18.100 0 0.5830 6.3120 51.90 3.9917 24 666.0 20.20 388.62 10.58 21.20 5.69175 0.00 18.100 0 0.5830 6.1140 79.80 3.5459 24 666.0 20.20 392.68 14.98 19.10 4.83567 0.00 18.100 0 0.5830 5.9050 53.20 3.1523 24 666.0 20.20 388.22 11.45 20.60 0.15086 0.00 27.740 0 0.6090 5.4540 92.70 1.8209 4 711.0 20.10 395.09 18.06 15.20 0.18337 0.00 27.740 0 0.6090 5.4140 98.30 1.7554 4 711.0 20.10 344.05 23.97 7.00 0.20746 0.00 27.740 0 0.6090 5.0930 98.00 1.8226 4 711.0 20.10 318.43 29.68 8.10 0.10574 0.00 27.740 0 0.6090 5.9830 98.80 1.8681 4 711.0 20.10 390.11 18.07 13.60 0.11132 0.00 27.740 0 0.6090 5.9830 83.50 2.1099 4 711.0 20.10 396.90 13.35 20.10 0.17331 0.00 9.690 0 0.5850 5.7070 54.00 2.3817 6 391.0 19.20 396.90 12.01 21.80 0.27957 0.00 9.690 0 0.5850 5.9260 42.60 2.3817 6 391.0 19.20 396.90 13.59 24.50 0.17899 0.00 9.690 0 0.5850 5.6700 28.80 2.7986 6 391.0 19.20 393.29 17.60 23.10 0.28960 0.00 9.690 0 0.5850 5.3900 72.90 2.7986 6 391.0 19.20 396.90 21.14 19.70 0.26838 0.00 9.690 0 0.5850 5.7940 70.60 2.8927 6 391.0 19.20 396.90 14.10 18.30 0.23912 0.00 9.690 0 0.5850 6.0190 65.30 2.4091 6 391.0 19.20 396.90 12.92 21.20 0.17783 0.00 9.690 0 0.5850 5.5690 73.50 2.3999 6 391.0 19.20 395.77 15.10 17.50 0.22438 0.00 9.690 0 0.5850 6.0270 79.70 2.4982 6 391.0 19.20 396.90 14.33 16.80 0.06263 0.00 11.930 0 0.5730 6.5930 69.10 2.4786 1 273.0 21.00 391.99 9.67 22.40 0.04527 0.00 11.930 0 0.5730 6.1200 76.70 2.2875 1 273.0 21.00 396.90 9.08 20.60 0.06076 0.00 11.930 0 0.5730 6.9760 91.00 2.1675 1 273.0 21.00 396.90 5.64 23.90 0.10959 0.00 11.930 0 0.5730 6.7940 89.30 2.3889 1 273.0 21.00 393.45 6.48 22.00 0.04741 0.00 11.930 0 0.5730 6.0300 80.80 2.5050 1 273.0 21.00 396.90 7.88 11.90 ================================================ FILE: docs/input/index.fsx ================================================ (*** hide ***) #r "../../src/Hype/bin/Debug/DiffSharp.dll" #r "../../src/Hype/bin/Debug/Hype.dll" open DiffSharp.AD.Float32 (** Hype: Compositional Machine Learning and Hyperparameter Optimization ==================================================================== Hype is a proof-of-concept deep learning library, where you can perform optimization on [compositional](http://mathworld.wolfram.com/Composition.html) machine learning systems of many components, even when such components themselves internally perform optimization. 
This is enabled by nested automatic differentiation (AD), giving you access to the exact derivative of any floating-point value in your code with respect to any other. Underlying computations are run by a BLAS/LAPACK backend (OpenBLAS by default).

### Automatic derivatives

You do not need to worry about supplying gradients (or Hessians) of your models, which are computed exactly and efficiently by AD. The underlying AD functionality is provided by [DiffSharp](http://diffsharp.github.io/DiffSharp/index.html). "Reverse mode" AD is a generalized form of "backpropagation" and is distinct from numerical or symbolic differentiation. In addition to reverse AD, Hype makes use of forward AD and nested combinations of forward and reverse AD. The core [differentiation API](http://diffsharp.github.io/DiffSharp/api-overview.html) provides gradients, Hessians, Jacobians, directional derivatives, and matrix-free exact Hessian- and Jacobian-vector products.
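For example, a quick sketch of these operators in use (the function `f` below is made up purely for illustration; `grad`, `hessian`, and `toDV` come from DiffSharp, opened at the top of this page):
*)

let f (x:DV) = sin (x.[0] * x.[1]) // An example scalar-valued function of a vector

let gf = grad f (toDV [1.0f; 2.0f])    // Exact gradient of f at (1, 2)
let hf = hessian f (toDV [1.0f; 2.0f]) // Exact Hessian of f at (1, 2), via nested AD

(**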
Roadmap
-------

In the current release:

* OpenBLAS backend by default
* Regression, feedforward neural networks
* Recurrent neural networks, LSTMs, GRUs
* Hamiltonian Monte Carlo
Upcoming features:

* GPU/CUDA backend
* Probabilistic inference
* Convolutional neural networks
About
-----

Hype is developed by [Atılım Güneş Baydin](http://www.cs.nuim.ie/~gunes/) and [Barak A. Pearlmutter](http://bcl.hamilton.ie/~barak/) at the [Brain and Computation Lab](http://www.bcl.hamilton.ie/), Hamilton Institute, National University of Ireland Maynooth.

License
-------

Hype is released under the MIT license.
*)
================================================ FILE: docs/input/templates/docpage.cshtml ================================================
@{
  Layout = "template";
  Title = Properties["page-title"];
  Description = Properties["project-summary"];
}
@Properties["document"]
@Properties["tooltips"]
================================================ FILE: docs/input/templates/reference/module.cshtml ================================================
@using FSharp.MetadataFormat
@{
  Layout = "template";
  Title = Model.Module.Name + " - " + Properties["project-name"];
}
@{
  // Get all the members & comment for the type
  var members = (IEnumerable<Member>)Model.Module.AllMembers;
  var comment = (Comment)Model.Module.Comment;

  // Group all members by their category which is an inline annotation
  // that can be added to members using special XML comment:
  //
  // /// [category:Something]
  //
  // ...and can be used to categorize members in large modules or types
  // (but if this is not used, then all members end up in just one category)
  var byCategory = members
    .GroupBy(m => m.Category)
    .OrderBy(g => String.IsNullOrEmpty(g.Key) ? "ZZZ" : g.Key)
    .Select((g, n) => new {
      Index = n,
      GroupKey = g.Key,
      Members = g.OrderBy(m => m.Name),
      Name = String.IsNullOrEmpty(g.Key) ? "Other module members" : g.Key
    });

  // Get nested modules and nested types as statically typed collections
  var nestModules = (IEnumerable<Module>)Model.Module.NestedModules;
  var nestTypes = (IEnumerable<Type>)Model.Module.NestedTypes;
}

<h1>@Model.Module.Name</h1>

@foreach (var sec in comment.Sections) {
  // XML comment for the type has multiple sections that can be labelled
  // with categories (to give comment for an individual category). Here,
  // we print only those that belong to the section.
  if (!byCategory.Any(g => g.GroupKey == sec.Key)) {
    if (sec.Key != "") {
      <h2>@sec.Key</h2>
    }
    @sec.Value
  }
}
@if (byCategory.Count() > 1) {
  <h2>Table of contents</h2>
  <ul>
    @foreach (var g in byCategory) {
      <li><a href="#section@g.Index">@g.Name</a></li>
    }
  </ul>
}
@if (nestTypes.Count() + nestModules.Count() > 0) {
  <h2>Nested types and modules</h2>
  @RenderPart("part-nested", new { Types = nestTypes, Modules = nestModules })
}
@foreach (var g in byCategory) {
  // Iterate over all the categories and print members. If there are more than one
  // categories, print the category heading (as <h2>) and add XML comment from the type
  // that is related to this specific category.
  if (byCategory.Count() > 1) {
    <h2 id="section@g.Index">@g.Name</h2>
    var info = comment.Sections.FirstOrDefault(kvp => kvp.Key == g.GroupKey);
    if (info.Key != null) {
      @info.Value
} } @RenderPart("part-members", new { Header = "Functions and values", TableHeader = "Function or value", Members = g.Members.Where(m => m.Kind == MemberKind.ValueOrFunction) }) @RenderPart("part-members", new { Header = "Type extensions", TableHeader = "Type extension", Members = g.Members.Where(m => m.Kind == MemberKind.TypeExtension) }) @RenderPart("part-members", new { Header = "Active patterns", TableHeader = "Active pattern", Members = g.Members.Where(m => m.Kind == MemberKind.ActivePattern) }) } ================================================ FILE: docs/input/templates/reference/namespaces.cshtml ================================================ @using FSharp.MetadataFormat @{ Layout = "template"; Title = "Namespaces - " + Properties["project-name"]; }

<h1>@Model.Name</h1>

@{ var nsIndex = 0; }
@foreach (var ns in Model.Namespaces) {
  nsIndex++;
  var typedNs = (Namespace)ns;
  var allCategories =
    typedNs.Types.Select(t => t.Category)
      .Concat(typedNs.Modules.Select(m => m.Category))
      .Distinct()
      .OrderBy(s => String.IsNullOrEmpty(s) ? "ZZZ" : s);
  var allByCategory =
    allCategories
      .Select((c, i) => new {
        Name = String.IsNullOrEmpty(c) ? "Other namespace members" : c,
        Index = String.Format("{0}_{1}", nsIndex, i),
        Types = typedNs.Types.Where(t => t.Category == c).ToArray(),
        Modules = typedNs.Modules.Where(m => m.Category == c).ToArray()
      })
      .Where(c => c.Types.Length + c.Modules.Length > 0).ToArray();

  <h2>@ns.Name Namespace</h2>

  if (allByCategory.Length > 1) {
    <ul>
      @foreach (var g in allByCategory) {
        <li><a href="#section@g.Index">@g.Name</a></li>
      }
    </ul>
  }
  foreach (var g in allByCategory) {
    if (allByCategory.Length > 1) {
      <h3 id="section@g.Index">@g.Name</h3>
    }
    @RenderPart("part-nested", new { Types = g.Types, Modules = g.Modules })
} } ================================================ FILE: docs/input/templates/reference/part-members.cshtml ================================================ @if (Enumerable.Count(Model.Members) > 0) {

<h3>@Model.Header</h3>

<table>
  <thead>
    <tr><td>@Model.TableHeader</td><td>Description</td></tr>
  </thead>
  <tbody>
    @foreach (var it in Model.Members) {
      <tr>
        <td>
          @{ var id = Html.UniqueID().ToString(); }
          <code>@Html.Encode(it.Details.FormatUsage(40))</code>
          <div class="tip" id="@id">
            <span>Signature: @Html.Encode(it.Details.Signature)</span>
            @if (!it.Details.Modifiers.IsEmpty) {
              <span>Modifiers: @it.Details.FormatModifiers</span>
            }
            @if (!it.Details.TypeArguments.IsEmpty) {
              <span>Type parameters: @it.Details.FormatTypeArguments</span>
            }
          </div>
        </td>
        <td>
          @if (!String.IsNullOrEmpty(it.Details.FormatSourceLocation)) { }
          @it.Comment.FullText
        </td>
      </tr>
    }
  </tbody>
</table>
}
================================================ FILE: docs/input/templates/reference/part-nested.cshtml ================================================
@if (Enumerable.Count(Model.Types) > 0) {
  <table>
    <thead>
      <tr><td>Type</td><td>Description</td></tr>
    </thead>
    <tbody>
      @foreach (var it in Model.Types) {
        <tr>
          <td>@it.Name</td>
          <td>@it.Comment.Blurb</td>
        </tr>
      }
    </tbody>
  </table>
}
@if (Enumerable.Count(Model.Modules) > 0) {
  <table>
    <thead>
      <tr><td>Module</td><td>Description</td></tr>
    </thead>
    <tbody>
      @foreach (var it in Model.Modules) {
        <tr>
          <td>@it.Name</td>
          <td>@it.Comment.Blurb</td>
        </tr>
      }
    </tbody>
  </table>
}
================================================ FILE: docs/input/templates/reference/type.cshtml ================================================
@using FSharp.MetadataFormat
@{
  Layout = "template";
  Title = Model.Type.Name + " - " + Properties["project-name"];
}
@{
  // Get all the members & comment for the type
  var members = (IEnumerable<Member>)Model.Type.AllMembers;
  var comment = (Comment)Model.Type.Comment;

  // Group all members by their category which is an inline annotation
  // that can be added to members using special XML comment:
  //
  // /// [category:Something]
  //
  // ...and can be used to categorize members in large modules or types
  // (but if this is not used, then all members end up in just one category)
  var byCategory = members
    .GroupBy(m => m.Category)
    .OrderBy(g => String.IsNullOrEmpty(g.Key) ? "ZZZ" : g.Key)
    .Select((g, n) => new {
      Index = n,
      GroupKey = g.Key,
      Members = g.OrderBy(m => m.Kind == MemberKind.StaticParameter ? "" : m.Name),
      Name = String.IsNullOrEmpty(g.Key) ? "Other type members" : g.Key
    });
}

<h1>@Model.Type.Name</h1>

@foreach (var sec in comment.Sections) {
  // XML comment for the type has multiple sections that can be labelled
  // with categories (to give comment for an individual category). Here,
  // we print only those that belong to the section.
  if (!byCategory.Any(g => g.GroupKey == sec.Key)) {
    if (sec.Key != "") {
      <h2>@sec.Key</h2>
    }
    @sec.Value
  }
}
@if (byCategory.Count() > 1) {
  <h2>Table of contents</h2>
  <ul>
    @foreach (var g in byCategory) {
      <li><a href="#section@g.Index">@g.Name</a></li>
    }
  </ul>
}
@foreach (var g in byCategory) {
  // Iterate over all the categories and print members. If there are more than one
  // categories, print the category heading (as <h2>) and add XML comment from the type
  // that is related to this specific category.
  if (byCategory.Count() > 1) {
    <h2 id="section@g.Index">@g.Name</h2>
    var info = comment.Sections.FirstOrDefault(kvp => kvp.Key == g.GroupKey);
    if (info.Key != null) {
      @info.Value
} } @RenderPart("part-members", new { Header = "Union Cases", TableHeader = "Union Case", Members = g.Members.Where(m => m.Kind == MemberKind.UnionCase) }) @RenderPart("part-members", new { Header = "Record Fields", TableHeader = "Record Field", Members = g.Members.Where(m => m.Kind == MemberKind.RecordField) }) @RenderPart("part-members", new { Header = "Static parameters", TableHeader = "Static parameters", Members = g.Members.Where(m => m.Kind == MemberKind.StaticParameter) }) @RenderPart("part-members", new { Header = "Constructors", TableHeader = "Constructor", Members = g.Members.Where(m => m.Kind == MemberKind.Constructor) }) @RenderPart("part-members", new { Header = "Instance members", TableHeader = "Instance member", Members = g.Members.Where(m => m.Kind == MemberKind.InstanceMember) }) @RenderPart("part-members", new { Header = "Static members", TableHeader = "Static member", Members = g.Members.Where(m => m.Kind == MemberKind.StaticMember) }) } ================================================ FILE: docs/input/templates/template.cshtml ================================================  @Title ================================================ FILE: docs/input/templates/template.html ================================================ {page-title} ================================================ FILE: paket.dependencies ================================================ source https://api.nuget.org/v3/index.json framework: netstandard2.0 redirects: on storage: none nuget System.Drawing.Common >= 4.5.1 nuget DiffSharp >= 0.8.4-beta nuget FSharp.Core #These packages are used in .fsx examples which are currently difficult to make cross-platform unless they're local nuget FSharp.Formatting storage: packages //nuget R.NET storage: packages nuget RProvider storage: packages nuget XPlot.GoogleCharts.WPF storage: packages ================================================ FILE: src/Hype/AssemblyInfo.fs ================================================ namespace Hype.AssemblyInfo open System.Reflection open System.Runtime.CompilerServices open System.Runtime.InteropServices // General Information about an assembly is controlled through the following // set of attributes. Change these attribute values to modify the information // associated with an assembly. [] [] [] [] [] [] [] [] // Setting ComVisible to false makes the types in this assembly not visible // to COM components. If you need to access a type in this assembly from // COM, set the ComVisible attribute to true on that type. [] // The following GUID is for the ID of the typelib if this project is exposed to COM [] // Version information for an assembly consists of the following four values: // // Major Version // Minor Version // Build Number // Revision // // You can specify all the values or you can default the Build and Revision Numbers // by using the '*' as shown below: // [] [] [] [] do () ================================================ FILE: src/Hype/Classifier.fs ================================================ // // This file is part of // Hype: Compositional Machine Learning and Hyperparameter Optimization // // Copyright (c) 2015, National University of Ireland Maynooth (Atilim Gunes Baydin, Barak A. Pearlmutter) // // Hype is released under the MIT license. // (See accompanying LICENSE file.) // // Written by: // // Atilim Gunes Baydin // atilimgunes.baydin@nuim.ie // // Barak A. 
Pearlmutter // barak@cs.nuim.ie // // Brain and Computation Lab // Hamilton Institute & Department of Computer Science // National University of Ireland Maynooth // Maynooth, Co. Kildare // Ireland // // www.bcl.hamilton.ie // namespace Hype open Hype open Hype.Neural open DiffSharp.AD.Float32 open DiffSharp.Util /// Base type for classifiers [<AbstractClass>] type Classifier(f:DM->DM) = let f = f member c.Run(x:DM) = f x member c.Run(x:DV) = x |> DM.ofDV x.Length |> f |> DM.toDV abstract member Classify : DM -> int[] abstract member Classify : DV -> int member c.ClassificationError(x:DM, y:int[]) = let cc = c.Classify(x) let incorrect = Array.map2 (fun c y -> if c = y then 0 else 1) cc y (float32 (incorrect |> Array.sum)) / (float32 incorrect.Length) member c.ClassificationError(d:Dataset) = c.ClassificationError(d.X, d.Yi) /// Classifier for binary classification type LogisticClassifier(f) = inherit Classifier(f) new(l:Layer) = LogisticClassifier(l.Run) override c.Classify(x:DM) = let cc = Array.zeroCreate x.Cols x |> f |> DM.iteriCols (fun i v -> if v.[0] > D 0.5f then cc.[i] <- 1) cc override c.Classify(x:DV) = if c.Run(x).[0] > D 0.5f then 1 else 0 member c.ClassificationError(d:Dataset) = let yi = d.Y |> DM.toDV |> DV.toArray |> Array.map (float32>>int) c.ClassificationError(d.X, yi) /// Classifier for softmax classification type SoftmaxClassifier(f) = inherit Classifier(f) new(l:Layer) = SoftmaxClassifier(l.Run) override c.Classify(x:DM) = let cc = Array.zeroCreate x.Cols x |> f |> DM.iteriCols (fun i v -> cc.[i] <- DV.MaxIndex(v)) cc override c.Classify(x:DV) = DV.MaxIndex(c.Run(x)) ================================================ FILE: src/Hype/Hype.fs ================================================ // // This file is part of // Hype: Compositional Machine Learning and Hyperparameter Optimization // // Copyright (c) 2015, National University of Ireland Maynooth (Atilim Gunes Baydin, Barak A. Pearlmutter) // // Hype is released under the MIT license. // (See accompanying LICENSE file.) // // Written by: // // Atilim Gunes Baydin // atilimgunes.baydin@nuim.ie // // Barak A. Pearlmutter // barak@cs.nuim.ie // // Brain and Computation Lab // Hamilton Institute & Department of Computer Science // National University of Ireland Maynooth // Maynooth, Co. Kildare // Ireland // // www.bcl.hamilton.ie // /// Main namespace namespace Hype open System.IO open DiffSharp.AD.Float32 open DiffSharp.Util /// Random number generator type Rnd() = static let mutable R = new System.Random() /// Seed the random number generator with integer `seed` static member Seed(seed) = R <- new System.Random(seed) /// Generate a random permutation of a set of length `n` static member Permutation(n:int) = let swap i j (a:_[]) = let tmp = a.[i] a.[i] <- a.[j] a.[j] <- tmp let a = Array.init n (fun i -> i) a |> Array.iteri (fun i _ -> swap i (R.Next(i, n)) a) a /// Sample a non-negative random integer static member UniformInt() = R.Next() /// Sample a non-negative random integer less than `max` static member UniformInt(max) = R.Next(max) /// Sample a random integer between `min` and `max` static member UniformInt(min, max) = R.Next(min, max) /// Sample a `float32` from the standard uniform distribution. X ~ U(0,1) static member Uniform() = float32 (R.NextDouble()) /// Sample a `D` from the standard uniform distribution. X ~ U(0,1) static member UniformD() = D (float32 (R.NextDouble())) /// Sample a `float32` from the uniform distribution between zero and `max`.
X ~ U(0,max) static member Uniform(max) = max * (float32 (R.NextDouble())) /// Sample a `D` from the uniform distribution between zero and `max`. X ~ U(0,max) static member UniformD(max) = max * D (float32 (R.NextDouble())) /// Sample a `float32` from the uniform distribution between `min` and `max`. X ~ U(min,max) static member Uniform(min, max) = min + (float32 (R.NextDouble())) * (max - min) /// Sample a `D` from the uniform distribution between `min` and `max`. X ~ U(min,max) static member UniformD(min, max) = min + D (float32 (R.NextDouble())) * (max - min) /// Sample a `float32` from the standard normal distribution. X ~ N(0,1) static member Normal() = let rec n() = let x, y = (float32 (R.NextDouble())) * 2.0f - 1.0f, (float32 (R.NextDouble())) * 2.0f - 1.0f let s = x * x + y * y if s > 1.0f then n() else x * sqrt (-2.0f * (log s) / s) n() /// Sample a `D` from the standard normal distribution. X ~ N(0,1) static member NormalD() = D (Rnd.Normal()) /// Sample a `float32` from the normal distribution with given mean `mu` and standard deviation `sigma`. X ~ N(mu,sigma) static member Normal(mu, sigma) = Rnd.Normal() * sigma + mu /// Sample a `D` from the normal distribution with given mean `mu` and standard deviation `sigma`. X ~ N(mu,sigma) static member NormalD(mu, sigma) = Rnd.NormalD() * sigma + mu /// Sample a `DV` of length `n` from the standard uniform distribution. Elements of vector X ~ U(0,1) static member UniformDV(n) = DV (Array.Parallel.init n (fun _ -> Rnd.Uniform())) /// Sample a `DV` of length `n` from the uniform distribution between zero and `max`. Elements of vector X ~ U(0,max) static member UniformDV(n, max) = DV.init n (fun _ -> Rnd.UniformD(max)) /// Sample a `DV` of length `n` from the uniform distribution between `min` and `max`. Elements of vector X ~ U(min,max) static member UniformDV(n, min, max) = DV.init n (fun _ -> Rnd.UniformD(min, max)) /// Sample a `DV` of length `n` from the standard normal distribution. Elements of vector X ~ N(0,1) static member NormalDV(n) = DV (Array.Parallel.init n (fun _ -> Rnd.Normal())) /// Sample a `DV` of length `n` from the normal distribution with given mean `mu` and standard deviation `sigma`. Elements of vector X ~ N(mu,sigma) static member NormalDV(n, mu, sigma) = DV.init n (fun _ -> Rnd.NormalD(mu, sigma)) /// Sample a `DM` of `m` rows and `n` columns from the standard uniform distribution. Elements of matrix X ~ U(0,1) static member UniformDM(m, n) = DM (Array2D.Parallel.init m n (fun _ _ -> Rnd.Uniform())) /// Sample a `DM` of `m` rows and `n` columns from the uniform distribution between zero and `max`. Elements of matrix X ~ U(0,max) static member UniformDM(m, n, max) = DM.init m n (fun _ _ -> Rnd.UniformD(max)) /// Sample a `DM` of `m` rows and `n` columns from the uniform distribution between `min` and `max`. Elements of matrix X ~ U(min,max) static member UniformDM(m, n, min, max) = DM.init m n (fun _ _ -> Rnd.UniformD(min, max)) /// Sample a `DM` of `m` rows and `n` columns from the standard normal distribution. Elements of matrix X ~ N(0,1) static member NormalDM(m, n) = DM (Array2D.Parallel.init m n (fun _ _ -> Rnd.Normal())) /// Sample a `DM` of `m` rows and `n` columns from the normal distribution with given mean `mu` and standard deviation `sigma`.
Elements of matrix X ~ N(mu,sigma) static member NormalDM(m, n, mu, sigma) = DM.init m n (fun _ _ -> Rnd.NormalD(mu, sigma)) /// Select a random element of array `a` static member Choice(a:_[]) = a.[R.Next(a.Length)] /// Select a random element of array `a`, given selection probabilities in array `probs` static member Choice(a:_[], probs:float32[]) = Rnd.Choice(a, toDV probs) /// Select a random element of array `a`, given selection probabilities in vector `probs` static member Choice(a:_[], probs:DV) = let probs' = probs / (DV.sum(probs)) let p = float32 (R.NextDouble()) let mutable r = 0.f let mutable i = 0 let mutable hit = false while not hit do r <- r + (float32 probs'.[i]) if r >= p then hit <- true else i <- i + 1 a.[i] /// Dataset for holding training data type Dataset private (x:DM, y:DM, xi:seq<int>, yi:seq<int>) = /// The matrix X of input values, where columns are the individual inputs Xi member val X = x with get /// The matrix Y of output values, where columns are the individual outputs Yi member val Y = y with get /// The index of the maximum elements of individual inputs Xi, used for one-hot representations member val Xi = xi |> Array.ofSeq with get /// The index of the maximum elements of individual outputs Yi, used for one-hot representations member val Yi = yi |> Array.ofSeq with get /// Construct a dataset with given input matrix `x` and output matrix `y`. Columns of `x` and `y` are the individual inputs and corresponding outputs. new(x:DM, y:DM) = let xi = x |> DM.toCols |> Seq.toArray |> Array.map DV.maxIndex let yi = y |> DM.toCols |> Seq.toArray |> Array.map DV.maxIndex Dataset(x, y, xi, yi) /// Construct a dataset of one-hot input and output elements. `xi` are the input indices, `onehotdimsx` is the input dimensions, `yi` are the output indices, `onehotdimsy` is the output dimensions. new(xi:seq<int>, onehotdimsx:int, yi:seq<int>, onehotdimsy:int) = let x = xi |> Seq.map (fun i -> DV.standardBasis onehotdimsx i) |> DM.ofCols let y = yi |> Seq.map (fun i -> DV.standardBasis onehotdimsy i) |> DM.ofCols Dataset(x, y, xi, yi) /// Construct a dataset of one-hot input and output elements. `xi` are the input indices, input dimensions is max(xi) + 1, `yi` are the output indices, output dimensions is max(yi) + 1. new(xi:seq<int>, yi:seq<int>) = let onehotdimsx = 1 + Seq.max xi let onehotdimsy = 1 + Seq.max yi Dataset(xi, onehotdimsx, yi, onehotdimsy) /// Construct a dataset with given input matrix `x` and one-hot output elements. `yi` are the output indices, `onehotdimsy` is the output dimensions. new(x:DM, yi:seq<int>, onehotdimsy:int) = let xi = x |> DM.toCols |> Seq.toArray |> Array.map DV.maxIndex let y = yi |> Seq.map (fun i -> DV.standardBasis onehotdimsy i) |> DM.ofCols Dataset(x, y, xi, yi) /// Construct a dataset with one-hot input elements and given output matrix `y`. `xi` are the input indices, `onehotdimsx` is the input dimensions. new(xi:seq<int>, onehotdimsx:int, y:DM) = let x = xi |> Seq.map (fun i -> DV.standardBasis onehotdimsx i) |> DM.ofCols let yi = y |> DM.toCols |> Seq.toArray |> Array.map DV.maxIndex Dataset(x, y, xi, yi) /// Construct a dataset with given input matrix `x` and one-hot output elements. `yi` are the output indices, output dimensions is max(yi) + 1. new(x:DM, yi:seq<int>) = let onehotdimsy = 1 + Seq.max yi Dataset(x, yi, onehotdimsy) /// Construct a dataset with one-hot input elements and given output matrix `y`. `xi` are the input indices, input dimensions is max(xi) + 1.
new(xi:seq<int>, y:DM) = let onehotdimsx = 1 + Seq.max xi Dataset(xi, onehotdimsx, y) /// Construct a dataset from the given sequence of input-output vector pairs new(s:seq<DV*DV>) = let x, y = s |> Seq.toArray |> Array.unzip Dataset(x |> DM.ofCols, y |> DM.ofCols) /// The empty dataset static member empty = Dataset(DM.empty, DM.empty) /// Check whether dataset `d` is empty static member isEmpty (d:Dataset) = DM.isEmpty d.X && DM.isEmpty d.Y /// Normalize the values in the input matrix X and output matrix Y of dataset `d` to be in the range [0,1] static member normalize (d:Dataset) = d.Normalize() /// Normalize the values in the input matrix X of dataset `d` to be in the range [0,1] static member normalizeX (d:Dataset) = d.NormalizeX() /// Normalize the values in the output matrix Y of dataset `d` to be in the range [0,1] static member normalizeY (d:Dataset) = d.NormalizeY() /// Standardize the values in the input matrix X and output matrix Y of dataset `d` to have zero mean and unit variance static member standardize (d:Dataset) = d.Standardize() /// Standardize the values in the input matrix X of dataset `d` to have zero mean and unit variance static member standardizeX (d:Dataset) = d.StandardizeX() /// Standardize the values in the output matrix Y of dataset `d` to have zero mean and unit variance static member standardizeY (d:Dataset) = d.StandardizeY() /// Append a new row `v` to the input matrix X of dataset `d` static member appendRowX (v:DV) (d:Dataset) = d.AppendRowX(v) /// Append a new row `v` to the output matrix Y of dataset `d` static member appendRowY (v:DV) (d:Dataset) = d.AppendRowY(v) /// Append a row of ones to the input matrix X of dataset `d` static member appendBiasRowX (d:Dataset) = d.AppendBiasRowX() /// Get a summary string of dataset `d` static member toString (d:Dataset) = d.ToString() /// Get a string representation of dataset `d` showing all values static member toStringFull (d:Dataset) = d.ToStringFull() /// Get the input-output pairs of dataset `d` as a sequence static member toSeq (d:Dataset) = d.ToSeq() /// The length of dataset `d`, i.e., the number of columns in input matrix X and output matrix Y static member length (d:Dataset) = d.Length /// Sample a random subset of length `n` from dataset `d` static member randomSubset (n:int) (d:Dataset) = d.RandomSubset(n) /// Shuffle the order of elements in dataset `d` static member shuffle (d:Dataset) = d.Shuffle() /// Get the input-output pair with index `i` from dataset `d` static member item (i:int) (d:Dataset) = d.[i] /// Get element `i` member d.Item with get i = d.X.[*,i], d.Y.[*,i] /// The length of the dataset, i.e., the number of columns in input matrix X and output matrix Y member d.Length = d.X.Cols /// Get the input-output pairs as a sequence member d.ToSeq() = Seq.init d.Length (fun i -> d.[i]) /// Sample a random subset of length `n` from this dataset member d.RandomSubset(n) = let bi = Rnd.Permutation(d.Length) let x = Seq.init n (fun i -> d.X.[*, bi.[i]]) let y = Seq.init n (fun i -> d.Y.[*, bi.[i]]) Dataset(DM.ofCols x, DM.ofCols y) /// Normalize the values in the input matrix X and output matrix Y to be in the range [0,1] member d.Normalize() = Dataset(DM.normalize d.X, DM.normalize d.Y) /// Normalize the values in the input matrix X to be in the range [0,1] member d.NormalizeX() = Dataset(DM.normalize d.X, d.Y) /// Normalize the values in the output matrix Y to be in the range [0,1] member d.NormalizeY() = Dataset(d.X, DM.normalize d.Y) /// Standardize the values in the input matrix X and output matrix Y
to have zero mean and unit variance member d.Standardize() = Dataset(DM.standardize d.X, DM.standardize d.Y) /// Standardize the values in the input matrix X to have zero mean and unit variance member d.StandardizeX() = Dataset(DM.standardize d.X, d.Y) /// Standardize the values in the output matrix Y to have zero mean and unit variance member d.StandardizeY() = Dataset(d.X, DM.standardize d.Y) /// Shuffle the order of elements in the dataset member d.Shuffle() = d.RandomSubset d.Length /// Get a slice of the dataset between `lower` and `upper` indices member d.GetSlice(lower, upper) = let l = max 0 (defaultArg lower 0) let u = min (d.X.Cols - 1) (defaultArg upper (d.Length - 1)) Dataset(d.X.[*,l..u], d.Y.[*,l..u]) /// Get a new dataset of the entries for which the `predicate` is true member d.Filter (predicate:(DV*DV)->bool) = d.ToSeq() |> Seq.filter predicate |> Dataset /// Append a new row `v` to the input matrix X member d.AppendRowX(v:DV) = Dataset(d.X |> DM.appendRow v, d.Y) /// Append a new row `v` to the output matrix Y member d.AppendRowY(v:DV) = Dataset(d.X, d.Y |> DM.appendRow v) /// Append a row of all ones to the input matrix X member d.AppendBiasRowX() = d.AppendRowX(DV.create d.Length 1.f) /// Get a summary string of this dataset override d.ToString() = "Hype.Dataset\n" + sprintf " X: %i x %i\n" d.X.Rows d.X.Cols + sprintf " Y: %i x %i" d.Y.Rows d.Y.Cols /// Get a string representation of this dataset showing all values member d.ToStringFull() = "Hype.Dataset\n" + sprintf " X:\n%O\n\n" d.X + sprintf " Y:\n%O" d.Y /// Get a string visualization of this dataset member d.Visualize() = "Hype.Dataset\n" + sprintf " X:\n%s\n\n" (d.X.Visualize()) + sprintf " Y:\n%s" (d.Y.Visualize()) /// Visualize the values of the input matrix X where each column will be reshaped to an image with `imagerows` rows member d.VisualizeXColsAsImageGrid(imagerows:int) = d.ToString() + "\n" + "X's columns " + Util.VisualizeDMRowsAsImageGrid(d.X |> DM.transpose, imagerows) /// Visualize the values of the output matrix Y where each column will be reshaped to an image with `imagerows` rows member d.VisualizeYColsAsImageGrid(imagerows:int) = d.ToString() + "\n" + "Y's columns " + Util.VisualizeDMRowsAsImageGrid(d.Y |> DM.transpose, imagerows) /// Various utility functions and Util = static member printLog (s:string) = printfn "[%A] %s" System.DateTime.Now s static member printModel (f:DV->DV) (d:Dataset) = d.ToSeq() |> Seq.map (fun (x, y) -> f x, y) |> Seq.iter (fun (x, y) -> printfn "f x: %A, y: %A" x y) /// Load bitmap image with given `filename` to `DM` static member LoadImage(filename:string) = let bmp = new System.Drawing.Bitmap(filename) let m = DM.init bmp.Height bmp.Width (fun i j -> float32 (bmp.GetPixel(i, j).GetBrightness())) bmp.Dispose() m /// Load values from delimited text file with given `filename` and separator characters `separators` static member LoadDelimited(filename:string, separators:char[]) = System.IO.File.ReadLines(filename) |> Seq.map (fun x -> x.Split(separators) |> Array.map float32) |> Seq.map toDV |> DM.ofRows /// Load values from delimited text file with given `filename` and a default set of separator characters: space, comma, or tab static member LoadDelimited(filename:string) = Util.LoadDelimited(filename, [|' '; ','; '\t'|]) /// Load values from the MNIST database images, from given `filename`, reading `n` number of elements static member LoadMNISTPixels(filename, n) = let d = new BinaryReader(File.Open(filename, FileMode.Open, FileAccess.Read, FileShare.Read)) let 
magicnumber = d.ReadInt32() |> System.Net.IPAddress.NetworkToHostOrder match magicnumber with | 2051 -> // Images let maxitems = d.ReadInt32() |> System.Net.IPAddress.NetworkToHostOrder let rows = d.ReadInt32() |> System.Net.IPAddress.NetworkToHostOrder let cols = d.ReadInt32() |> System.Net.IPAddress.NetworkToHostOrder let n = min n maxitems d.ReadBytes(n * rows * cols) |> Array.map float32 |> DV |> DM.ofDV n |> DM.transpose | _ -> failwith "Given file is not in the MNIST format." /// Load values from the MNIST database labels, from given `filename`, reading `n` number of elements static member LoadMNISTLabels(filename, n) = let d = new BinaryReader(File.Open(filename, FileMode.Open, FileAccess.Read, FileShare.Read)) let magicnumber = d.ReadInt32() |> System.Net.IPAddress.NetworkToHostOrder match magicnumber with | 2049 -> // Labels let maxitems = d.ReadInt32() |> System.Net.IPAddress.NetworkToHostOrder d.ReadBytes(min n maxitems) |> Array.map int | _ -> failwith "Given file is not in the MNIST format." /// Load values from the MNIST database images, from given `filename`, reading all elements static member LoadMNISTPixels(filename) = Util.LoadMNISTPixels(filename, System.Int32.MaxValue) /// Load values from the MNIST database labels, from given `filename`, reading all elements static member LoadMNISTLabels(filename) = Util.LoadMNISTLabels(filename, System.Int32.MaxValue) /// Generate a string representation of matrix `w`, reshaping each row into an image with `imagerows` rows, and presenting resulting images together in an optimal grid layout. static member VisualizeDMRowsAsImageGrid(w:DM, imagerows:int) = let rows = w.Rows let mm = int (floor (sqrt (float rows))) let nn = int (ceil (float rows / float mm)) let m = imagerows let n = (w.[0, *] |> DV.toDM m).Cols let mutable mat = DM.create (mm * m) (nn * n) (DM.mean w) for i = 0 to mm - 1 do for j = 0 to nn - 1 do let row = i * nn + j if row < w.Rows then mat <- DM.AddSubMatrix(mat, i * m, j * n, w.[row, *] |> DV.toDM m) sprintf "reshaped to (%i x %i), presented in a (%i x %i) grid:\n%s\n" m n mm nn (mat.Visualize()) ================================================ FILE: src/Hype/Hype.fsproj ================================================  netstandard2.0 x64 BSD-2-Clause Hype true true ================================================ FILE: src/Hype/Inference.fs ================================================ // // This file is part of // Hype: Compositional Machine Learning and Hyperparameter Optimization // // Copyright (c) 2015, National University of Ireland Maynooth (Atilim Gunes Baydin, Barak A. Pearlmutter) // // Hype is released under the MIT license. // (See accompanying LICENSE file.) // // Written by: // // Atilim Gunes Baydin // atilimgunes.baydin@nuim.ie // // Barak A. Pearlmutter // barak@cs.nuim.ie // // Brain and Computation Lab // Hamilton Institute & Department of Computer Science // National University of Ireland Maynooth // Maynooth, Co. 
Kildare // Ireland // // www.bcl.hamilton.ie // /// Inference namespace namespace Hype.Inference open Hype open DiffSharp.AD.Float32 open DiffSharp.Util /// Hamiltonian MCMC sampler type HMCSampler() = static member Sample(n, hdelta, hsteps, x0:DV, f:DV->D) = let leapFrog (u:DV->D) (k:DV->D) (d:D) steps (x0, p0) = let hd = d / 2.f [1..steps] |> List.fold (fun (x, p) _ -> let p' = p - hd * grad u x let x' = x + d * grad k p' x', p' - hd * grad u x') (x0, p0) let u x = -log (f x) // potential energy let k p = (p * p) / D 2.f // kinetic energy let hamilton x p = u x + k p let x = ref x0 [|for i in 1..n do let p = DV.init x0.Length (fun _ -> Rnd.Normal()) let x', p' = leapFrog u k hdelta hsteps (!x, p) if Rnd.Uniform() < float32 (exp ((hamilton !x p) - (hamilton x' p'))) then x := x' yield !x|] ================================================ FILE: src/Hype/NLP.fs ================================================ // // This file is part of // Hype: Compositional Machine Learning and Hyperparameter Optimization // // Copyright (c) 2015, National University of Ireland Maynooth (Atilim Gunes Baydin, Barak A. Pearlmutter) // // Hype is released under the MIT license. // (See accompanying LICENSE file.) // // Written by: // // Atilim Gunes Baydin // atilimgunes.baydin@nuim.ie // // Barak A. Pearlmutter // barak@cs.nuim.ie // // Brain and Computation Lab // Hamilton Institute & Department of Computer Science // National University of Ireland Maynooth // Maynooth, Co. Kildare // Ireland // // www.bcl.hamilton.ie // /// Natural language processing namespace namespace Hype.NLP open Hype open DiffSharp.AD.Float32 open DiffSharp.Util /// Language model type Language(tokens:string[], punctuation:string[]) = member val Tokens = tokens static member TokenizeWords(text:string, punctuation) = //let mutable t' = text.ToLowerInvariant() let mutable t' = text punctuation |> Array.iter (fun p -> t' <- t'.Replace(p, " " + p + " ")) t'.Split([|" "|], System.StringSplitOptions.RemoveEmptyEntries) new(text:string, punctuation:string[]) = Language(Language.TokenizeWords(text, punctuation) |> Set.ofArray |> Set.toArray, punctuation) new(text:string) = Language(text, [|"."; ","; ":"; ";"; "("; ")"; "!"; "?"|]) member l.Length = l.Tokens.Length member l.EncodeOneHot(x:string) = Language.TokenizeWords(x, punctuation) |> l.EncodeOneHot member l.EncodeOneHot(x:string[]) = try //x |> Array.map (fun v -> v.ToLowerInvariant()) x |> Array.map (fun v -> Array.findIndex (fun t -> t = v) l.Tokens) |> Array.map (DV.standardBasis l.Length) |> DM.ofCols with | _ -> failwith "Given token is not found in the language." 
member l.DecodeOneHot(x:DM) = try x |> DM.toCols |> Seq.map DV.maxIndex |> Seq.map (fun i -> l.Tokens.[i]) |> Seq.toArray with | _ -> [||] member l.Sample(probs:DM) = probs |> DM.toCols |> Seq.map (fun v -> Rnd.Choice(l.Tokens, v)) |> Seq.toArray member l.Sample(probs:DV) = Rnd.Choice(l.Tokens, probs) member l.Sample(model:DM->DM, start:string, stop:string[], maxlen) = let mutable x = start let mutable i = 0 let mutable t = ([while i < maxlen do yield x let p = x |> l.EncodeOneHot |> model let d = l.Sample(p).[0] match stop |> Array.tryFind (fun p -> p = d) with | Some(_) -> yield d i <- maxlen | _ -> x <- d i <- i + 1] |> List.map ((+) " ") |> List.fold (+) "").Trim() punctuation |> Array.iter (fun p -> t <- t.Replace(" " + p, p)) t ================================================ FILE: src/Hype/Neural.fs ================================================ // // This file is part of // Hype: Compositional Machine Learning and Hyperparameter Optimization // // Copyright (c) 2015, National University of Ireland Maynooth (Atilim Gunes Baydin, Barak A. Pearlmutter) // // Hype is released under the MIT license. // (See accompanying LICENSE file.) // // Written by: // // Atilim Gunes Baydin // atilimgunes.baydin@nuim.ie // // Barak A. Pearlmutter // barak@cs.nuim.ie // // Brain and Computation Lab // Hamilton Institute & Department of Computer Science // National University of Ireland Maynooth // Maynooth, Co. Kildare // Ireland // // www.bcl.hamilton.ie // /// Neural networks namespace namespace Hype.Neural open Hype open DiffSharp.AD.Float32 open DiffSharp.Util /// Base type for neural layers [<AbstractClass>] type Layer() = abstract member Init : unit -> unit abstract member Reset : unit -> unit abstract member Run : DM -> DM abstract member Encode : unit -> DV abstract member EncodeLength : int abstract member Decode : DV -> unit abstract member ToStringFull : unit -> string abstract member Visualize : unit -> string member l.Train(d:Dataset) = Layer.Train(l, d) member l.Train(d:Dataset, v:Dataset) = Layer.Train(l, d, v) member l.Train(d:Dataset, par:Params) = Layer.Train(l, d, par) member l.Train(d:Dataset, v:Dataset, par:Params) = Layer.Train(l, d, v, par) static member init (l:Layer) = l.Init() static member reset (l:Layer) = l.Reset() static member run x (l:Layer) = l.Run(x) static member encode (l:Layer) = l.Encode() static member encodeLength (l:Layer) = l.EncodeLength static member decode (l:Layer) (w:DV) = l.Decode(w) static member toString (l:Layer) = l.ToString() static member toStringFull (l:Layer) = l.ToStringFull() static member visualize (l:Layer) = l.Visualize() static member Train (l:Layer, d:Dataset) = Layer.Train(l, d, Dataset.empty, Params.Default) static member Train (l:Layer, d:Dataset, par:Params) = Layer.Train(l, d, Dataset.empty, par) static member Train (l:Layer, d:Dataset, v:Dataset) = Layer.Train(l, d, v, Params.Default) static member Train (l:Layer, d:Dataset, v:Dataset, par:Params) = let f = fun w x -> l.Decode w l.Run x let w0 = l.Encode() // try // grad (fun w -> Loss.L1Loss.FuncDM(d) (f w)) w0 |> ignore // with // | _ -> failwith "Input/output dimensions mismatch between dataset and the layer."
let w, loss, _, lhist = Optimize.Train(f, w0, d, v, par) w |> l.Decode loss, lhist /// Initialization schemes for neural layer weights type Initializer = | InitUniform of D * D | InitNormal of D * D | InitRBM of D | InitReLU | InitSigmoid | InitTanh | InitStandard | InitCustom of (int->int->D) override i.ToString() = match i with | InitUniform(min, max) -> sprintf "Uniform min=%A max=%A" min max | InitNormal(mu, sigma) -> sprintf "Normal mu=%A sigma=%A" mu sigma | InitRBM sigma -> sprintf "RBM sigma=%A" sigma | InitReLU -> "ReLU" | InitSigmoid -> "Sigmoid" | InitTanh -> "Tanh" | InitStandard -> "Standard" | InitCustom f -> "Custom" member i.InitDM(m, n) = let fanOut, fanIn = m, n match i with | InitUniform(min, max) -> Rnd.UniformDM(m, n, min, max) | InitNormal(mu, sigma) -> Rnd.NormalDM(m, n, mu, sigma) | InitRBM sigma -> Rnd.NormalDM(m, n, D 0.f, sigma) | InitReLU -> Rnd.NormalDM(m, n, D 0.f, sqrt (D 2.f / (float32 fanIn))) | InitSigmoid -> let r = D 4.f * sqrt (D 6.f / (fanIn + fanOut)) in Rnd.UniformDM(m, n, -r, r) | InitTanh -> let r = sqrt (D 6.f / (fanIn + fanOut)) in Rnd.UniformDM(m, n, -r, r) | InitStandard -> let r = (D 1.f) / sqrt (float32 fanIn) in Rnd.UniformDM(m, n, -r, r) | InitCustom f -> DM.init m n (fun _ _ -> f fanIn fanOut) member i.InitDM(m:DM) = i.InitDM(m.Rows, m.Cols) /// Linear layer type Linear(inputs:int, outputs:int, initializer:Initializer) = inherit Layer() new(inputs, outputs) = Linear(inputs, outputs, Initializer.InitStandard) member val W = initializer.InitDM(outputs, inputs) with get, set member val b = DV.zeroCreate outputs with get, set override l.Init() = l.W <- initializer.InitDM(l.W) l.b <- DV.zeroCreate l.b.Length override l.Reset() = () override l.Run (x:DM) = (l.W * x) + l.b override l.Encode () = DV.append (DM.toDV l.W) l.b override l.EncodeLength = l.W.Length + l.b.Length override l.Decode w = let ww = w |> DV.split [l.W.Length; l.b.Length] |> Array.ofSeq l.W <- ww.[0] |> DM.ofDV l.W.Rows l.b <- ww.[1] override l.ToString() = "Hype.Neural.Linear\n" + " " + l.W.Cols.ToString() + " -> " + l.W.Rows.ToString() + "\n" + sprintf " Learnable parameters: %i\n" l.EncodeLength + sprintf " Init: %O\n" initializer + sprintf " W : %i x %i\n" l.W.Rows l.W.Cols + sprintf " b : %i" l.b.Length override l.ToStringFull() = "Hype.Neural.Linear\n" + " " + l.W.Cols.ToString() + " -> " + l.W.Rows.ToString() + "\n" + sprintf " Learnable parameters: %i\n" l.EncodeLength + sprintf " Init: %O\n" initializer + sprintf " W:\n%O\n" l.W + sprintf " b:\n%O" l.b override l.Visualize() = "Hype.Neural.Linear\n" + " " + l.W.Cols.ToString() + " -> " + l.W.Rows.ToString() + "\n" + sprintf " Learnable parameters: %i\n" l.EncodeLength + sprintf " Init: %O\n" initializer + sprintf " W:\n%s\n" (l.W.Visualize()) + sprintf " b:\n%s" (l.b.Visualize()) member l.VisualizeWRowsAsImageGrid(imagerows:int) = "Hype.Neural.Linear\n" + " " + l.W.Cols.ToString() + " -> " + l.W.Rows.ToString() + "\n" + sprintf " Learnable parameters: %i\n" l.EncodeLength + sprintf " Init: %O\n" initializer + sprintf " W's rows %s\n" (Util.VisualizeDMRowsAsImageGrid(l.W, imagerows)) + sprintf " b:\n%s" (l.b.Visualize()) /// Linear layer with no bias type LinearNoBias(inputs:int, outputs:int, initializer:Initializer) = inherit Layer() new(inputs, outputs) = LinearNoBias(inputs, outputs, Initializer.InitStandard) member val W = initializer.InitDM(outputs, inputs) with get, set override l.Init() = l.W <- initializer.InitDM(l.W) override l.Reset() = () override l.Run (x:DM) = l.W * x override l.Encode () = l.W |> DM.toDV 
override l.EncodeLength = l.W.Length override l.Decode w = l.W <- w |> DM.ofDV l.W.Rows override l.ToString() = "Hype.Neural.LinearNoBias\n" + " " + l.W.Cols.ToString() + " -> " + l.W.Rows.ToString() + "\n" + sprintf " Learnable parameters: %i\n" l.EncodeLength + sprintf " Init: %O\n" initializer + sprintf " W : %i x %i" l.W.Rows l.W.Cols override l.ToStringFull() = "Hype.Neural.LinearNoBias\n" + " " + l.W.Cols.ToString() + " -> " + l.W.Rows.ToString() + "\n" + sprintf " Learnable parameters: %i\n" l.EncodeLength + sprintf " Init: %O\n" initializer + sprintf " W:\n%O" l.W override l.Visualize() = "Hype.Neural.LinearNoBias\n" + " " + l.W.Cols.ToString() + " -> " + l.W.Rows.ToString() + "\n" + sprintf " Learnable parameters: %i\n" l.EncodeLength + sprintf " Init: %O\n" initializer + sprintf " W:\n%s" (l.W.Visualize()) member l.VisualizeWRowsAsImageGrid(imagerows:int) = "Hype.Neural.LinearNoBias\n" + " " + l.W.Cols.ToString() + " -> " + l.W.Rows.ToString() + "\n" + sprintf " Learnable parameters: %i\n" l.EncodeLength + sprintf " Init: %O\n" initializer + sprintf " W's rows %s" (Util.VisualizeDMRowsAsImageGrid(l.W, imagerows)) /// Activation layer with custom functions type Activation(f:DM->DM) = inherit Layer() let f = f override l.Init () = () override l.Reset () = () override l.Run (x:DM) = f x override l.Encode () = DV.empty override l.EncodeLength = 0 override l.Decode w = () override l.ToString() = sprintf "Hype.Neural.Activation" override l.ToStringFull() = l.ToString() override l.Visualize() = l.ToString() /// Feedforward sequence of layers type FeedForward() = inherit Layer() let mutable (layers:Layer[]) = Array.empty let mutable encodelength = 0 let update() = encodelength <- layers |> Array.map Layer.encodeLength |> Array.sum member n.Add(l) = layers <- Array.append layers [|l|] update() member n.Insert(i, l) = let a = ResizeArray(layers) a.Insert(i, l) layers <- a.ToArray() update() member n.Remove(i) = let a = ResizeArray(layers) a.RemoveAt(i) layers <- a.ToArray() update() member n.Add(f:DM->DM) = n.Add(Activation(f)) member n.Insert(i, f:DM->DM) = n.Insert(i, Activation(f)) member n.Length = layers.Length member n.Item with get i = layers.[i] override n.Init() = layers |> Array.iter Layer.init override n.Reset() = layers |> Array.iter Layer.reset override n.Run(x:DM) = Array.fold Layer.run x layers override n.Encode() = layers |> Array.map Layer.encode |> Array.reduce DV.append override n.EncodeLength = encodelength override n.Decode(w) = w |> DV.split (layers |> Array.map Layer.encodeLength) |> Seq.iter2 Layer.decode layers override n.ToString() = let s = System.Text.StringBuilder() if n.Length > 0 then s.Append(" ") |> ignore for i = 0 to layers.Length - 1 do s.Append("(" + i.ToString() + ") -> ") |> ignore s.Remove(s.Length - 4, 4) |> ignore s.Append("\n\n") |> ignore for i = 0 to layers.Length - 1 do s.Append(" (" + i.ToString() + "): " + layers.[i].ToString() + "\n\n") |> ignore "Hype.Neural.FeedForward\n" + sprintf " Learnable parameters: %i\n" encodelength + s.ToString() override n.ToStringFull() = let s = System.Text.StringBuilder() if n.Length > 0 then s.Append(" ") |> ignore for i = 0 to layers.Length - 1 do s.Append("(" + i.ToString() + ") -> ") |> ignore s.Remove(s.Length - 4, 4) |> ignore s.Append("\n\n") |> ignore for i = 0 to layers.Length - 1 do s.Append(" (" + i.ToString() + "): " + layers.[i].ToStringFull() + "\n\n") |> ignore "Hype.Neural.FeedForward\n" + sprintf " Learnable parameters: %i\n" encodelength + s.ToString() override n.Visualize() = let s = 
System.Text.StringBuilder() if n.Length > 0 then s.Append(" ") |> ignore for i = 0 to layers.Length - 1 do s.Append("(" + i.ToString() + ") -> ") |> ignore s.Remove(s.Length - 4, 4) |> ignore s.Append("\n\n") |> ignore for i = 0 to layers.Length - 1 do s.Append(" (" + i.ToString() + "): " + layers.[i].Visualize() + "\n\n") |> ignore "Hype.Neural.FeedForward\n" + sprintf " Learnable parameters: %i\n" encodelength + s.ToString() /// Vanilla RNN layer type Recurrent(inputs:int, hiddenunits:int, outputs:int, activation:DV->DV, initializer:Initializer) = inherit Layer() new(inputs, hiddenunits, outputs) = Recurrent(inputs, hiddenunits, outputs, tanh, Initializer.InitTanh) new(inputs, hiddenunits, outputs, activation) = Recurrent(inputs, hiddenunits, outputs, activation, Initializer.InitTanh) member val Act = activation with get member val Whh = initializer.InitDM(hiddenunits, hiddenunits) with get, set member val Wxh = initializer.InitDM(hiddenunits, inputs) with get, set member val Why = initializer.InitDM(outputs, hiddenunits) with get, set member val bh = DV.zeroCreate hiddenunits with get, set member val by = DV.zeroCreate outputs with get, set member val h = DV.zeroCreate hiddenunits with get, set override l.Init() = l.Whh <- initializer.InitDM(l.Whh) l.Wxh <- initializer.InitDM(l.Wxh) l.Why <- initializer.InitDM(l.Why) l.bh <- DV.zeroCreate hiddenunits l.by <- DV.zeroCreate outputs l.h <- DV.zeroCreate hiddenunits override l.Reset() = l.h <- DV.zeroCreate hiddenunits override l.Run (x:DM) = let y = x |> DM.mapCols (fun x -> l.h <- l.Act ((l.Whh * l.h) + (l.Wxh * x) + l.bh) (l.Why * l.h) + l.by) l.h <- primalDeep l.h y override l.Encode () = [l.Whh; l.Wxh; l.Why] |> List.map DM.toDV |> List.append [l.bh; l.by] |> Seq.fold DV.append DV.Zero override l.EncodeLength = l.Whh.Length + l.Wxh.Length + l.Why.Length + l.bh.Length + l.by.Length override l.Decode w = let ww = w |> DV.split [l.bh.Length; l.by.Length; l.Whh.Length; l.Wxh.Length; l.Why.Length] |> Array.ofSeq l.bh <- ww.[0] l.by <- ww.[1] l.Whh <- ww.[2] |> DM.ofDV l.Whh.Rows l.Wxh <- ww.[3] |> DM.ofDV l.Wxh.Rows l.Why <- ww.[4] |> DM.ofDV l.Why.Rows l.h <- DV.zeroCreate hiddenunits override l.ToString() = "Hype.Neural.Recurrent\n" + " " + l.Wxh.Cols.ToString() + " -> " + l.Whh.Rows.ToString() + " -> " + l.Why.Rows.ToString() + "\n" + sprintf " Learnable parameters: %i\n" l.EncodeLength + sprintf " Init: %O\n" initializer + sprintf " Whh : %i x %i\n" l.Whh.Rows l.Whh.Cols + sprintf " Wxh : %i x %i\n" l.Wxh.Rows l.Wxh.Cols + sprintf " Why : %i x %i\n" l.Why.Rows l.Why.Cols + sprintf " bh : %i\n" l.bh.Length + sprintf " by : %i" l.by.Length override l.ToStringFull() = "Hype.Neural.Recurrent\n" + " " + l.Wxh.Cols.ToString() + " -> " + l.Whh.Rows.ToString() + " -> " + l.Why.Rows.ToString() + "\n" + sprintf " Learnable parameters: %i\n" l.EncodeLength + sprintf " Init: %O\n" initializer + sprintf " Whh:\n%O\n" l.Whh + sprintf " Wxh:\n%O\n" l.Wxh + sprintf " Why:\n%O\n" l.Why + sprintf " bh:\n%O\n" l.bh + sprintf " by:\n%O" l.by override l.Visualize() = "Hype.Neural.Recurrent\n" + " " + l.Wxh.Cols.ToString() + " -> " + l.Whh.Rows.ToString() + " -> " + l.Why.Rows.ToString() + "\n" + sprintf " Learnable parameters: %i\n" l.EncodeLength + sprintf " Init: %O\n" initializer + sprintf " Whh:\n%s\n" (l.Whh.Visualize()) + sprintf " Wxh:\n%s\n" (l.Wxh.Visualize()) + sprintf " Why:\n%s\n" (l.Why.Visualize()) + sprintf " bh:\n%s\n" (l.bh.Visualize()) + sprintf " by:\n%s" (l.by.Visualize()) /// Long short-term memory layer type LSTM(inputs:int, 
memcells:int) = inherit Layer() let initializer = Initializer.InitTanh member val Wxi = initializer.InitDM(memcells, inputs) with get, set member val Whi = initializer.InitDM(memcells, memcells) with get, set member val Wxc = initializer.InitDM(memcells, inputs) with get, set member val Whc = initializer.InitDM(memcells, memcells) with get, set member val Wxf = initializer.InitDM(memcells, inputs) with get, set member val Whf = initializer.InitDM(memcells, memcells) with get, set member val Wxo = initializer.InitDM(memcells, inputs) with get, set member val Who = initializer.InitDM(memcells, memcells) with get, set member val bi = DV.zeroCreate memcells with get, set member val bc = DV.zeroCreate memcells with get, set member val bf = DV.zeroCreate memcells with get, set member val bo = DV.zeroCreate memcells with get, set member val c = DV.zeroCreate memcells with get, set member val h = DV.zeroCreate memcells with get, set override l.Init() = l.Wxi <- initializer.InitDM(l.Wxi) l.Whi <- initializer.InitDM(l.Whi) l.Wxc <- initializer.InitDM(l.Wxc) l.Whc <- initializer.InitDM(l.Whc) l.Wxf <- initializer.InitDM(l.Wxf) l.Whf <- initializer.InitDM(l.Whf) l.Wxo <- initializer.InitDM(l.Wxo) l.Who <- initializer.InitDM(l.Who) l.bi <- DV.zeroCreate memcells l.bc <- DV.zeroCreate memcells l.bf <- DV.zeroCreate memcells l.bo <- DV.zeroCreate memcells l.c <- DV.zeroCreate memcells l.h <- DV.zeroCreate memcells override l.Reset() = l.c <- DV.zeroCreate memcells l.h <- DV.zeroCreate memcells override l.Run (x:DM) = let y = x |> DM.mapCols (fun x -> let i = sigmoid((l.Wxi * x) + (l.Whi * l.h) + l.bi) let c' = tanh((l.Wxc * x) + (l.Whc * l.h) + l.bc) let f = sigmoid((l.Wxf * x) + (l.Whf * l.h) + l.bf) l.c <- (i .* c') + (f .* l.c) let o = sigmoid((l.Wxo * x) + (l.Who * l.h) + l.bo) l.h <- o .* tanh l.c l.h) l.h <- primalDeep l.h l.c <- primalDeep l.c y override l.Encode() = [l.Wxi; l.Whi; l.Wxc; l.Whc; l.Wxf; l.Whf; l.Wxo; l.Who] |> List.map DM.toDV |> List.append [l.bi; l.bc; l.bf; l.bo] |> Seq.fold DV.append DV.Zero override l.EncodeLength = l.Wxi.Length + l.Whi.Length + l.Wxc.Length + l.Whc.Length + l.Wxf.Length + l.Whf.Length + l.Wxo.Length + l.Who.Length + l.bi.Length + l.bc.Length + l.bf.Length + l.bo.Length override l.Decode w = let ww = w |> DV.split [l.bi.Length; l.bc.Length; l.bf.Length; l.bo.Length; l.Wxi.Length; l.Whi.Length; l.Wxc.Length; l.Whc.Length; l.Wxf.Length; l.Whf.Length; l.Wxo.Length; l.Who.Length] |> Array.ofSeq l.bi <- ww.[0] l.bc <- ww.[1] l.bf <- ww.[2] l.bo <- ww.[3] l.Wxi <- ww.[4] |> DM.ofDV l.Wxi.Rows l.Whi <- ww.[5] |> DM.ofDV l.Whi.Rows l.Wxc <- ww.[6] |> DM.ofDV l.Wxc.Rows l.Whc <- ww.[7] |> DM.ofDV l.Whc.Rows l.Wxf <- ww.[8] |> DM.ofDV l.Wxf.Rows l.Whf <- ww.[9] |> DM.ofDV l.Whf.Rows l.Wxo <- ww.[10] |> DM.ofDV l.Wxo.Rows l.Who <- ww.[11] |> DM.ofDV l.Who.Rows l.c <- DV.zeroCreate memcells l.h <- DV.zeroCreate memcells override l.ToString() = "Hype.Neural.LSTM\n" + " " + inputs.ToString() + " -> " + memcells.ToString() + " -> " + memcells.ToString() + "\n" + sprintf " Learnable parameters: %i\n" l.EncodeLength + sprintf " Init: %O\n" initializer + sprintf " Wxi : %i x %i\n" l.Wxi.Rows l.Wxi.Cols + sprintf " Whi : %i x %i\n" l.Whi.Rows l.Whi.Cols + sprintf " Wxc : %i x %i\n" l.Wxc.Rows l.Wxc.Cols + sprintf " Whc : %i x %i\n" l.Whc.Rows l.Whc.Cols + sprintf " Wxf : %i x %i\n" l.Wxf.Rows l.Wxf.Cols + sprintf " Whf : %i x %i\n" l.Whf.Rows l.Whf.Cols + sprintf " Wxo : %i x %i\n" l.Wxo.Rows l.Wxo.Cols + sprintf " Who : %i x %i\n" l.Who.Rows l.Who.Cols + sprintf " bi : %i\n" 
l.bi.Length + sprintf " bc : %i\n" l.bc.Length + sprintf " bf : %i\n" l.bf.Length + sprintf " bo : %i" l.bo.Length override l.ToStringFull() = "Hype.Neural.LSTM\n" + " " + inputs.ToString() + " -> " + memcells.ToString() + " -> " + memcells.ToString() + "\n" + sprintf " Learnable parameters: %i\n" l.EncodeLength + sprintf " Init: %O\n" initializer + sprintf " Wxi:\n%O\n" l.Wxi + sprintf " Whi:\n%O\n" l.Whi + sprintf " Wxc:\n%O\n" l.Wxc + sprintf " Whc:\n%O\n" l.Whc + sprintf " Wxf:\n%O\n" l.Wxf + sprintf " Whf:\n%O\n" l.Whf + sprintf " Wxo:\n%O\n" l.Wxo + sprintf " Who:\n%O\n" l.Who + sprintf " bi:\n%O\n" l.bi + sprintf " bc:\n%O\n" l.bc + sprintf " bf:\n%O\n" l.bf + sprintf " bo:\n%O" l.bo override l.Visualize() = "Hype.Neural.LSTM\n" + " " + inputs.ToString() + " -> " + memcells.ToString() + " -> " + memcells.ToString() + "\n" + sprintf " Learnable parameters: %i\n" l.EncodeLength + sprintf " Init: %O\n" initializer + sprintf " Wxi:\n%s\n" (l.Wxi.Visualize()) + sprintf " Whi:\n%s\n" (l.Whi.Visualize()) + sprintf " Wxc:\n%s\n" (l.Wxc.Visualize()) + sprintf " Whc:\n%s\n" (l.Whc.Visualize()) + sprintf " Wxf:\n%s\n" (l.Wxf.Visualize()) + sprintf " Whf:\n%s\n" (l.Whf.Visualize()) + sprintf " Wxo:\n%s\n" (l.Wxo.Visualize()) + sprintf " Who:\n%s\n" (l.Who.Visualize()) + sprintf " bi:\n%s\n" (l.bi.Visualize()) + sprintf " bc:\n%s\n" (l.bc.Visualize()) + sprintf " bf:\n%s\n" (l.bf.Visualize()) + sprintf " bo:\n%s" (l.bo.Visualize()) /// Gated recurrent unit layer type GRU(inputs:int, memcells:int) = inherit Layer() let initializer = Initializer.InitStandard member val Wxz = initializer.InitDM(memcells, inputs) with get, set member val Whz = initializer.InitDM(memcells, memcells) with get, set member val Wxr = initializer.InitDM(memcells, inputs) with get, set member val Whr = initializer.InitDM(memcells, memcells) with get, set member val Wxh = initializer.InitDM(memcells, inputs) with get, set member val Whh = initializer.InitDM(memcells, memcells) with get, set member val bz = DV.zeroCreate memcells with get, set member val br = DV.zeroCreate memcells with get, set member val bh = DV.zeroCreate memcells with get, set member val h = DV.zeroCreate memcells with get, set override l.Init() = l.Wxz <- initializer.InitDM(l.Wxz) l.Whz <- initializer.InitDM(l.Whz) l.Wxr <- initializer.InitDM(l.Wxr) l.Whr <- initializer.InitDM(l.Whr) l.Wxh <- initializer.InitDM(l.Wxh) l.Whh <- initializer.InitDM(l.Whh) l.bz <- DV.zeroCreate memcells l.br <- DV.zeroCreate memcells l.bh <- DV.zeroCreate memcells l.h <- DV.zeroCreate memcells override l.Reset() = l.h <- DV.zeroCreate memcells override l.Run(x:DM) = let y = x |> DM.mapCols (fun x -> let z = sigmoid(l.Wxz * x + l.Whz * l.h + l.bz) let r = sigmoid(l.Wxr * x + l.Whr * l.h + l.br) let h' = tanh(l.Wxh * x + l.Whh * (l.h .* r)) l.h <- (1.f - z) .* h' + z .* l.h l.h) l.h <- primalDeep l.h y override l.Encode() = [l.Wxz; l.Whz; l.Wxr; l.Whr; l.Wxh; l.Whh] |> List.map DM.toDV |> List.append [l.bz; l.br; l.bh] |> Seq.fold DV.append DV.Zero override l.EncodeLength = l.Wxz.Length + l.Whz.Length + l.Wxr.Length + l.Whr.Length + l.Wxh.Length + l.Whh.Length + l.bz.Length + l.br.Length + l.bh.Length override l.Decode w = let ww = w |> DV.split [l.bz.Length; l.br.Length; l.bh.Length; l.Wxz.Length; l.Whz.Length; l.Wxr.Length; l.Whr.Length; l.Wxh.Length; l.Whh.Length] |> Array.ofSeq l.bz <- ww.[0] l.br <- ww.[1] l.bh <- ww.[2] l.Wxz <- ww.[3] |> DM.ofDV l.Wxh.Rows l.Whz <- ww.[4] |> DM.ofDV l.Whz.Rows l.Wxr <- ww.[5] |> DM.ofDV l.Wxr.Rows l.Whr <- ww.[6] |> DM.ofDV l.Whr.Rows 
l.Wxh <- ww.[7] |> DM.ofDV l.Wxh.Rows l.Whh <- ww.[8] |> DM.ofDV l.Whh.Rows override l.ToString() = "Hype.Neural.GRU\n" + " " + inputs.ToString() + " -> " + memcells.ToString() + " -> " + memcells.ToString() + "\n" + sprintf " Learnable parameters: %i\n" l.EncodeLength + sprintf " Init: %O\n" initializer + sprintf " Wxz : %i x %i\n" l.Wxz.Rows l.Wxz.Cols + sprintf " Whz : %i x %i\n" l.Whz.Rows l.Whz.Cols + sprintf " Wxr : %i x %i\n" l.Wxr.Rows l.Wxr.Cols + sprintf " Whr : %i x %i\n" l.Whr.Rows l.Whr.Cols + sprintf " Wxh : %i x %i\n" l.Wxh.Rows l.Wxh.Cols + sprintf " Whh : %i x %i\n" l.Whh.Rows l.Whh.Cols + sprintf " bz : %i\n" l.bz.Length + sprintf " br : %i\n" l.br.Length + sprintf " bh : %i\n" l.bh.Length override l.ToStringFull() = "Hype.Neural.GRU\n" + " " + inputs.ToString() + " -> " + memcells.ToString() + " -> " + memcells.ToString() + "\n" + sprintf " Learnable parameters: %i\n" l.EncodeLength + sprintf " Init: %O\n" initializer + sprintf " Wxz:\n%O\n" l.Wxz + sprintf " Whz:\n%O\n" l.Whz + sprintf " Wxr:\n%O\n" l.Wxr + sprintf " Whr:\n%O\n" l.Whr + sprintf " Wxh:\n%O\n" l.Wxh + sprintf " Whh:\n%O\n" l.Whh + sprintf " bz:\n%O\n" l.bz + sprintf " br:\n%O\n" l.br + sprintf " bh:\n%O\n" l.bh override l.Visualize() = "Hype.Neural.GRU\n" + " " + inputs.ToString() + " -> " + memcells.ToString() + " -> " + memcells.ToString() + "\n" + sprintf " Learnable parameters: %i\n" l.EncodeLength + sprintf " Init: %O\n" initializer + sprintf " Wxz:\n%s\n" (l.Wxz.Visualize()) + sprintf " Whz:\n%s\n" (l.Whz.Visualize()) + sprintf " Wxr:\n%s\n" (l.Wxr.Visualize()) + sprintf " Whr:\n%s\n" (l.Whr.Visualize()) + sprintf " Wxh:\n%s\n" (l.Wxh.Visualize()) + sprintf " Whh:\n%s\n" (l.Whh.Visualize()) + sprintf " bz:\n%s\n" (l.bz.Visualize()) + sprintf " br:\n%s\n" (l.br.Visualize()) + sprintf " bh:\n%s\n" (l.bh.Visualize()) /// Long short-term memory layer (alternative implementation) type LSTMAlt(inputs:int, memcells:int) = inherit Layer() let initializer = Initializer.InitTanh member val Wxh = initializer.InitDM(4 * memcells, inputs) with get, set member val Whh = initializer.InitDM(4 * memcells, memcells) with get, set member val b = DV.zeroCreate (4 * memcells) with get, set member val c = DV.zeroCreate memcells with get, set member val h = DV.zeroCreate memcells with get, set override l.Init() = l.Wxh <- initializer.InitDM(l.Wxh) l.Whh <- initializer.InitDM(l.Whh) l.b <- DV.zeroCreate (4 * memcells) l.c <- DV.zeroCreate memcells l.h <- DV.zeroCreate memcells override l.Reset() = l.c <- DV.zeroCreate memcells l.h <- DV.zeroCreate memcells override l.Run(x:DM) = let y = x |> DM.mapCols (fun x -> let x2h = l.Wxh * x let h2h = l.Whh * l.h let pre = x2h + h2h + l.b let pretan = tanh pre.[..memcells - 1] let presig = sigmoid pre.[memcells..] let c' = pretan let i = presig.[..memcells - 1] let f = presig.[memcells..(2 * memcells) - 1] let o = presig.[(2 * memcells)..] 
                    l.c <- (i .* c') + (f .* l.c)
                    l.h <- o .* tanh l.c
                    l.h)
        l.h <- primalDeep l.h
        l.c <- primalDeep l.c
        y
    override l.Encode() =
        [l.Wxh |> DM.toDV; l.Whh |> DM.toDV; l.b] |> Seq.fold DV.append DV.Zero
    override l.EncodeLength = l.Wxh.Length + l.Whh.Length + l.b.Length
    override l.Decode w =
        let ww = w |> DV.split [l.Wxh.Length; l.Whh.Length; l.b.Length] |> Array.ofSeq
        l.Wxh <- ww.[0] |> DM.ofDV l.Wxh.Rows
        l.Whh <- ww.[1] |> DM.ofDV l.Whh.Rows
        l.b <- ww.[2]
    override l.ToString() =
        "Hype.Neural.LSTMAlt\n"
        + " " + inputs.ToString() + " -> " + memcells.ToString() + " -> " + memcells.ToString() + "\n"
        + sprintf " Learnable parameters: %i\n" l.EncodeLength
        + sprintf " Init: %O\n" initializer
        + sprintf " Wxh : %i x %i\n" l.Wxh.Rows l.Wxh.Cols
        + sprintf " Whh : %i x %i\n" l.Whh.Rows l.Whh.Cols
        + sprintf " b : %i\n" l.b.Length
    override l.ToStringFull() =
        "Hype.Neural.LSTMAlt\n"
        + " " + inputs.ToString() + " -> " + memcells.ToString() + " -> " + memcells.ToString() + "\n"
        + sprintf " Learnable parameters: %i\n" l.EncodeLength
        + sprintf " Init: %O\n" initializer
        + sprintf " Wxh:\n%O\n" l.Wxh
        + sprintf " Whh:\n%O\n" l.Whh
        + sprintf " b:\n%O\n" l.b
    override l.Visualize() =
        "Hype.Neural.LSTMAlt\n"
        + " " + inputs.ToString() + " -> " + memcells.ToString() + " -> " + memcells.ToString() + "\n"
        + sprintf " Learnable parameters: %i\n" l.EncodeLength
        + sprintf " Init: %O\n" initializer
        + sprintf " Wxh:\n%s\n" (l.Wxh.Visualize())
        + sprintf " Whh:\n%s\n" (l.Whh.Visualize())
        + sprintf " b:\n%s\n" (l.b.Visualize())
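// A minimal usage sketch (illustrative only; the input matrix, its dimensions,
// and the DM.init initializer call are assumptions, not part of this file).
// A recurrent layer maps a DM whose columns are successive time steps to a DM
// of hidden states, and Reset() clears the carried state between sequences:
//
//     let layer = GRU(10, 20)                      // 10 inputs -> 20 memory cells
//     let xs = DM.init 10 5 (fun _ _ -> 0.1f)      // one sequence of 5 time steps
//     let hs = layer.Run(xs)                       // 20 x 5 matrix of hidden states
//     layer.Reset()                                // start the next sequence fresh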
================================================
FILE: src/Hype/Optimize.fs
================================================
//
// This file is part of
// Hype: Compositional Machine Learning and Hyperparameter Optimization
//
// Copyright (c) 2015, National University of Ireland Maynooth (Atilim Gunes Baydin, Barak A. Pearlmutter)
//
// Hype is released under the MIT license.
// (See accompanying LICENSE file.)
//
// Written by:
//
// Atilim Gunes Baydin
// atilimgunes.baydin@nuim.ie
//
// Barak A. Pearlmutter
// barak@cs.nuim.ie
//
// Brain and Computation Lab
// Hamilton Institute & Department of Computer Science
// National University of Ireland Maynooth
// Maynooth, Co. Kildare
// Ireland
//
// www.bcl.hamilton.ie
//

/// Optimization namespace
namespace Hype

open Hype
open DiffSharp.AD.Float32
open DiffSharp.Util

/// Learning rate schemes
type LearningRate =
    /// Constant
    | Constant of D
    /// 1 / t decay, a = a0 / (1 + kt). Initial value, decay rate
    | Decay of D * D
    /// Exponential decay, a = a0 * exp(-kt). Initial value, decay rate
    | ExpDecay of D * D
    /// Scheduled learning rate vector, its length overrides Params.Epochs
    | Schedule of DV
    /// Backtracking line search. Initial value, c, rho
    | Backtrack of D * D * D
    /// Strong Wolfe line search. amax, c1, c2
    | StrongWolfe of D * D * D
    /// AdaGrad. Initial value
    | AdaGrad of D
    /// RMSProp. Initial value, decay rate
    | RMSProp of D * D
    static member DefaultConstant = Constant (D 0.001f)
    static member DefaultDecay = Decay (D 1.f, D 0.1f)
    static member DefaultExpDecay = ExpDecay (D 1.f, D 0.1f)
    static member DefaultBacktrack = Backtrack (D 1.f, D 0.0001f, D 0.5f)
    static member DefaultStrongWolfe = StrongWolfe (D 1.f, D 0.0001f, D 0.5f)
    static member DefaultAdaGrad = AdaGrad (D 0.001f)
    static member DefaultRMSProp = RMSProp (D 0.001f, D 0.9f)
    override l.ToString() =
        match l with
        | Constant a -> sprintf "Constant a = %A" a
        | Decay (a0, k) -> sprintf "1/t decay a0 = %A, k = %A" a0 k
        | ExpDecay (a0, k) -> sprintf "Exponential decay a0 = %A, k = %A" a0 k
        | Schedule a -> sprintf "Scheduled of length %A" a.Length
        | Backtrack (a0, c, r) -> sprintf "Backtracking a0 = %A, c = %A, r = %A" a0 c r
        | StrongWolfe (amax, c1, c2) -> sprintf "Strong Wolfe amax = %A, c1 = %A, c2 = %A" amax c1 c2
        | AdaGrad (a0) -> sprintf "AdaGrad a0 = %A" a0
        | RMSProp (a0, k) -> sprintf "RMSProp a0 = %A, k = %A" a0 k
    member l.Func =
        let loopLimit = 500
        match l with
        | Constant a -> fun _ _ _ _ _ _ _ -> box a
        | Decay (a0, k) -> fun i _ _ _ _ _ _ -> box (a0 / (1.f + k * i))
        | ExpDecay (a0, k) -> fun i _ _ _ _ _ _ -> box (a0 * exp (-k * i))
        | Schedule a -> fun i _ _ _ _ _ _ -> box a.[i % a.Length]
        | Backtrack (a0, c, r) ->
            fun _ w f v g _ p ->
                let mutable a = a0
                let mutable i = 0
                let mutable found = false
                while not found do
                    if f (w + a * p) < v + c * a * (p * g) then
                        found <- true
                    else
                        a <- r * a
                    i <- i + 1
                    if i > loopLimit then
                        found <- true
                        Util.printLog "*** BACKTRACKING DID NOT CONVERGE ***"
                box a
        | StrongWolfe (amax, c1, c2) ->
            fun _ w f v g _ p ->
                let v0 = v
                let gp0 = g * p
                let inline zoom a1 a2 =
                    let mutable al = a1
                    let mutable ah = a2
                    let mutable a' = a1
                    let mutable v'al = f (w + al * p)
                    let mutable i = 0
                    let mutable found = false
                    while not found do
                        a' <- (al + ah) / D 2.f
                        let v', gg = grad' f (w + a' * p)
                        if (v' > v0 + c1 * a' * gp0) || (v' >= v'al) then
                            ah <- a'
                        else
                            let gp' = gg * p
                            if abs gp' <= -c2 * gp0 then
                                found <- true
                            elif gp' * (ah - al) >= D 0.f then
                                ah <- al
                            al <- a'
                            v'al <- v'
                        i <- i + 1
                        if i > loopLimit then
                            found <- true
                            Util.printLog "*** STRONG WOLFE (ZOOM) DID NOT CONVERGE ***"
                    a'
                let mutable v = v0
                let mutable v' = v0
                let mutable gp' = gp0
                let mutable a = D 0.f
                let mutable a' = Rnd.UniformD(amax)
                let mutable a'' = a'
                let mutable i = 1
                let mutable found = false
                while not found do
                    let vv, gg = grad' f (w + a' * p)
                    v' <- vv
                    gp' <- gg * p
                    if (v' > v0 + c1 * a' * gp0) || ((i > 1) && (v' >= v)) then
                        a'' <- zoom a a'
                        found <- true
                    elif (abs gp') <= (-c2 * gp0) then
                        a'' <- a'
                        found <- true
                    elif gp' >= D 0.f then
                        a'' <- zoom a' a
                        found <- true
                    else
                        a <- a'
                        v <- v'
                        a' <- Rnd.UniformD(a', amax)
                    i <- i + 1
                    if i > loopLimit then
                        found <- true
                        Util.printLog "*** STRONG WOLFE DID NOT CONVERGE ***"
                box a''
        | AdaGrad (a0) ->
            fun _ _ _ _ g (gcache:DV ref) _ ->
                gcache := !gcache + (g .* g)
                box (a0 / sqrt (!gcache + 1e-8f))
        | RMSProp (a0, k) ->
            fun _ _ _ _ g (gcache:DV ref) _ ->
                gcache := (k * !gcache) + (1.f - k) * (g .* g)
                box (a0 / sqrt (!gcache + 1e-6f))

/// Training batch configuration
type Batch =
    | Full
    /// Minibatch of given size
    | Minibatch of int
    /// Minibatch with size 1, SGD
    | Stochastic
    override b.ToString() =
        match b with
        | Full -> "Full"
        | Minibatch n -> sprintf "Minibatches of %A" n
        | Stochastic -> "Stochastic (minibatch of 1)"
    member b.Func =
        match b with
        | Full -> fun (d:Dataset) _ -> d
        | Minibatch n -> fun d i -> d.[(n * i)..((n * i) + n - 1)]
        | Stochastic -> fun d i -> d.[i..i]
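// Sketch of how Batch.Func partitions a dataset (`d` here is hypothetical).
// Minibatch n maps batch index i to the example columns n*i .. n*i + n - 1,
// so one epoch over a dataset of length L visits L / n batches; Stochastic is
// the n = 1 special case, and Full always returns the whole dataset:
//
//     let pick = (Minibatch 10).Func
//     let third = pick d 2      // examples 20..29 of d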
/// Gradient-based optimization methods
type Method =
    /// Gradient descent
    | GD
    /// Conjugate gradient
    | CG
    /// Conjugate descent
    | CD
    /// Nonlinear conjugate gradient
    | NonlinearCG
    /// Dai & Yuan conjugate gradient
    | DaiYuanCG
    /// Newton conjugate gradient
    | NewtonCG
    /// Exact Newton
    | Newton
    override o.ToString() =
        match o with
        | GD -> "Gradient descent"
        | CG -> "Conjugate gradient"
        | CD -> "Conjugate descent"
        | DaiYuanCG -> "Dai & Yuan conjugate gradient"
        | NonlinearCG -> "Nonlinear conjugate gradient"
        | NewtonCG -> "Newton conjugate gradient"
        | Newton -> "Exact Newton"
    member o.Func =
        match o with
        | GD ->
            fun w (f:DV->D) _ _ gradclip ->
                let v', g' = grad' f w
                let g' = gradclip g'
                let p' = -g'
                v', g', p'
        /// Hestenes and Stiefel 1952
        | CG ->
            fun w f g p gradclip ->
                let v', g' = grad' f w
                let g' = gradclip g'
                let y = g' - g
                let b = (g' * y) / (p * y)
                let p' = -g' + b * p
                v', g', p'
        /// Fletcher 1987
        | CD ->
            fun w f g p gradclip ->
                let v', g' = grad' f w
                let g' = gradclip g'
                let b = (DV.normSq g') / (-p * g)
                let p' = -g' + b * p
                v', g', p'
        /// Dai and Yuan 1999
        | DaiYuanCG ->
            fun w f g p gradclip ->
                let v', g' = grad' f w
                let g' = gradclip g'
                let y = g' - g
                let b = (DV.normSq g') / (p * y)
                let p' = -g' + b * p
                v', g', p'
        /// Fletcher and Reeves 1964
        | NonlinearCG ->
            fun w f g p gradclip ->
                let v', g' = grad' f w
                let g' = gradclip g'
                let b = (DV.normSq g') / (DV.normSq g)
                let p' = -g' + b * p
                v', g', p'
        | NewtonCG ->
            fun w f _ p gradclip ->
                let v', g' = grad' f w
                let g' = gradclip g'
                let hv = hessianv f w p
                let b = (g' * hv) / (p * hv)
                let p' = -g' + b * p
                v', g', p'
        | Newton ->
            fun w f _ _ gradclip ->
                let v', g', h' = gradhessian' f w
                let g' = gradclip g'
                let p' = -DM.solveSymmetric h' g'
                v', g', p'

/// Momentum configuration
type Momentum =
    /// Default momentum
    | Momentum of D
    /// Nesterov momentum
    | Nesterov of D
    | NoMomentum
    static member DefaultMomentum = Momentum (D 0.9f)
    static member DefaultNesterov = Nesterov (D 0.9f)
    override m.ToString() =
        match m with
        | Momentum m -> sprintf "Standard %A" m
        | Nesterov m -> sprintf "Nesterov %A" m
        | NoMomentum -> "None"
    member m.Func =
        match m with
        | Momentum m -> fun (u:DV) (u':DV) -> (m * u) + u'
        | Nesterov m -> fun u u' -> (m * m * u) + (m + D 1.f) * u'
        | NoMomentum -> fun _ u' -> u'

/// Loss function configuration
type Loss =
    /// L1 norm, least absolute deviations
    | L1Loss
    /// L2 norm
    | L2Loss
    /// L2 norm squared, least squares
    | Quadratic
    /// Cross entropy after linear layer
    | CrossEntropyOnLinear
    /// Cross entropy after softmax layer
    | CrossEntropyOnSoftmax
    override l.ToString() =
        match l with
        | L1Loss -> "L1 norm, least absolute deviations"
        | L2Loss -> "L2 norm"
        | Quadratic -> "L2 norm squared, least squares"
        | CrossEntropyOnLinear -> "Cross entropy after linear layer"
        | CrossEntropyOnSoftmax -> "Cross entropy after softmax layer"
    member l.Func =
        match l with
        | L1Loss -> fun (d:Dataset) (f:DM->DM) -> ((d.Y - (f d.X)) |> DM.toCols |> Seq.sumBy DV.l1norm) / d.Length
        | L2Loss -> fun d f -> ((d.Y - (f d.X)) |> DM.toCols |> Seq.sumBy DV.l2norm) / d.Length
        | Quadratic -> fun d f -> ((d.Y - (f d.X)) |> DM.toCols |> Seq.sumBy DV.l2normSq) / d.Length
        | CrossEntropyOnLinear -> fun d f -> ((f d.X) |> DM.mapiCols (fun i v -> toDV [(logsumexp v) - v.[d.Yi.[i]]]) |> DM.sum) / d.Length
        | CrossEntropyOnSoftmax -> fun d f -> -((f d.X) |> DM.mapiCols (fun i v -> toDV [(DV.standardBasis v.Length d.Yi.[i]) * log v]) |> DM.sum) / d.Length
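// Loss conventions used above: every variant averages over the columns
// (examples) of the dataset. For classification, d.Yi holds integer class
// indices; CrossEntropyOnLinear expects raw scores v and uses the numerically
// stable form logsumexp(v) - v.[target], whereas CrossEntropyOnSoftmax expects
// probabilities and reduces to -log v.[target], since the dot product with the
// standard basis vector picks out the target component of log v.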
/// Regularization configuration
type Regularization =
    /// L1 regularization
    | L1Reg of D
    /// L2 regularization
    | L2Reg of D
    | NoReg
    static member DefaultL1Reg = L1Reg (D 0.0001f)
    static member DefaultL2Reg = L2Reg (D 0.0001f)
    override r.ToString() =
        match r with
        | L1Reg l -> sprintf "L1 lambda = %A" l
        | L2Reg l -> sprintf "L2 lambda = %A" l
        | NoReg -> "None"
    member r.Func =
        match r with
        | L1Reg l -> fun (w:DV) -> l * (DV.l1norm w)
        | L2Reg l -> fun w -> l * (DV.l2normSq w)
        | NoReg -> fun w -> D 0.f

/// Gradient clipping configuration
type GradientClipping =
    /// Norm clipping
    | NormClip of D
    | NoClip
    static member DefaultNormClip = NormClip (D 1.f)
    override g.ToString() =
        match g with
        | NormClip threshold -> sprintf "Norm clipping threshold = %A" threshold
        | NoClip -> "None"
    member g.Func =
        match g with
        | NormClip threshold -> fun (g:DV) -> let ng = DV.norm g in if ng > threshold then (threshold / ng) * g else g
        | NoClip -> id

/// Early stopping configuration
type EarlyStopping =
    /// Stagnation patience, overfitting patience
    | Early of int * int
    | NoEarly
    static member DefaultEarly = Early (750, 10)
    override e.ToString() =
        match e with
        | Early (s, o) -> sprintf "Stagnation thresh. = %A, overfit. thresh. = %A" s o
        | NoEarly -> "None"

/// Record type holding optimization or training parameters
type Params =
    {Epochs : int
     Method : Method
     LearningRate : LearningRate
     Momentum : Momentum
     Loss : Loss
     Regularization : Regularization
     GradientClipping : GradientClipping
     Batch : Batch
     EarlyStopping : EarlyStopping
     ImprovementThreshold : D
     Silent : bool
     ReturnBest : bool
     ValidationInterval : int
     LoggingFunction : int->DV->D->unit}

[<CompilationRepresentation(CompilationRepresentationFlags.ModuleSuffix)>]
module Params =
    let Default = {Epochs = 100
                   LearningRate = LearningRate.DefaultRMSProp
                   Momentum = NoMomentum
                   Loss = L2Loss
                   Regularization = Regularization.DefaultL2Reg
                   GradientClipping = NoClip
                   Method = GD
                   Batch = Full
                   EarlyStopping = NoEarly
                   ImprovementThreshold = D 0.995f
                   Silent = false
                   ReturnBest = true
                   ValidationInterval = 10
                   LoggingFunction = fun _ _ _ -> ()}
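// Training and optimization configurations are ordinary record updates over
// Params.Default. A sketch (field values illustrative):
//
//     let par = {Params.Default with
//                 Epochs = 50
//                 Method = NonlinearCG
//                 LearningRate = LearningRate.DefaultBacktrack
//                 Batch = Minibatch 10
//                 EarlyStopping = EarlyStopping.DefaultEarly}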
/// Main optimization module
type Optimize =

    /// Minimize vector-to-scalar function `f`, starting from initial parameter vector `w0`. Uses the default optimization configuration in `Params.Default`.
    static member Minimize (f:DV->D, w0:DV) = Optimize.Minimize(f, w0, Params.Default)

    /// Minimize vector-to-scalar function `f`, starting from initial parameter vector `w0`. Uses the optimization configuration given in `par`.
    static member Minimize (f:DV->D, w0:DV, par:Params) =
        let dir = par.Method.Func
        let lr = par.LearningRate.Func
        let gradclip = par.GradientClipping.Func
        let mom = par.Momentum.Func
        let iters =
            match par.LearningRate with
            | Schedule a -> a.Length
            | _ -> par.Epochs
        if not par.Silent then
            Util.printLog "--- Minimization started"
            Util.printLog (sprintf "Parameters     : %A" w0.Length)
            Util.printLog (sprintf "Iterations     : %A" iters)
            Util.printLog (sprintf "Valid. interval: %i" par.ValidationInterval)
            Util.printLog (sprintf "Method         : %O" par.Method)
            Util.printLog (sprintf "Learning rate  : %O" par.LearningRate)
            Util.printLog (sprintf "Momentum       : %O" par.Momentum)
            Util.printLog (sprintf "Gradient clip. : %O" par.GradientClipping)
            Util.printLog (sprintf "Early stopping : %O" par.EarlyStopping)
            Util.printLog (sprintf "Improv. thresh.: %A" par.ImprovementThreshold)
            Util.printLog (sprintf "Return best    : %A" par.ReturnBest)
        let mutable i = 0
        let mutable w = w0
        let l, g = grad' f w0
        let mutable l = l
        let mutable l' = l
        let mutable g = g
        let mutable p = -g
        let mutable u = DV.ZeroN g.Length
        let gcache = ref DV.Zero
        let l0 = l
        let mutable wbest = w0
        let mutable lbest = l0
        let mutable repllast = l0
        let mutable replbest = l0
        let mutable replbestchar = " "
        let mutable whist = []
        let mutable lhist = []
        let ldiffchar l = if l < D 0.f then "↓" elif l > D 0.f then "↑" else "-"
        let ichars = iters.ToString().Length
        let mutable stagnation = -par.ValidationInterval
        let mutable earlystop = false
        let isNice (v:D) =
            let vf = float32 v
            if System.Single.IsNaN(vf) then false
            elif System.Single.IsInfinity(vf) then false
            elif System.Single.IsNegativeInfinity(vf) then false
            elif System.Single.IsPositiveInfinity(vf) then false
            else true
        let mutable diverged = false
        let start = System.DateTime.Now
        while (i < iters) && (not earlystop) do
            let l'', g', p' = dir w f g p gradclip
            l' <- l''
            if not (isNice l') then
                if not par.Silent then Util.printLog "*** MINIMIZATION DIVERGED: Function value is out of bounds ***"
                earlystop <- true
                diverged <- true
            if (l' < par.ImprovementThreshold * lbest) && (not diverged) then
                wbest <- w
                lbest <- l'
            whist <- [w] @ whist
            lhist <- [l] @ lhist
            if i % par.ValidationInterval = 0 then
                let repldiff = l' - repllast
                if l' < par.ImprovementThreshold * replbest then
                    replbest <- l'
                    replbestchar <- "▼"
                    stagnation <- 0
                else
                    replbestchar <- " "
                    stagnation <- stagnation + par.ValidationInterval
                    match par.EarlyStopping with
                    | Early (s, _) ->
                        if stagnation >= s then
                            if not par.Silent then Util.printLog "*** EARLY STOPPING TRIGGERED: Stagnation ***"
                            earlystop <- true
                    | _ -> ()
                if not par.Silent then
                    match par.EarlyStopping with
                    | Early (s, _) -> Util.printLog (sprintf "%*i/%i | %O [%s%s] | Stag:%*i" ichars (i + 1) iters l' (ldiffchar repldiff) replbestchar (s.ToString().Length) stagnation)
                    | _ -> Util.printLog (sprintf "%*i/%i | %O [%s%s]" ichars (i + 1) iters l' (ldiffchar repldiff) replbestchar)
                repllast <- l'
            par.LoggingFunction i w l'
            let mutable u' = DV.Zero
            match lr i w f l' g' gcache p' with
            | :? D as a -> u' <- a * p'    // A scalar learning rate
            | :? DV as a -> u' <- a .* p'  // Vector of independent learning rates
            u' <- mom u u'
            w <- w + u'
            l <- l'
            g <- g'
            p <- p' // Or, p <- u'
            u <- u'
            i <- i + 1
        if not diverged then
            let l'', _, _ = dir w f g p gradclip
            l' <- l''
            if l' < par.ImprovementThreshold * lbest then
                wbest <- w
                lbest <- l'
        let duration = System.DateTime.Now.Subtract(start)
        let wfinal = if par.ReturnBest || diverged then wbest else w
        let lfinal = if par.ReturnBest || diverged then lbest else l'
        let lchg = lfinal - l0
        let lchgs = lchg / (float32 duration.TotalSeconds)
        let es = (float i) / (duration.TotalSeconds)
        let em = (float i) / (duration.TotalMinutes)
        if not par.Silent then
            Util.printLog (sprintf "Duration       : %A" duration)
            Util.printLog (sprintf "Value initial  : %O" (primal l0))
            Util.printLog (sprintf "Value final    : %O %s" (primal lfinal) (if par.ReturnBest then "(Best)" else "(Last)"))
            Util.printLog (sprintf "Value change   : %O (%.2f %%)" (primal lchg) (float32 (100 * lchg / l0)))
            Util.printLog (sprintf "Value chg. / s : %O" (primal lchgs))
            Util.printLog (sprintf "Iter. / s      : %A" es)
            Util.printLog (sprintf "Iter. / min    : %A" em)
            Util.printLog "--- Minimization finished"
        wfinal, lfinal, (whist |> List.rev |> List.toArray), (lhist |> List.rev |> List.toArray)
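    // Sketch: minimizing a hand-written vector-to-scalar objective (values
    // illustrative). The result is a 4-tuple of final weights, final value,
    // and the weight and value histories as arrays:
    //
    //     let f (w:DV) = DV.normSq (w - toDV [3.f; 5.f])
    //     let wopt, vopt, whist, vhist =
    //         Optimize.Minimize(f, toDV [0.f; 0.f], {Params.Default with Epochs = 100})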
    /// Train model function `f`, starting from initial parameter vector `w0`, by computing the loss for the training data given in dataset `d`. Uses the default optimization configuration in `Params.Default`.
    static member Train (f:DV->DV->D, w0:DV, d:Dataset) = Optimize.Train((fun w v -> toDV [f w v]), w0, d)

    /// Train model function `f`, starting from initial parameter vector `w0`, by computing the loss for the training data given in dataset `d`. Uses the optimization configuration given in `par`.
    static member Train (f:DV->DV->D, w0:DV, d:Dataset, par:Params) = Optimize.Train((fun w v -> toDV [f w v]), w0, d, par)

    /// Train model function `f`, starting from initial parameter vector `w0`, by computing the loss for the training data given in dataset `d` and also monitoring the loss for the validation data given in dataset `v`. Uses the default optimization configuration in `Params.Default`.
    static member Train (f:DV->DV->D, w0:DV, d:Dataset, v:Dataset) = Optimize.Train((fun w v -> toDV [f w v]), w0, d, v)

    /// Train model function `f`, starting from initial parameter vector `w0`, by computing the loss for the training data given in dataset `d` and also monitoring the loss for the validation data given in dataset `v`. Uses the optimization configuration given in `par`.
    static member Train (f:DV->DV->D, w0:DV, d:Dataset, v:Dataset, par:Params) = Optimize.Train((fun w v -> toDV [f w v]), w0, d, v, par)

    /// Train model function `f`, starting from initial parameter vector `w0`, by computing the loss for the training data given in dataset `d`. Uses the default optimization configuration in `Params.Default`.
    static member Train (f:DV->DV->DV, w0:DV, d:Dataset) = Optimize.Train(f, w0, d, Dataset.empty, Params.Default)

    /// Train model function `f`, starting from initial parameter vector `w0`, by computing the loss for the training data given in dataset `d`. Uses the optimization configuration given in `par`.
    static member Train (f:DV->DV->DV, w0:DV, d:Dataset, par:Params) = Optimize.Train(f, w0, d, Dataset.empty, par)

    /// Train model function `f`, starting from initial parameter vector `w0`, by computing the loss for the training data given in dataset `d`, and also monitoring the loss for the validation data given in dataset `v`. Uses the default optimization configuration in `Params.Default`.
    static member Train (f:DV->DV->DV, w0:DV, d:Dataset, v:Dataset) = Optimize.Train(f, w0, d, v, Params.Default)

    /// Train model function `f`, starting from initial parameter vector `w0`, by computing the loss for the training data given in dataset `d`, and also monitoring the loss for the validation data given in dataset `v`. Uses the optimization configuration given in `par`.
    static member Train (f:DV->DV->DV, w0:DV, d:Dataset, v:Dataset, par:Params) = Optimize.Train (f >> DM.mapCols, w0, d, v, par)

    /// Train model function `f`, starting from initial parameter vector `w0`, by computing the loss for the training data given in dataset `d`. Uses the default optimization configuration in `Params.Default`.
    static member Train (f:DV->DM->DM, w0:DV, d:Dataset) = Optimize.Train(f, w0, d, Dataset.empty, Params.Default)
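    // Sketch: training a model function against a dataset (`w0` and `data`
    // are hypothetical). A model in DM form is any f : DV -> DM -> DM mapping
    // the weight vector and a matrix of input columns to a matrix of outputs:
    //
    //     let model (w:DV) (x:DM) = (DM.ofDV 1 w) * x   // linear model, one output row
    //     let w', loss', _, _ =
    //         Optimize.Train(model, w0, data, {Params.Default with Loss = Quadratic})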
    /// Train model function `f`, starting from initial parameter vector `w0`, by computing the loss for the training data given in dataset `d`. Uses the optimization configuration given in `par`.
    static member Train (f:DV->DM->DM, w0:DV, d:Dataset, par:Params) = Optimize.Train(f, w0, d, Dataset.empty, par)

    /// Train model function `f`, starting from initial parameter vector `w0`, by computing the loss for the training data given in dataset `d`, and also monitoring the loss for the validation data given in dataset `v`. Uses the default optimization configuration in `Params.Default`.
    static member Train (f:DV->DM->DM, w0:DV, d:Dataset, v:Dataset) = Optimize.Train(f, w0, d, v, Params.Default)

    /// Train model function `f`, starting from initial parameter vector `w0`, by computing the loss for the training data given in dataset `d`, and also monitoring the loss for the validation data given in dataset `v`. Uses the optimization configuration given in `par`.
    static member Train (f:DV->DM->DM, w0:DV, d:Dataset, v:Dataset, par:Params) =
        let b = par.Batch.Func
        let dir = par.Method.Func
        let lr = par.LearningRate.Func
        let gradclip = par.GradientClipping.Func
        let mom = par.Momentum.Func
        let reg = par.Regularization.Func
        let epochs =
            match par.LearningRate with
            | Schedule l -> l.Length
            | _ -> par.Epochs
        let loss = par.Loss.Func
        let batches, batchsize =
            match par.Batch with
            | Full -> 1, d.Length
            | Minibatch n -> d.Length / n, n
            | Stochastic -> d.Length, 1
        let iters = epochs * batches
        if not par.Silent then
            Util.printLog "--- Training started"
            Util.printLog (sprintf "Parameters     : %A" w0.Length)
            Util.printLog (sprintf "Iterations     : %A" iters)
            Util.printLog (sprintf "Epochs         : %A" epochs)
            Util.printLog (sprintf "Batches        : %O (%A per epoch)" par.Batch batches)
            Util.printLog (sprintf "Training data  : %i" d.Length)
            if Dataset.isEmpty v then
                Util.printLog (sprintf "Validation data: None")
            else
                Util.printLog (sprintf "Validation data: %i" v.Length)
            Util.printLog (sprintf "Valid. interval: %i" par.ValidationInterval)
            Util.printLog (sprintf "Method         : %O" par.Method)
            Util.printLog (sprintf "Learning rate  : %O" par.LearningRate)
            Util.printLog (sprintf "Momentum       : %O" par.Momentum)
            Util.printLog (sprintf "Loss           : %O" par.Loss)
            Util.printLog (sprintf "Regularizer    : %O" par.Regularization)
            Util.printLog (sprintf "Gradient clip. : %O" par.GradientClipping)
            Util.printLog (sprintf "Early stopping : %O" par.EarlyStopping)
            Util.printLog (sprintf "Improv. thresh.: %A" par.ImprovementThreshold)
            Util.printLog (sprintf "Return best    : %A" par.ReturnBest)
        let q i w = (loss (b d i) (f w)) + reg w
        let qvalid =
            if Dataset.isEmpty v then fun _ -> D 0.f
            else fun w -> (loss v (f w)) + reg w
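        // `q i w` is the regularized training objective for minibatch i: the
        // configured loss of model f with weights w over batch i of d, plus
        // the regularization penalty on w. `qvalid` is the same objective over
        // the validation set, used only for monitoring and early stopping
        // (it is the constant D 0.f when no validation set is given).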
        // i  : epoch
        // w  : previous weights
        // w' : new weights
        // l  : previous loss
        // l' : new loss
        // g  : previous gradient
        // g' : next gradient
        // p  : previous direction
        // p' : next direction
        // u  : previous velocity
        // u' : next velocity
        let mutable epoch = 0
        let mutable batch = 0
        let mutable w = w0
        let l, g = grad' (q 0) w0
        let mutable l = l
        let mutable l' = l
        let mutable g = g
        let mutable p = -g
        let mutable u = DV.ZeroN g.Length
        let gcache = ref DV.Zero
        let l0 = l
        let mutable wbest = w0
        let mutable lbest = l0
        let mutable repllast = l0
        let mutable replbest = l0
        let mutable replbestchar = " "
        let mutable repvllast = if Dataset.isEmpty v then D 0.f else qvalid w0
        let mutable repvlbest = repvllast
        let mutable repvlbestchar = " "
        let ldiffchar l = if l < D 0.f then "↓" elif l > D 0.f then "↑" else "-"
        let mutable whist = []
        let mutable lhist = []
        let mutable stagnation = -par.ValidationInterval
        let mutable overfitting = 0
        let mutable validlimproved = false
        let mutable earlystop = false
        let echars = epochs.ToString().Length
        let bchars = batches.ToString().Length
        let ichars = (epochs * d.Length).ToString().Length
        let isNice (v:D) =
            let vf = float32 v
            if System.Single.IsNaN(vf) then false
            elif System.Single.IsInfinity(vf) then false
            elif System.Single.IsNegativeInfinity(vf) then false
            elif System.Single.IsPositiveInfinity(vf) then false
            else true
        let mutable diverged = false
        let start = System.DateTime.Now
        while (epoch < epochs) && (not earlystop) do
            batch <- 0
            while (batch < batches) && (not earlystop) do
                let l'', g', p' = dir w (q batch) g p gradclip
                l' <- l''
                if not (isNice l') then
                    if not par.Silent then Util.printLog "*** TRAINING DIVERGED: Loss is out of bounds ***"
                    earlystop <- true
                    diverged <- true
                whist <- w :: whist
                lhist <- l :: lhist
                if (l' < par.ImprovementThreshold * lbest) && (not diverged) then
                    wbest <- w
                    lbest <- l'
                if not (Dataset.isEmpty v) then
                    if not validlimproved then
                        overfitting <- overfitting + 1
                        match par.EarlyStopping with
                        | Early (_, o) ->
                            if overfitting >= o then
                                if not par.Silent then Util.printLog "*** EARLY STOPPING TRIGGERED: Overfitting ***"
                                earlystop <- true
                        | _ -> ()
                if batch % par.ValidationInterval = 0 then
                    let repldiff = l' - repllast
                    if l' < par.ImprovementThreshold * replbest then
                        replbest <- l'
                        replbestchar <- "▼"
                    else
                        replbestchar <- " "
                        if Dataset.isEmpty v then
                            stagnation <- stagnation + par.ValidationInterval
                            match par.EarlyStopping with
                            | Early (s, _) ->
                                if stagnation >= s then
                                    if not par.Silent then Util.printLog "*** EARLY STOPPING TRIGGERED: Stagnation of training loss ***"
                                    earlystop <- true
                            | _ -> ()
                    repllast <- l'
                    if Dataset.isEmpty v then
                        if not par.Silent then
                            match par.EarlyStopping with
                            | Early (s, _) -> Util.printLog (sprintf "%*i/%i | Batch %*i/%i | %O [%s%s] | Stag:%*i" echars (epoch + 1) epochs bchars (batch + 1) batches l' (ldiffchar repldiff) replbestchar (s.ToString().Length) stagnation)
                            | _ -> Util.printLog (sprintf "%*i/%i | Batch %*i/%i | %O [%s%s]" echars (epoch + 1) epochs bchars (batch + 1) batches l' (ldiffchar repldiff) replbestchar)
                    else
                        let vl' = qvalid w
                        let repvldiff = vl' - repvllast
                        if vl' < par.ImprovementThreshold * repvlbest then
                            repvlbest <- vl'
                            repvlbestchar <- "▼"
                            validlimproved <- true
                            stagnation <- 0
                            overfitting <- 0
                        else
                            repvlbestchar <- " "
                            validlimproved <- false
                            stagnation <- stagnation + par.ValidationInterval
                            match par.EarlyStopping with
                            | Early (s, _) ->
                                if stagnation >= s then
                                    if not par.Silent then Util.printLog "*** EARLY STOPPING TRIGGERED: Stagnation of validation loss ***"
                                    earlystop <- true
                            | _ -> ()
                        if not par.Silent then
                            match par.EarlyStopping with
                            | Early (s, o) -> Util.printLog (sprintf "%*i/%i | Batch %*i/%i | %O [%s%s] | Valid %O [%s%s] | Stag:%*i Ovfit:%*i" echars (epoch + 1) epochs bchars (batch + 1) batches l' (ldiffchar repldiff) replbestchar vl' (ldiffchar repvldiff) repvlbestchar (s.ToString().Length) stagnation (o.ToString().Length) overfitting)
                            | _ -> Util.printLog (sprintf "%*i/%i | Batch %*i/%i | %O [%s%s] | Valid %O [%s%s]" echars (epoch + 1) epochs bchars (batch + 1) batches l' (ldiffchar repldiff) replbestchar vl' (ldiffchar repvldiff) repvlbestchar)
                        repvllast <- vl'
                par.LoggingFunction epoch w l'
                let mutable u' = DV.Zero
                match lr epoch w (q batch) l' g' gcache p' with
                | :? D as a -> u' <- a * p'    // A scalar learning rate
                | :? DV as a -> u' <- a .* p'  // Vector of independent learning rates
                u' <- mom u u'
                w <- w + u'
                l <- l'
                g <- g'
                p <- p' // Or, p <- u'
                u <- u'
                batch <- batch + 1
                let iter = batches * epoch + batch
                if iter >= iters then earlystop <- true
            epoch <- epoch + 1
        if not diverged then
            let l'', _, _ = dir w (q 0) g p gradclip
            l' <- l''
            if l' < par.ImprovementThreshold * lbest then
                wbest <- w
                lbest <- l'
        let duration = System.DateTime.Now.Subtract(start)
        let wfinal = if par.ReturnBest || diverged then wbest else w
        let lfinal = if par.ReturnBest || diverged then lbest else l'
        let lchg = lfinal - l0
        let lchgs = lchg / (float32 duration.TotalSeconds)
        let es = (float epoch) / (duration.TotalSeconds)
        let em = (float epoch) / (duration.TotalMinutes)
        if not par.Silent then
            Util.printLog (sprintf "Duration       : %A" duration)
            Util.printLog (sprintf "Loss initial   : %O" (primal l0))
            Util.printLog (sprintf "Loss final     : %O %s" (primal lfinal) (if par.ReturnBest then "(Best)" else "(Last)"))
            Util.printLog (sprintf "Loss change    : %O (%.2f %%)" (primal lchg) (float32 (100 * lchg / l0)))
            Util.printLog (sprintf "Loss chg. / s  : %O" (primal lchgs))
            Util.printLog (sprintf "Epochs / s     : %A" es)
            Util.printLog (sprintf "Epochs / min   : %A" em)
            Util.printLog "--- Training finished"
        wfinal, lfinal, (whist |> List.rev |> List.toArray), (lhist |> List.rev |> List.toArray)


================================================
FILE: src/Hype/app.config
================================================
[XML configuration file; tag markup was lost in extraction and is not recoverable from this snapshot]

================================================
FILE: src/Hype/paket.references
================================================
DiffSharp
FSharp.Core
System.Drawing.Common