Repository: PCJones/MediathekArr Branch: main Commit: 2a1fa58a382b Files: 73 Total size: 170.8 KB Directory structure: gitextract_umgqsiw6/ ├── .gitattributes ├── .gitignore ├── Dockerfile ├── Dockerfile.arm64 ├── LICENSE.md ├── MediathekArr/ │ ├── Controllers/ │ │ ├── DownloadController.cs │ │ └── TController.cs │ ├── MediathekArrDownloader.csproj │ ├── Models/ │ │ ├── HistoryWrapper.cs │ │ ├── QueueWrapper.cs │ │ ├── SabnzbdDownloadStatus.cs │ │ ├── SabnzbdHistory.cs │ │ ├── SabnzbdHistoryItem.cs │ │ ├── SabnzbdQueue.cs │ │ └── SabnzbdQueueItem.cs │ ├── Program.cs │ ├── Properties/ │ │ └── launchSettings.json │ ├── Services/ │ │ ├── DownloadService.cs │ │ ├── ItemLookupService.cs │ │ └── MediathekSearchService.cs │ ├── appsettings.Development.json │ ├── appsettings.Production.json │ └── appsettings.json ├── MediathekArr.sln ├── MediathekArr.slnLaunch ├── MediathekArrLib/ │ ├── MediathekArrLib.csproj │ ├── Models/ │ │ ├── ApiResultItem.cs │ │ ├── MediathekApiResponse.cs │ │ ├── MediathekApiResult.cs │ │ ├── Newznab/ │ │ │ ├── Attribute.cs │ │ │ ├── Channel.cs │ │ │ ├── Enclosure.cs │ │ │ ├── Guid.cs │ │ │ ├── Item.cs │ │ │ ├── Response.cs │ │ │ └── Rss.cs │ │ ├── QueryInfo.cs │ │ ├── Rulesets/ │ │ │ ├── EpisodeType.cs │ │ │ ├── Filter.cs │ │ │ ├── IdentificationResult.cs │ │ │ ├── MatchType.cs │ │ │ ├── MatchedEpisodeInfo.cs │ │ │ ├── MatchingStrategy.cs │ │ │ ├── Media.cs │ │ │ ├── Pagination.cs │ │ │ ├── RegexRule.cs │ │ │ ├── Ruleset.cs │ │ │ ├── RulesetApiResponse.cs │ │ │ ├── TitleRegexRule.cs │ │ │ └── TitleRegexRuleType.cs │ │ ├── TvdbAlias.cs │ │ ├── TvdbData.cs │ │ ├── TvdbEpisode.cs │ │ └── TvdbInfoResponse.cs │ └── Utilities/ │ ├── JsonConverter.cs │ └── NewznabUtils.cs ├── MediathekArrServer/ │ ├── Controllers/ │ │ └── TController.cs │ ├── MediathekArrServer.csproj │ ├── Program.cs │ ├── Properties/ │ │ └── launchSettings.json │ ├── Services/ │ │ ├── ItemLookupService.cs │ │ ├── MediathekSearchFallbackHandler.cs │ │ ├── MediathekSearchService.cs │ │ 
└── RulesetBackgroundService.cs │ ├── appsettings.Development.json │ ├── appsettings.Production.json │ └── appsettings.json ├── README.md ├── api/ │ └── v1/ │ ├── db.php │ ├── get_show.php │ └── token_manager.php ├── build_and_push_docker_image.bat └── docker-compose.yml ================================================ FILE CONTENTS ================================================ ================================================ FILE: .gitattributes ================================================ ############################################################################### # Set default behavior to automatically normalize line endings. ############################################################################### * text=auto ############################################################################### # Set default behavior for command prompt diff. # # This is need for earlier builds of msysgit that does not have it on by # default for csharp files. # Note: This is only used by command line ############################################################################### #*.cs diff=csharp ############################################################################### # Set the merge driver for project and solution files # # Merging from the command prompt will add diff markers to the files if there # are conflicts (Merging from VS is not affected by the settings below, in VS # the diff markers are never inserted). Diff markers may cause the following # file extensions to fail to load in VS. An alternative would be to treat # these files as binary and thus will always conflict and require user # intervention with every merge. 
To do so, just uncomment the entries below ############################################################################### #*.sln merge=binary #*.csproj merge=binary #*.vbproj merge=binary #*.vcxproj merge=binary #*.vcproj merge=binary #*.dbproj merge=binary #*.fsproj merge=binary #*.lsproj merge=binary #*.wixproj merge=binary #*.modelproj merge=binary #*.sqlproj merge=binary #*.wwaproj merge=binary ############################################################################### # behavior for image files # # image files are treated as binary by default. ############################################################################### #*.jpg binary #*.png binary #*.gif binary ############################################################################### # diff behavior for common document formats # # Convert binary document formats to text before diffing them. This feature # is only available from the command line. Turn it on by uncommenting the # entries below. ############################################################################### #*.doc diff=astextplain #*.DOC diff=astextplain #*.docx diff=astextplain #*.DOCX diff=astextplain #*.dot diff=astextplain #*.DOT diff=astextplain #*.pdf diff=astextplain #*.PDF diff=astextplain #*.rtf diff=astextplain #*.RTF diff=astextplain ================================================ FILE: .gitignore ================================================ ## Ignore Visual Studio temporary files, build results, and ## files generated by popular Visual Studio add-ons. 
## ## Get latest from https://github.com/github/gitignore/blob/master/VisualStudio.gitignore # User-specific files *.rsuser *.suo *.user *.userosscache *.sln.docstates # User-specific files (MonoDevelop/Xamarin Studio) *.userprefs # Mono auto generated files mono_crash.* # Build results [Dd]ebug/ [Dd]ebugPublic/ [Rr]elease/ [Rr]eleases/ x64/ x86/ [Ww][Ii][Nn]32/ [Aa][Rr][Mm]/ [Aa][Rr][Mm]64/ bld/ [Bb]in/ [Oo]bj/ [Oo]ut/ [Ll]og/ [Ll]ogs/ # Visual Studio 2015/2017 cache/options directory .vs/ # Uncomment if you have tasks that create the project's static files in wwwroot #wwwroot/ # Visual Studio 2017 auto generated files Generated\ Files/ # MSTest test Results [Tt]est[Rr]esult*/ [Bb]uild[Ll]og.* # NUnit *.VisualState.xml TestResult.xml nunit-*.xml # Build Results of an ATL Project [Dd]ebugPS/ [Rr]eleasePS/ dlldata.c # Benchmark Results BenchmarkDotNet.Artifacts/ # .NET Core project.lock.json project.fragment.lock.json artifacts/ # ASP.NET Scaffolding ScaffoldingReadMe.txt # StyleCop StyleCopReport.xml # Files built by Visual Studio *_i.c *_p.c *_h.h *.ilk *.meta *.obj *.iobj *.pch *.pdb *.ipdb *.pgc *.pgd *.rsp *.sbr *.tlb *.tli *.tlh *.tmp *.tmp_proj *_wpftmp.csproj *.log *.vspscc *.vssscc .builds *.pidb *.svclog *.scc # Chutzpah Test files _Chutzpah* # Visual C++ cache files ipch/ *.aps *.ncb *.opendb *.opensdf *.sdf *.cachefile *.VC.db *.VC.VC.opendb # Visual Studio profiler *.psess *.vsp *.vspx *.sap # Visual Studio Trace Files *.e2e # TFS 2012 Local Workspace $tf/ # Guidance Automation Toolkit *.gpState # ReSharper is a .NET coding add-in _ReSharper*/ *.[Rr]e[Ss]harper *.DotSettings.user # TeamCity is a build add-in _TeamCity* # DotCover is a Code Coverage Tool *.dotCover # AxoCover is a Code Coverage Tool .axoCover/* !.axoCover/settings.json # Coverlet is a free, cross platform Code Coverage Tool coverage*.json coverage*.xml coverage*.info # Visual Studio code coverage results *.coverage *.coveragexml # NCrunch _NCrunch_* .*crunch*.local.xml nCrunchTemp_* # 
MightyMoose *.mm.* AutoTest.Net/ # Web workbench (sass) .sass-cache/ # Installshield output folder [Ee]xpress/ # DocProject is a documentation generator add-in DocProject/buildhelp/ DocProject/Help/*.HxT DocProject/Help/*.HxC DocProject/Help/*.hhc DocProject/Help/*.hhk DocProject/Help/*.hhp DocProject/Help/Html2 DocProject/Help/html # Click-Once directory publish/ # Publish Web Output *.[Pp]ublish.xml *.azurePubxml # Note: Comment the next line if you want to checkin your web deploy settings, # but database connection strings (with potential passwords) will be unencrypted *.pubxml *.publishproj # Microsoft Azure Web App publish settings. Comment the next line if you want to # checkin your Azure Web App publish settings, but sensitive information contained # in these scripts will be unencrypted PublishScripts/ # NuGet Packages *.nupkg # NuGet Symbol Packages *.snupkg # The packages folder can be ignored because of Package Restore **/[Pp]ackages/* # except build/, which is used as an MSBuild target. 
!**/[Pp]ackages/build/ # Uncomment if necessary however generally it will be regenerated when needed #!**/[Pp]ackages/repositories.config # NuGet v3's project.json files produces more ignorable files *.nuget.props *.nuget.targets # Microsoft Azure Build Output csx/ *.build.csdef # Microsoft Azure Emulator ecf/ rcf/ # Windows Store app package directories and files AppPackages/ BundleArtifacts/ Package.StoreAssociation.xml _pkginfo.txt *.appx *.appxbundle *.appxupload # Visual Studio cache files # files ending in .cache can be ignored *.[Cc]ache # but keep track of directories ending in .cache !?*.[Cc]ache/ # Others ClientBin/ ~$* *~ *.dbmdl *.dbproj.schemaview *.jfm *.pfx *.publishsettings orleans.codegen.cs # Including strong name files can present a security risk # (https://github.com/github/gitignore/pull/2483#issue-259490424) #*.snk # Since there are multiple workflows, uncomment next line to ignore bower_components # (https://github.com/github/gitignore/pull/1529#issuecomment-104372622) #bower_components/ # RIA/Silverlight projects Generated_Code/ # Backup & report files from converting an old project file # to a newer Visual Studio version. Backup files are not needed, # because we have git ;-) _UpgradeReport_Files/ Backup*/ UpgradeLog*.XML UpgradeLog*.htm ServiceFabricBackup/ *.rptproj.bak # SQL Server files *.mdf *.ldf *.ndf # Business Intelligence projects *.rdl.data *.bim.layout *.bim_*.settings *.rptproj.rsuser *- [Bb]ackup.rdl *- [Bb]ackup ([0-9]).rdl *- [Bb]ackup ([0-9][0-9]).rdl # Microsoft Fakes FakesAssemblies/ # GhostDoc plugin setting file *.GhostDoc.xml # Node.js Tools for Visual Studio .ntvs_analysis.dat node_modules/ # Visual Studio 6 build log *.plg # Visual Studio 6 workspace options file *.opt # Visual Studio 6 auto-generated workspace file (contains which files were open etc.) 
*.vbw # Visual Studio LightSwitch build output **/*.HTMLClient/GeneratedArtifacts **/*.DesktopClient/GeneratedArtifacts **/*.DesktopClient/ModelManifest.xml **/*.Server/GeneratedArtifacts **/*.Server/ModelManifest.xml _Pvt_Extensions # Paket dependency manager .paket/paket.exe paket-files/ # FAKE - F# Make .fake/ # CodeRush personal settings .cr/personal # Python Tools for Visual Studio (PTVS) __pycache__/ *.pyc # Cake - Uncomment if you are using it # tools/** # !tools/packages.config # Tabs Studio *.tss # Telerik's JustMock configuration file *.jmconfig # BizTalk build output *.btp.cs *.btm.cs *.odx.cs *.xsd.cs # OpenCover UI analysis results OpenCover/ # Azure Stream Analytics local run output ASALocalRun/ # MSBuild Binary and Structured Log *.binlog # NVidia Nsight GPU debugger configuration file *.nvuser # MFractors (Xamarin productivity tool) working folder .mfractor/ # Local History for Visual Studio .localhistory/ # BeatPulse healthcheck temp database healthchecksdb # Backup folder for Package Reference Convert tool in Visual Studio 2017 MigrationBackup/ # Ionide (cross platform F# VS Code tools) working folder .ionide/ # Fody - auto-generated XML schema FodyWeavers.xsd # MediathekArr specifics tvdb_cache.sqlite ================================================ FILE: Dockerfile ================================================ FROM mcr.microsoft.com/dotnet/sdk:9.0 AS build-env WORKDIR /app # Copy and restore dependencies COPY . 
./ RUN dotnet restore RUN dotnet publish -c Release -o out FROM mcr.microsoft.com/dotnet/aspnet:9.0 RUN apt-get update && apt-get install -y tar xz-utils && rm -rf /var/lib/apt/lists/* # Set working directory WORKDIR /app # Set up environment variables for user IDs #ARG PUID=1000 #ARG PGID=1000 #ENV PUID=${PUID} \ #PGID=${PGID} # Create a user and group with specified IDs #RUN addgroup --gid ${PGID} appgroup && \ # adduser --disabled-password --gecos "" --uid ${PUID} --gid ${PGID} appuser # Copy the built app from the build environment COPY --from=build-env /app/out . # Change ownership to non-root user #RUN chown -R appuser:appgroup /app #USER appuser ENTRYPOINT ["dotnet", "MediathekArrServer.dll"] ================================================ FILE: Dockerfile.arm64 ================================================ FROM --platform=linux/arm64 mcr.microsoft.com/dotnet/sdk:9.0 AS build-env WORKDIR /app # Copy and restore dependencies COPY . ./ RUN dotnet restore RUN dotnet publish -c Release -o out FROM --platform=linux/arm64 mcr.microsoft.com/dotnet/aspnet:9.0 RUN apt-get update && apt-get install -y tar xz-utils && rm -rf /var/lib/apt/lists/* # Set working directory WORKDIR /app # Set up environment variables for user IDs #ARG PUID=1000 #ARG PGID=1000 #ENV PUID=${PUID} \ #PGID=${PGID} # Create a user and group with specified IDs #RUN addgroup --gid ${PGID} appgroup && \ # adduser --disabled-password --gecos "" --uid ${PUID} --gid ${PGID} appuser # Copy the built app from the build environment COPY --from=build-env /app/out . # Change ownership to non-root user #RUN chown -R appuser:appgroup /app #USER appuser ENTRYPOINT ["dotnet", "MediathekArrServer.dll"] ================================================ FILE: LICENSE.md ================================================ # MediathekArr License ## Source Code License MediathekArr source code is licensed under the **MIT License**. See the [MIT License text below](#mit-license-text) for full details. 
--- ## Third-Party Dependencies This project includes and/or uses the following third-party software with their respective licenses: ### Open Source Components | Component | License | Source | |-----------|---------|--------| | **FFmpeg** | LGPL v2.1+ | https://ffmpeg.org | | **MKVToolNix** (mkvmerge) | GPL v2+ | https://mkvtoolnix.download | | **gosu** | Apache 2.0 | https://github.com/tianon/gosu | | **Debian/Linux Packages** | Various (GPL, LGPL, others) | https://packages.debian.org | ### Important: Docker Image Distribution When MediathekArr is distributed as a **Docker image** (via Docker Hub or similar), the image layers include GPL-licensed components (primarily MKVToolNix/mkvmerge and FFmpeg). **This means:** 1. **Source Code Availability**: The source code for GPL-licensed components must remain publicly available. These are available from: - FFmpeg: https://github.com/FFmpeg/FFmpeg - MKVToolNix: https://github.com/mkvtoolnix/mkvtoolnix - Debian packages: Available via `deb-src` repositories at https://deb.debian.org 2. **Attribution**: The GPL and LGPL licenses require that copyright notices and license attributions be preserved. This file serves as that attribution for the Docker image distribution. 3. **User Rights**: Users of the Docker image have the right to: - Access the source code of GPL/LGPL components - Modify and rebuild the image with modified components - Rebuild the Dockerfile from this repository ### How to Comply If you redistribute this Docker image or derivative works: 1. **Include this LICENSE.md file** or equivalent attribution 2. **Preserve the Dockerfile** (which documents the build process) 3. **Link to source repositories**: Users should be able to obtain GPL source code via: - The Dockerfile recipe (which references Debian packages) - Direct links to FFmpeg and MKVToolNix repositories - Debian's source package repositories ### Relicensing Note The **MediathekArr source code** remains MIT-licensed. 
However, when you **distribute the compiled Docker image**, it becomes a derivative work that includes GPL-licensed software. This doesn't change the MIT license of the source code, but it means the distributed artifact must comply with GPL requirements for GPL-licensed components it contains. For details on license compatibility, see: https://www.gnu.org/licenses/license-list.html --- ## MIT License Text MIT License Copyright (c) 2026 PCJones Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
================================================ FILE: MediathekArr/Controllers/DownloadController.cs ================================================ using MediathekArr.Models; using MediathekArr.Services; using Microsoft.AspNetCore.Mvc; using System.Reflection; using System.Text.RegularExpressions; namespace MediathekArr.Controllers; [ApiController] [Route("[controller]")] public partial class DownloadController(DownloadService downloadService) : ControllerBase { private readonly DownloadService _downloadService = downloadService; [HttpGet("api")] public IActionResult GetVersion([FromQuery] string mode, [FromQuery] string? name = null, [FromQuery] string? value = null, [FromQuery] int? del_files = 0) { return mode switch { "version" => Ok(new { version = "4.3.3" }), "get_config" => Content(GetConfigResponse(), "application/json"), "queue" => Ok(GetQueue()), "history" => (name == "delete" && !string.IsNullOrEmpty(value)) ? DeleteHistoryItem(value, del_files.GetValueOrDefault() == 1) : Ok(GetHistory()), _ => BadRequest(new { error = "Invalid mode" }), }; } private IActionResult DeleteHistoryItem(string nzoId, bool delFiles) { // Call the DeleteHistoryItem method in the service bool isDeleted = _downloadService.DeleteHistoryItem(nzoId, delFiles); // Return success or failure response based on deletion result return isDeleted ? 
Ok(new { status = true }) : NotFound(new { status = false, error = "Item not found" }); } [HttpPost("api")] public async Task AddFile([FromQuery] string mode, [FromQuery] string cat) { if (mode != "addfile") { return BadRequest(new { error = "Invalid mode" }); } // Read the fake NZB file from the request body using var reader = new StreamReader(Request.Body); var requestBody = await reader.ReadToEndAsync(); var filenameMatch = FileNameRegex().Match(requestBody); var urlMatch = UrlRegex().Match(requestBody); if (!filenameMatch.Success || !urlMatch.Success) { return BadRequest(new { error = "Invalid NZB format" }); } var fileName = filenameMatch.Groups[1].Value; var downloadUrl = urlMatch.Groups[1].Value; // Add to the download queue using DownloadService and capture the created queue item var queueItem = _downloadService.AddToQueue(downloadUrl, fileName, cat); // Return response in the specified format return Ok(new { status = true, nzo_ids = new[] { queueItem.Id} }); } private QueueWrapper GetQueue() { var queueItems = _downloadService.GetQueue(); var queue = new SabnzbdQueue { Items = queueItems.ToList() }; return new QueueWrapper { Queue = queue }; } private HistoryWrapper GetHistory() { var historytems = _downloadService.GetHistory(); var history = new SabnzbdHistory { Items = historytems.ToList() }; return new HistoryWrapper { History = history }; } private static string GetConfigResponse() { var startupPath = Path.GetDirectoryName(Assembly.GetExecutingAssembly().Location) ?? string.Empty; var downloadFolderPathMapping = Environment.GetEnvironmentVariable("DOWNLOAD_FOLDER_PATH_MAPPING"); var completeDir = !string.IsNullOrEmpty(downloadFolderPathMapping) ? 
Path.Combine(downloadFolderPathMapping) : Path.Combine(startupPath, "downloads"); ; return @$"{{ ""config"": {{ ""misc"": {{ ""complete_dir"": ""{completeDir.Replace("\\", "/")}"", ""enable_tv_sorting"": false, ""enable_movie_sorting"": false, ""pre_check"": false, ""history_retention"": ""all"" }}, ""categories"": [ {{ ""name"": ""sonarr"", ""pp"": """", ""script"": ""Default"", ""dir"": """", ""priority"": -100 }}, {{ ""name"": ""tv"", ""pp"": """", ""script"": ""Default"", ""dir"": """", ""priority"": -100 }}, {{ ""name"": ""radarr"", ""pp"": """", ""script"": ""Default"", ""dir"": """", ""priority"": -100 }}, {{ ""name"": ""movies"", ""pp"": """", ""script"": ""Default"", ""dir"": """", ""priority"": -100 }}, {{ ""name"": ""sonarr_blackhole"", ""pp"": """", ""script"": ""Default"", ""dir"": """", ""priority"": -100 }}, {{ ""name"": ""radarr_blackhole"", ""pp"": """", ""script"": ""Default"", ""dir"": """", ""priority"": -100 }}, ], ""sorters"": [] }} }}"; } [GeneratedRegex(@"filename=""([^""]+)\.nzb""")] private static partial Regex FileNameRegex(); [GeneratedRegex(@"")] private static partial Regex UrlRegex(); } ================================================ FILE: MediathekArr/Controllers/TController.cs ================================================ using MediathekArr.Services; using Microsoft.AspNetCore.Mvc; using System.Text; namespace MediathekArr.Controllers; [ApiController] [Route("api")] public class TController(MediathekSearchService mediathekSearchService, ItemLookupService itemLookupService) : ControllerBase { private readonly MediathekSearchService _mediathekSearchService = mediathekSearchService; private readonly ItemLookupService _itemLookupService = itemLookupService; [HttpGet] public async Task GetCapsXml([FromQuery] string t) { string q = HttpContext.Request.Query["q"]; string imdbid = HttpContext.Request.Query["imdbid"]; string tvdbid = HttpContext.Request.Query["tvdbid"]; string season = HttpContext.Request.Query["season"]; string episode 
= HttpContext.Request.Query["ep"]; string cat = HttpContext.Request.Query["cat"]; if (t == "caps") { string xmlContent = @" "; return Content(xmlContent, "application/xml", Encoding.UTF8); } else if (t == "tvsearch" || t == "search" || t == "movie") { try { if (!string.IsNullOrEmpty(tvdbid) && int.TryParse(tvdbid, out var parsedTvdbid)) { var tvdbData = (await _itemLookupService.GetShowInfoByTvdbId(parsedTvdbid)).Data; string searchResults = await _mediathekSearchService.FetchSearchResultsFromApiById(tvdbData, season, episode); return Content(searchResults, "application/xml", Encoding.UTF8); } else { string searchResults = await _mediathekSearchService.FetchSearchResultsFromApiByString(q, season); return Content(searchResults, "application/xml", Encoding.UTF8); } } catch (HttpRequestException ex) { return BadRequest(new { error = ex.Message }); } } return NotFound(); } [HttpGet("fake_nzb_download")] public IActionResult FakeNzbDownload([FromQuery] string encodedUrl, [FromQuery] string encodedTitle) { string decodedUrl; string decodedTitle; try { var base64EncodedBytesUrl = Convert.FromBase64String(encodedUrl); decodedUrl = Encoding.UTF8.GetString(base64EncodedBytesUrl); var base64EncodedBytesTitle = Convert.FromBase64String(encodedTitle); decodedTitle = Encoding.UTF8.GetString(base64EncodedBytesTitle); } catch (FormatException) { return BadRequest("Invalid base64 string."); } // Define a basic NZB XML structure with the comment and encoded URL. 
var nzbContent = $@" a.b.zdf ExampleSegmentID@news.example.com "; // Convert the NZB XML content to byte array var fileContent = Encoding.UTF8.GetBytes(nzbContent); // Set the .nzb file name var nzbFileName = $"mediathek-{DateTime.Now:yyyy-MM-dd_HH-mm-ss}.nzb"; return File(fileContent, "application/x-nzb", nzbFileName); } } ================================================ FILE: MediathekArr/MediathekArrDownloader.csproj ================================================  net9.0 enable enable linux-x64 True mcr.microsoft.com/dotnet/aspnet:9.0 c655a1a3-0f6d-45f1-9615-dc576c4c0b84 ================================================ FILE: MediathekArr/Models/HistoryWrapper.cs ================================================ using System.Text.Json.Serialization; namespace MediathekArr.Models; public class HistoryWrapper { [JsonPropertyName("history")] public SabnzbdHistory History { get; set; } } ================================================ FILE: MediathekArr/Models/QueueWrapper.cs ================================================ using System.Text.Json.Serialization; namespace MediathekArr.Models; public class QueueWrapper { [JsonPropertyName("queue")] public SabnzbdQueue Queue { get; set; } } ================================================ FILE: MediathekArr/Models/SabnzbdDownloadStatus.cs ================================================ namespace MediathekArr.Models; public enum SabnzbdDownloadStatus { Completed, Failed, Downloading, Queued, Extracting } ================================================ FILE: MediathekArr/Models/SabnzbdHistory.cs ================================================ using System.Text.Json.Serialization; namespace MediathekArr.Models; public class SabnzbdHistory { [JsonPropertyName("slots")] public List Items { get; set; } } ================================================ FILE: MediathekArr/Models/SabnzbdHistoryItem.cs ================================================ using System.Text.Json.Serialization; namespace MediathekArr.Models; 
public class SabnzbdHistoryItem { [JsonPropertyName("fail_message")] public string FailMessage { get; set; } [JsonPropertyName("bytes")] public long Size { get; set; } [JsonPropertyName("category")] public string Category { get; set; } [JsonPropertyName("nzb_name")] public string NzbName { get; set; } [JsonPropertyName("download_time")] public int DownloadTime { get; set; } [JsonPropertyName("storage")] public string Storage { get; set; } [JsonPropertyName("status")] [JsonConverter(typeof(JsonStringEnumConverter))] public SabnzbdDownloadStatus Status { get; set; } [JsonPropertyName("nzo_id")] public string Id { get; set; } [JsonPropertyName("name")] public string Title { get; set; } } ================================================ FILE: MediathekArr/Models/SabnzbdQueue.cs ================================================ using System.Text.Json.Serialization; namespace MediathekArr.Models; public class SabnzbdQueue { [JsonPropertyName("paused")] public bool Paused => false; [JsonPropertyName("slots")] public List Items { get; set; } } ================================================ FILE: MediathekArr/Models/SabnzbdQueueItem.cs ================================================ using System.Text.Json.Serialization; namespace MediathekArr.Models; public class SabnzbdQueueItem { [JsonPropertyName("status")] [JsonConverter(typeof(JsonStringEnumConverter))] public SabnzbdDownloadStatus Status { get; set; } [JsonPropertyName("index")] public int Index { get; set; } [JsonPropertyName("timeleft")] public string Timeleft { get; set; } // "0:00:00" [JsonPropertyName("mb")] public string Size { get; set; } // "1163.54" [JsonPropertyName("filename")] public string Title { get; set; } [JsonPropertyName("priority")] public string Priority => "Normal"; [JsonPropertyName("cat")] public string Category { get; set; } [JsonPropertyName("mbleft")] public string Sizeleft { get; set; } // "756.4 MB" [JsonPropertyName("percentage")] public string Percentage { get; set; } // "34" 
[JsonPropertyName("nzo_id")] public string Id { get; set; } = System.Guid.NewGuid().ToString(); } ================================================ FILE: MediathekArr/Program.cs ================================================ using MediathekArr.Services; using Scalar.AspNetCore; var builder = WebApplication.CreateBuilder(args); builder.Services.AddControllers(); builder.Services.AddOpenApi(); builder.Services.AddMemoryCache(); builder.Services.AddHttpClient("MediathekClient", client => { client.DefaultRequestHeaders.UserAgent.ParseAdd("Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:131.0) Gecko/20100101 Firefox/131.0"); client.DefaultRequestHeaders.AcceptEncoding.ParseAdd("gzip"); client.DefaultRequestHeaders.Accept.ParseAdd("application/json"); }) .ConfigurePrimaryHttpMessageHandler(() => new HttpClientHandler { AutomaticDecompression = System.Net.DecompressionMethods.GZip | System.Net.DecompressionMethods.Deflate }); builder.Services.AddSingleton(); builder.Services.AddSingleton(); builder.Services.AddSingleton(); var app = builder.Build(); // Middleware to log all incoming requests app.Use(async (context, next) => { // Log the incoming request details var logger = app.Services.GetRequiredService>(); var request = context.Request; logger.LogInformation("Incoming Request: {method} {url}", request.Method, request.Path + request.QueryString); // Check if the request is a POST and has a body if (request.Method == HttpMethods.Post && request.ContentLength > 0) { // Enable buffering so the request can be read multiple times request.EnableBuffering(); } // Call the next middleware in the pipeline await next.Invoke(); }); // Configure the HTTP request pipeline. 
if (app.Environment.IsDevelopment()) { app.MapOpenApi(); app.MapScalarApiReference(); } app.UseHttpsRedirection(); app.UseAuthorization(); app.MapControllers(); app.Run(); ================================================ FILE: MediathekArr/Properties/launchSettings.json ================================================ { "profiles": { "http": { "commandName": "Project", "launchBrowser": true, "launchUrl": "scalar/v1", "environmentVariables": { "ASPNETCORE_ENVIRONMENT": "Development", "ASPNETCORE_URLS": "http://localhost:5007" }, "dotnetRunMessages": true, "applicationUrl": "http://localhost:5007" }, "https": { "commandName": "Project", "launchBrowser": true, "launchUrl": "scalar/v1", "environmentVariables": { "ASPNETCORE_ENVIRONMENT": "Development", "ASPNETCORE_URLS": "https://localhost:5007" }, "dotnetRunMessages": true, "applicationUrl": "https://localhost:5007" }, "Container (.NET SDK)": { "commandName": "SdkContainer", "launchBrowser": true, "launchUrl": "{Scheme}://{ServiceHost}:{ServicePort}/scalar/v1", "environmentVariables": { "ASPNETCORE_HTTPS_PORTS": "8081", "ASPNETCORE_HTTP_PORTS": "8080" }, "publishAllPorts": true, "useSSL": true } }, "$schema": "http://json.schemastore.org/launchsettings.json" } ================================================ FILE: MediathekArr/Services/DownloadService.cs ================================================ using MediathekArr.Models; using Microsoft.Extensions.Logging; using System.Collections.Concurrent; using System.Diagnostics; using System.IO.Compression; using System.Reflection; using System.Runtime.InteropServices; namespace MediathekArr.Services; public partial class DownloadService { private readonly ILogger _logger; private readonly ConcurrentQueue _downloadQueue = new(); private readonly List _downloadHistory = new(); private static readonly HttpClient _httpClient = new(); private static readonly SemaphoreSlim _semaphore = new(2); // Limit concurrent downloads to 2 private readonly string _completeDir; private 
readonly string _ffmpegPath; private readonly bool _isWindows; public DownloadService(ILogger logger) { _logger = logger; _isWindows = RuntimeInformation.IsOSPlatform(OSPlatform.Windows); // Set complete_dir based on the application's startup path var startupPath = Path.GetDirectoryName(Assembly.GetExecutingAssembly().Location) ?? string.Empty; _completeDir = Path.Combine(startupPath, "downloads"); _ffmpegPath = Path.Combine(startupPath, "ffmpeg", _isWindows ? "ffmpeg.exe" : "ffmpeg"); // Ensure FFmpeg is available Task.Run(EnsureFfmpegExistsAsync).Wait(); } public IEnumerable GetQueue() => [.. _downloadQueue]; public IEnumerable GetHistory() => _downloadHistory; public SabnzbdQueueItem AddToQueue(string url, string fileName, string category) { var queueItem = new SabnzbdQueueItem { Status = SabnzbdDownloadStatus.Queued, Index = _downloadQueue.Count, Timeleft = "10:00:00", Size = "Unknown", Title = fileName, Category = category, Sizeleft = "Unknown", Percentage = "0" }; _downloadQueue.Enqueue(queueItem); Task.Run(() => StartDownloadAsync(url, queueItem)); return queueItem; } private async Task StartDownloadAsync(string url, SabnzbdQueueItem queueItem) { await _semaphore.WaitAsync(); var stopwatch = Stopwatch.StartNew(); try { _logger.LogInformation("Starting download for {Title} from URL: {URL}", queueItem.Title, url); await DownloadFileAsync(url, queueItem); if (queueItem.Status != SabnzbdDownloadStatus.Failed) { _logger.LogInformation("Download complete for {Title}. 
Starting conversion to MKV.", queueItem.Title); await ConvertMp4ToMkvAsync(queueItem, stopwatch); } else { _logger.LogWarning("Download failed for {Title}, skipping conversion.", queueItem.Title); } } catch (Exception ex) { _logger.LogError(ex, "Error occurred during the download or conversion of {Title}.", queueItem.Title); } finally { _semaphore.Release(); _downloadQueue.TryDequeue(out _); stopwatch.Stop(); } } private async Task DownloadFileAsync(string url, SabnzbdQueueItem queueItem) { try { var categoryDir = Path.Combine(_completeDir, queueItem.Category); _logger.LogInformation("Ensuring directory exists for category {Category} at path: {Path}", queueItem.Category, categoryDir); Directory.CreateDirectory(categoryDir); var fileExtension = Path.GetExtension(url) ?? ".mp4"; var filePath = Path.Combine(categoryDir, queueItem.Title + fileExtension); _logger.LogInformation("Starting download of file to path: {Path} with extension {Extension}", filePath, fileExtension); queueItem.Status = SabnzbdDownloadStatus.Downloading; var response = await _httpClient.GetAsync(url, HttpCompletionOption.ResponseHeadersRead); var totalSize = response.Content.Headers.ContentLength ?? 
0; queueItem.Size = (totalSize / (1024.0 * 1024.0)).ToString("F2"); _logger.LogInformation("Total file size for {Title}: {Size} MB", queueItem.Title, queueItem.Size); using (var contentStream = await response.Content.ReadAsStreamAsync()) using (var fileStream = new FileStream(filePath, FileMode.Create, FileAccess.Write, FileShare.None)) { var buffer = new byte[8192]; var totalRead = 0L; int bytesRead; while ((bytesRead = await contentStream.ReadAsync(buffer.AsMemory(0, buffer.Length))) > 0) { await fileStream.WriteAsync(buffer.AsMemory(0, bytesRead)); totalRead += bytesRead; // Update queue item progress queueItem.Sizeleft = ((totalSize - totalRead) / (1024.0 * 1024.0)).ToString("F2"); queueItem.Percentage = (totalRead / (double)totalSize * 100).ToString("F0"); _logger.LogDebug("Download progress for {Title}: {Percentage}% - {SizeLeft} MB remaining", queueItem.Title, queueItem.Percentage, queueItem.Sizeleft); } } queueItem.Timeleft = "00:00:00"; _logger.LogInformation("Download completed for {Title}. File saved to {Path}", queueItem.Title, filePath); } catch (Exception ex) { queueItem.Status = SabnzbdDownloadStatus.Failed; _logger.LogError(ex, "Download failed for {Title}. 
Adding to download history as failed.", queueItem.Title); _downloadHistory.Add(new SabnzbdHistoryItem { Title = queueItem.Title, NzbName = queueItem.Title, Category = queueItem.Category, Size = 0, DownloadTime = 0, Storage = null, Status = SabnzbdDownloadStatus.Failed, Id = queueItem.Id }); } } public bool DeleteHistoryItem(string nzoId, bool delFiles) { var item = _downloadHistory.FirstOrDefault(h => h.Id == nzoId); if (item == null) { return false; } // Optionally delete the associated file if (delFiles && !string.IsNullOrEmpty(item.Storage) && File.Exists(item.Storage)) { try { File.Delete(item.Storage); } catch (Exception ex) { Console.WriteLine($"Error deleting file: {ex.Message}"); } } // Remove the item from the history list _downloadHistory.Remove(item); return true; } private async Task ConvertMp4ToMkvAsync(SabnzbdQueueItem queueItem, Stopwatch stopwatch) { var categoryDir = Path.Combine(_completeDir, queueItem.Category); var mp4Path = Path.Combine(categoryDir, queueItem.Title + ".mp4"); var mkvPath = Path.Combine(categoryDir, queueItem.Title + ".mkv"); if (!File.Exists(mp4Path)) { queueItem.Status = SabnzbdDownloadStatus.Failed; _logger.LogWarning("MP4 file not found for conversion. Path: {Mp4Path}. Marking as failed.", mp4Path); return; } queueItem.Status = SabnzbdDownloadStatus.Extracting; _logger.LogInformation("Starting conversion of {Title} from MP4 to MKV. 
MP4 Path: {Mp4Path}, MKV Path: {MkvPath}", queueItem.Title, mp4Path, mkvPath); var ffmpegArgs = $"-i \"{mp4Path}\" -map 0:v -map 0:a -c copy -metadata:s:v:0 language=ger -metadata:s:a:0 language=ger \"{mkvPath}\""; var process = new Process { StartInfo = new ProcessStartInfo { FileName = _ffmpegPath, Arguments = ffmpegArgs, RedirectStandardOutput = true, RedirectStandardError = true, UseShellExecute = false, CreateNoWindow = true } }; try { process.Start(); _logger.LogInformation("FFmpeg process started for {Title} with arguments: {Arguments}", queueItem.Title, ffmpegArgs); var standardErrorTask = process.StandardError.ReadToEndAsync(); await process.WaitForExitAsync(); string ffmpegOutput = await standardErrorTask; if (process.ExitCode == 0) { queueItem.Status = SabnzbdDownloadStatus.Completed; _logger.LogInformation("Conversion completed successfully for {Title}. Output path: {MkvPath}", queueItem.Title, mkvPath); } else { queueItem.Status = SabnzbdDownloadStatus.Failed; _logger.LogError("FFmpeg conversion failed for {Title}. Exit code: {ExitCode}. Error output: {ErrorOutput}", queueItem.Title, process.ExitCode, ffmpegOutput); } File.Delete(mp4Path); double sizeInMB = 0; if (double.TryParse(queueItem.Size.Replace("GB", "").Replace("MB", "").Trim(), out double size)) { sizeInMB = queueItem.Size.Contains("GB") ? size * 1024 : size; } var downloadFolderPathMapping = Environment.GetEnvironmentVariable("DOWNLOAD_FOLDER_PATH_MAPPING"); var storagePath = !string.IsNullOrEmpty(downloadFolderPathMapping) ? 
Path.Combine(downloadFolderPathMapping, queueItem.Category, queueItem.Title + ".mkv") : mkvPath; // Move completed download to history var historyItem = new SabnzbdHistoryItem { Title = queueItem.Title, NzbName = queueItem.Title, Category = queueItem.Category, Size = (long)(sizeInMB * 1024 * 1024), // Convert MB to bytes DownloadTime = (int)stopwatch.Elapsed.TotalSeconds, Storage = storagePath, Status = queueItem.Status, Id = queueItem.Id }; _downloadHistory.Add(historyItem); _logger.LogInformation("Download history updated for {Title}. Status: {Status}, Download Time: {DownloadTime}s, Size: {Size} bytes", queueItem.Title, queueItem.Status, historyItem.DownloadTime, historyItem.Size); } catch (Exception ex) { queueItem.Status = SabnzbdDownloadStatus.Failed; _logger.LogError(ex, "An error occurred during the conversion of {Title} from MP4 to MKV.", queueItem.Title); } } private async Task EnsureFfmpegExistsAsync() { if (!File.Exists(_ffmpegPath)) { _logger.LogInformation("FFmpeg not found at path {FfmpegPath}. Starting download...", _ffmpegPath); // URLs for downloading FFmpeg based on OS string ffmpegDownloadUrl = _isWindows ? "https://www.gyan.dev/ffmpeg/builds/ffmpeg-release-essentials.zip" : "https://johnvansickle.com/ffmpeg/releases/ffmpeg-release-amd64-static.tar.xz"; var tempFilePath = Path.Combine(Path.GetTempPath(), _isWindows ? "ffmpeg.zip" : "ffmpeg.tar.xz"); var ffmpegDir = Path.Combine(Path.GetDirectoryName(_ffmpegPath) ?? 
string.Empty); try { // Download FFmpeg file using (var response = await _httpClient.GetAsync(ffmpegDownloadUrl, HttpCompletionOption.ResponseHeadersRead)) using (var fileStream = new FileStream(tempFilePath, FileMode.Create, FileAccess.Write, FileShare.None)) { await response.Content.CopyToAsync(fileStream); _logger.LogInformation("FFmpeg downloaded to temporary path {TempFilePath}", tempFilePath); } Directory.CreateDirectory(ffmpegDir); _logger.LogInformation("FFmpeg directory ensured at {FfmpegDir}", ffmpegDir); // Extract FFmpeg based on the OS if (_isWindows) { ZipFile.ExtractToDirectory(tempFilePath, ffmpegDir); _logger.LogInformation("FFmpeg extracted in Windows environment."); // Move extracted ffmpeg.exe to the expected path var extractedPath = Directory.GetFiles(ffmpegDir, "ffmpeg.exe", SearchOption.AllDirectories).FirstOrDefault(); if (extractedPath != null) { File.Move(extractedPath, _ffmpegPath, true); _logger.LogInformation("FFmpeg moved to final path {FfmpegPath}", _ffmpegPath); } } else { // Linux/macOS extraction var extractionDir = Path.Combine(ffmpegDir, "extracted"); Directory.CreateDirectory(extractionDir); _logger.LogInformation("Starting extraction of FFmpeg in Linux environment."); var tarProcess = new Process { StartInfo = new ProcessStartInfo { FileName = "tar", Arguments = $"-xf \"{tempFilePath}\" -C \"{extractionDir}\"", RedirectStandardOutput = true, RedirectStandardError = true, UseShellExecute = false, CreateNoWindow = true } }; tarProcess.Start(); await tarProcess.WaitForExitAsync(); if (tarProcess.ExitCode != 0) { string error = await tarProcess.StandardError.ReadToEndAsync(); _logger.LogError("Error extracting FFmpeg: {Error}", error); return; } _logger.LogInformation("FFmpeg extraction completed."); // Locate the extracted FFmpeg binary var extractedPath = Directory.GetFiles(extractionDir, "ffmpeg", SearchOption.AllDirectories).FirstOrDefault(); if (extractedPath != null) { File.Move(extractedPath, _ffmpegPath, true); 
_logger.LogInformation("FFmpeg moved to final path {FfmpegPath}", _ffmpegPath); // Ensure the binary is executable var chmodProcess = new Process { StartInfo = new ProcessStartInfo { FileName = "chmod", Arguments = $"+x \"{_ffmpegPath}\"", RedirectStandardOutput = true, RedirectStandardError = true, UseShellExecute = false, CreateNoWindow = true } }; chmodProcess.Start(); await chmodProcess.WaitForExitAsync(); _logger.LogInformation("Executable permissions set for FFmpeg at {FfmpegPath}", _ffmpegPath); } else { _logger.LogError("FFmpeg binary not found after extraction."); } } } catch (Exception ex) { _logger.LogError(ex, "An error occurred during FFmpeg download or extraction."); } finally { if (File.Exists(tempFilePath)) { File.Delete(tempFilePath); _logger.LogInformation("Temporary download file deleted at {TempFilePath}", tempFilePath); } var extractionDir = Path.Combine(ffmpegDir, "extracted"); if (Directory.Exists(extractionDir)) { Directory.Delete(extractionDir, true); _logger.LogInformation("Temporary extraction directory deleted at {ExtractionDir}", extractionDir); } } _logger.LogInformation("FFmpeg download and setup complete."); } else { _logger.LogInformation("FFmpeg already exists at path {FfmpegPath}. Skipping download.", _ffmpegPath); } } } ================================================ FILE: MediathekArr/Services/ItemLookupService.cs ================================================ using MediathekArrLib.Models; using Microsoft.Extensions.Caching.Memory; using System.Text.Json; namespace MediathekArr.Services; public class ItemLookupService(IHttpClientFactory httpClientFactory, IConfiguration configuration, IMemoryCache memoryCache) { private readonly HttpClient _httpClient = httpClientFactory.CreateClient(); private readonly string _apiBaseUrl = configuration["MEDIATHEKARR_API_BASE_URL"] ?? 
"https://mediathekarr.pcjones.de/api/v1"; private readonly IMemoryCache _memoryCache = memoryCache; private static JsonSerializerOptions GetJsonSerializerOptions() { return new JsonSerializerOptions { PropertyNameCaseInsensitive = true }; } public async Task GetShowInfoByTvdbId(int tvdbid) { var cacheKey = $"TvdbInfo_{tvdbid}"; if (_memoryCache.TryGetValue(cacheKey, out TvdbInfoResponse? cachedTvdbInfo)) { if (cachedTvdbInfo != null) { return cachedTvdbInfo; } } var requestUrl = $"{_apiBaseUrl}/get_show.php?tvdbid={tvdbid}"; var response = await _httpClient.GetAsync(requestUrl); if (!response.IsSuccessStatusCode) { var errorContent = await response.Content.ReadAsStringAsync(); throw new HttpRequestException($"Error fetching data: {errorContent}"); } var jsonResponse = await response.Content.ReadAsStringAsync(); var tvdbInfo = JsonSerializer.Deserialize(jsonResponse, GetJsonSerializerOptions()); if (tvdbInfo == null || tvdbInfo.Status != "success" || tvdbInfo.Data == null) { throw new HttpRequestException($"Failed to fetch TVDB data. 
Response: {jsonResponse}"); } _memoryCache.Set(cacheKey, tvdbInfo, TimeSpan.FromHours(12)); return tvdbInfo; } } ================================================ FILE: MediathekArr/Services/MediathekSearchService.cs ================================================ using System.Globalization; using System.Text; using System.Text.Json; using System.Text.RegularExpressions; using System.Xml.Serialization; using MediathekArrLib.Models; using MediathekArrLib.Models.Newznab; using Microsoft.Extensions.Caching.Memory; using Guid = MediathekArrLib.Models.Newznab.Guid; namespace MediathekArr.Services { public partial class MediathekSearchService(IHttpClientFactory httpClientFactory, IMemoryCache cache) { private readonly IMemoryCache _cache = cache; private readonly HttpClient _httpClient = httpClientFactory.CreateClient("MediathekClient"); private readonly TimeSpan _cacheTimeSpan = TimeSpan.FromMinutes(55); private static readonly string[] SkipKeywords = ["Audiodeskription", "(klare Sprache)", "(Gebärdensprache)", "Trailer", "Outtakes:"]; private static readonly string[] queryField = ["topic"]; public async Task FetchSearchResultsFromApiById(TvdbData tvdbData, string? season, string? episodeNumber) { var cacheKey = $"tvdb_{tvdbData.Id}_{season ?? "null"}_{episodeNumber ?? "null"}"; if (_cache.TryGetValue(cacheKey, out string? cachedResponse)) { return cachedResponse ?? ""; } // Find correct episode in tvdbData TvdbEpisode? episode; if (season?.Length == 4 && (episodeNumber?.Contains('/') ?? 
false)) { var episodeNumberSplitted = episodeNumber?.Split('/'); if (episodeNumberSplitted?.Length == 2 && DateTime.TryParse($"{season}-{episodeNumberSplitted[0]}-{episodeNumberSplitted[1]}", out DateTime searchAirDate)) { episode = tvdbData.FindEpisodeByAirDate(searchAirDate); } else { episode = null; } } else { episode = tvdbData.FindEpisodeBySeasonAndNumber(season, episodeNumber); } if (episode is null || episode.Aired is null || episode.Aired.Value.Year <= 1970) { _cache.Set(cacheKey, string.Empty, _cacheTimeSpan); return ConvertIdSearchApiResponseToRss(null, string.Empty, string.Empty, tvdbData); } var queries = new List(); var searchName = string.IsNullOrEmpty(tvdbData.GermanName) ? tvdbData.Name : tvdbData.GermanName; queries.Add(new { fields = queryField, query = searchName }); var requestBody = new { queries, sortBy = "timestamp", sortOrder = "desc", future = true, offset = 0, size = 5000 // 5000 for id search }; var requestContent = new StringContent(JsonSerializer.Serialize(requestBody), Encoding.UTF8); var response = await _httpClient.PostAsync("https://mediathekviewweb.de/api/query", requestContent); if (response.IsSuccessStatusCode) { var apiResponse = await response.Content.ReadAsStringAsync(); var filteredResponse = ApplyFilters(apiResponse, episode); var newznabRssResponse = ConvertIdSearchApiResponseToRss(filteredResponse, episode.SeasonNumber.ToString(), episode.EpisodeNumber.ToString(), tvdbData); _cache.Set(cacheKey, newznabRssResponse, _cacheTimeSpan); return newznabRssResponse; } return null; } private static MediathekApiResponse? 
ApplyFilters(string apiResponse, TvdbEpisode episode) { var responseObject = JsonSerializer.Deserialize(apiResponse); if (responseObject?.Result?.Results == null) { return null; } var initialResults = responseObject.Result.Results; var resultsFilteredByRuntime = FilterByRuntime(initialResults, episode.Runtime); var resultsByAiredDate = FilterByAiredDate(resultsFilteredByRuntime, episode.Aired!.Value).Where(item => !ShouldSkipItem(item)).ToList(); var resultsByTitleDate = FilterByTitleDate(resultsFilteredByRuntime, episode.Aired.Value).Where(item => !ShouldSkipItem(item)).ToList(); var resultsByDescriptionDate = FilterByDescriptionDate(resultsFilteredByRuntime, episode.Aired.Value).Where(item => !ShouldSkipItem(item)).ToList(); var resultsByEpisodeTitleMatch = FilterByEpisodeTitleMatch(resultsFilteredByRuntime, episode.Name).Where(item => !ShouldSkipItem(item)).ToList(); List resultsBySeasonEpisodeMatch = []; // if more than 3 results we assume episode title match wasn't correct if (resultsByEpisodeTitleMatch.Count > 3) { resultsByEpisodeTitleMatch.Clear(); } // if we have episode title match that is the best we got if (resultsByEpisodeTitleMatch.Count > 0) { // we ignore air date in this case as it is not as reliable resultsByAiredDate.Clear(); } if (resultsByAiredDate.Count == 0 && resultsByTitleDate.Count == 0 && resultsByDescriptionDate.Count == 0 && resultsByEpisodeTitleMatch.Count == 0) { // Only trust Mediathek season/episode if no other match: resultsBySeasonEpisodeMatch = FilterBySeasonEpisodeMatch(resultsFilteredByRuntime, episode.SeasonNumber.ToString(), episode.EpisodeNumber.ToString()) .Where(item => !ShouldSkipItem(item)).ToList(); ; } // HashSet to remove duplicates HashSet filteredResults = [.. resultsByAiredDate, .. resultsByTitleDate, .. resultsByDescriptionDate, .. resultsByEpisodeTitleMatch, .. 
resultsBySeasonEpisodeMatch]; // Create a filtered API response var filteredApiResponse = new MediathekApiResponse { Result = new MediathekApiResult { Results = [.. filteredResults], QueryInfo = responseObject.Result.QueryInfo }, Err = responseObject.Err }; return filteredApiResponse; } private static List FilterByRuntime(List results, int? runtime) { if (runtime is null || runtime is 0) { return results; } var minRuntime = Math.Max(5, (int)(runtime * 0.65)) * 60; var maxRuntime = (int)(runtime * 1.35) * 60; return results.Where(item => item.Duration >= minRuntime && item.Duration <= maxRuntime) .ToList(); } private static List FilterByAiredDate(List results, DateTime airedDate) { return results.Where(item => ConvertToBerlinTimezone(UnixTimeStampToDateTime(item.Timestamp)).Date == airedDate) .ToList(); } private static List FilterByTitleDate(List results, DateTime airedDate) { var formattedAiredDate = airedDate.ToString("yyyy-MM-dd"); return results.Where(item => { var extractedDate = ExtractDate(item.Title); return !string.IsNullOrEmpty(extractedDate) && extractedDate == formattedAiredDate; }).ToList(); } private static List FilterByDescriptionDate(List results, DateTime airedDate) { var formattedAiredDate = airedDate.ToString("yyyy-MM-dd"); return results.Where(item => { var extractedDate = ExtractDate(item.Description); return !string.IsNullOrEmpty(extractedDate) && extractedDate == formattedAiredDate; }).ToList(); } private static List FilterByEpisodeTitleMatch(List results, string episodeName) { var normalizedEpisodeName = NormalizeString(episodeName); return results.Where(item => { var normalizedTitle = NormalizeString(item.Title); if (normalizedTitle.Contains(normalizedEpisodeName, StringComparison.OrdinalIgnoreCase)) { return true; } else if (normalizedEpisodeName.Length >= 13 && normalizedTitle.Length >= 10) { return normalizedEpisodeName.Contains(normalizedTitle, StringComparison.OrdinalIgnoreCase); } else { return false; } }).ToList(); } private static 
List FilterBySeasonEpisodeMatch(List results, string season, string episode) { var zeroBasedSeason = season.Length >= 2 ? season : $"0{season}"; var zeroBasedEpisode = episode.Length >= 2 ? episode : $"0{episode}"; return results.Where(item => { return item.Title.Contains($"S{zeroBasedSeason}") && item.Title.Contains($"E{zeroBasedEpisode}"); }).ToList(); } // Normalize a string to remove special characters and retain only A-Z, äöüÄÖÜß private static string NormalizeString(string input) { var regex = NormalizeRegex(); return regex.Replace(input, "").ToLowerInvariant(); } public async Task FetchSearchResultsFromApiByString(string? q, string? season) { var cacheKey = $"q_{q ?? "null"}_{season ?? "null"}"; if (_cache.TryGetValue(cacheKey, out string? cachedResponse)) { return cachedResponse ?? ""; } var zeroBasedSeason = season == null || season.Length >= 2 ? season : $"0{season}"; var queries = new List(); if (q != null) { queries.Add(new { fields = queryField, query = q }); } if (!string.IsNullOrEmpty(season)) { if (season.Length == 4 && season.StartsWith("20") || season.StartsWith("19")) { queries.Add(new { fields = new[] { "title" }, query = $"{season}" }); } else { queries.Add(new { fields = new[] { "title" }, query = $"S{zeroBasedSeason}" }); } } var requestBody = new { queries, sortBy = "timestamp", sortOrder = "desc", future = true, offset = 0, size = 300 // 300 for RSS sync and string search }; var requestContent = new StringContent(JsonSerializer.Serialize(requestBody), Encoding.UTF8); var response = await _httpClient.PostAsync("https://mediathekviewweb.de/api/query", requestContent); if (response.IsSuccessStatusCode) { var apiResponse = await response.Content.ReadAsStringAsync(); var newznabRssResponse = ConvertStringSearchApiResponseToRss(apiResponse, season); _cache.Set(cacheKey, newznabRssResponse, _cacheTimeSpan); return newznabRssResponse; } return null; } private string ConvertIdSearchApiResponseToRss(MediathekApiResponse? 
filteredResponse, string season, string episode, TvdbData tvdbData) { if (filteredResponse is null || filteredResponse.Result.Results == null) { return SerializeRss(GetEmptyRssResult()); } var rss = new Rss { Channel = new Channel { Title = "MediathekArr", Description = "MediathekArr API results", Response = new Response { Offset = 0, Total = filteredResponse.Result.QueryInfo.ResultCount }, Items = filteredResponse.Result.Results // .Where(item => !ShouldSkipItem(item)) we already do this earlier for id searches in ApplyFilters .SelectMany(item => GenerateRssItems(item, season, episode, tvdbData)) // Generate RSS items for each link .ToList() } }; return SerializeRss(rss); } private string ConvertStringSearchApiResponseToRss(string apiResponse, string? season = null, bool sonarr = true) { if (string.IsNullOrWhiteSpace(apiResponse)) { return SerializeRss(GetEmptyRssResult()); } var responseObject = JsonSerializer.Deserialize(apiResponse); if (responseObject?.Result?.Results == null) { return SerializeRss(GetEmptyRssResult()); } var rss = new Rss { Channel = new Channel { Title = "MediathekArr", Description = "MediathekArr API results", Response = new Response { Offset = 0, Total = responseObject.Result.QueryInfo.ResultCount }, Items = responseObject.Result.Results .Where(item => !ShouldSkipItem(item)) .SelectMany(item => GenerateRssItems(item, season, null)) // Generate RSS items for each link .ToList() } }; return SerializeRss(rss); } private Rss GetEmptyRssResult() { return new Rss { Channel = new Channel { Title = "MediathekArr", Description = "MediathekArr API results", Response = new Response { Offset = 0, Total = 0 }, Items = [] } }; } private List GenerateRssItems(ApiResultItem item, string? season, string? episode, TvdbData? 
tvdbData = null) { var items = new List(); string[] categories = ["5000", "2000"]; if (!string.IsNullOrEmpty(item.UrlVideoHd)) { items.AddRange(CreateRssItems(item, season, episode, tvdbData, "1080p", 1.6, "TV > HD", [..categories, "5040", "2040"], item.UrlVideoHd)); } if (!string.IsNullOrEmpty(item.UrlVideo)) { items.AddRange(CreateRssItems(item, season, episode, tvdbData, "720p", 1.0, "TV > HD", [.. categories, "5040", "2040"], item.UrlVideo)); } if (!string.IsNullOrEmpty(item.UrlVideoLow)) { items.AddRange(CreateRssItems(item, season, episode, tvdbData, "480p", 0.4, "TV > SD", [.. categories, "5030", "2030"], item.UrlVideoLow)); } return items; } private List CreateRssItems(ApiResultItem item, string? season, string? episode, TvdbData? tvdbData, string quality, double sizeMultiplier, string category, string[] categoryValues, string url) { var items = new List(); // Generate title with season and formatted date var formattedDate = ExtractDate(item.Title); // Create two items if both season and formatted date are present if (!string.IsNullOrEmpty(formattedDate)) { // Title with formattedDate in it if (!string.IsNullOrEmpty(formattedDate)) { items.Add(CreateRssItem(item, formattedDate.Split('-')[0], null, episode, tvdbData, quality, sizeMultiplier, category, categoryValues, url, formattedDate)); } } items.Add(CreateRssItem(item, null, season, episode, tvdbData, quality, sizeMultiplier, category, categoryValues, url)); return items; } private static string FormatTitle(string title) { // Replace German Umlaute and special characters title = title.Replace("ä", "ae") .Replace("ö", "oe") .Replace("ü", "ue") .Replace("ß", "ss") .Replace("Ä", "Ae") .Replace("Ö", "Oe") .Replace("Ü", "Ue"); // Remove unwanted characters title = TitleRegexUnd().Replace(title, "und"); title = TitleRegexSymbols().Replace(title, ""); // Remove various symbols title = TitleRegexWhitespace().Replace(title, ".").Replace("..", "."); return title; } private Item CreateRssItem(ApiResultItem item, 
string? yearSeason, string? season, string? episode, TvdbData? tvdbData, string quality, double sizeMultiplier, string category, string[] categoryValues, string url, string? formattedDate = null) { var adjustedSize = (long)(item.Size * sizeMultiplier); var parsedTitle = GenerateTitle(item.Topic, item.Title, quality, formattedDate, season, episode); var formattedTitle = FormatTitle(parsedTitle); //var translatedTitle = TranslateTitle(formattedTitle, tvdbData); var translatedTitle = formattedTitle; // TODO see if translation is needed var encodedTitle = Convert.ToBase64String(Encoding.UTF8.GetBytes(translatedTitle)); var encodedUrl = Convert.ToBase64String(Encoding.UTF8.GetBytes(url)); // Generate the full URL for the fake_nzb_download endpoint var fakeDownloadUrl = $"/api/fake_nzb_download?encodedUrl={encodedUrl}&encodedTitle={encodedTitle}"; return new Item { Title = translatedTitle, Guid = new Guid { IsPermaLink = true, Value = $"{item.UrlWebsite}#{quality}{(string.IsNullOrEmpty(formattedDate) ? "" : "-a")}", }, Link = url, Comments = item.UrlWebsite, PubDate = DateTimeOffset.FromUnixTimeSeconds(item.Timestamp).ToString("R"), Category = category, Description = item.Description, Enclosure = new Enclosure { Url = fakeDownloadUrl, Length = adjustedSize, Type = "application/x-nzb" }, Attributes = GenerateAttributes(yearSeason ?? season, categoryValues) }; } private static string TranslateTitle(string title, TvdbData? tvdbData) { if (tvdbData is null) { return title; } return title.Replace(tvdbData.GermanName, tvdbData.Name, StringComparison.OrdinalIgnoreCase); } // TODO refactor and make this look good, It's too late right now:D // TODO now it's even worse :D oh god private string GenerateTitle(string topic, string title, string quality, string? formattedDate, string? seasonOverride, string? 
episodeOverride) { if (!string.IsNullOrEmpty(formattedDate)) { var cleanedTitle = EpisodeRegex().Replace(title, "").Trim(); if (cleanedTitle == topic) { cleanedTitle = null; } return $"{topic}.{formattedDate}.{(cleanedTitle != null ? $"{cleanedTitle}." : "")}GERMAN.{quality}.WEB.h264-MEDiATHEK".Replace(" ", "."); } var episodePattern = @"S\d{1,4}/E\d{1,4}"; var match = Regex.Match(title, episodePattern); if (match.Success) { var seasonAndEpisode = match.Value.Replace("/", ""); var cleanedTitle = EpisodeRegex().Replace(title, "").Replace($"({match.Value})", "").Trim(); if (cleanedTitle == topic) { cleanedTitle = null; } if (seasonOverride is null || episodeOverride is null) { // use data from mediathek return $"{topic}.{seasonAndEpisode}.{(cleanedTitle != null ? $"{cleanedTitle}." : "")}GERMAN.{quality}.WEB.h264-MEDiATHEK".Replace(" ", "."); } // use overwrite data var zeroBasedSeason = seasonOverride.Length >= 2 ? seasonOverride : $"0{seasonOverride}"; var zeroBasedEpisode = episodeOverride.Length >= 2 ? episodeOverride : $"0{episodeOverride}"; return $"{topic}.S{zeroBasedSeason}E{zeroBasedEpisode}.{(cleanedTitle != null ? $"{cleanedTitle}." : "")}GERMAN.{quality}.WEB.h264-MEDiATHEK".Replace(" ", "."); } if (seasonOverride is null || episodeOverride is null) { return title; } else { var cleanedTitle = EpisodeRegex().Replace(title, "").Trim(); if (cleanedTitle == topic) { cleanedTitle = null; } var zeroBasedSeason = seasonOverride.Length >= 2 ? seasonOverride : $"0{seasonOverride}"; var zeroBasedEpisode = episodeOverride.Length >= 2 ? episodeOverride : $"0{episodeOverride}"; return $"{topic}.S{zeroBasedSeason}E{zeroBasedEpisode}.{(cleanedTitle != null ? $"{cleanedTitle}." : title)}GERMAN.{quality}.WEB.h264-MEDiATHEK".Replace(" ", "."); } } private static string ExtractDate(string title) { // Numeric format pattern (e.g., "24.10.2024" or "24.10.24") var numericDatePattern = @"(\d{1,2})\.(\d{1,2})\.(\d{2}|\d{4})"; // Nonth name format pattern (e.g., "16. 
Juli 2024") var germanMonthPattern = @"(\d{1,2})\.\s*(\w+)\s+(\d{4})"; var numericDateMatch = Regex.Match(title, numericDatePattern); if (numericDateMatch.Success) { int day = int.Parse(numericDateMatch.Groups[1].Value); int month = int.Parse(numericDateMatch.Groups[2].Value); int year = int.Parse(numericDateMatch.Groups[3].Value); if (year < 100) { year += 2000; } DateTime date = new(year, month, day); return date.ToString("yyyy-MM-dd"); } var longMonthMatch = Regex.Match(title, germanMonthPattern); if (longMonthMatch.Success) { int day = int.Parse(longMonthMatch.Groups[1].Value); string monthName = longMonthMatch.Groups[2].Value; int year = int.Parse(longMonthMatch.Groups[3].Value); var germanCulture = new CultureInfo("de-DE"); if (DateTime.TryParseExact($"{day} {monthName} {year}", "d MMMM yyyy", germanCulture, DateTimeStyles.None, out DateTime date)) { return date.ToString("yyyy-MM-dd"); } } return string.Empty; } private List GenerateAttributes(string? season, string[] categoryValues) { var attributes = new List(); foreach (var categoryValue in categoryValues) { attributes.Add(new MediathekArrLib.Models.Newznab.Attribute { Name = "category", Value = categoryValue }); } if (season != null) { attributes.Add(new MediathekArrLib.Models.Newznab.Attribute { Name = "season", Value = season }); } return attributes; } private static bool ShouldSkipItem(ApiResultItem item) { return item.UrlVideo.EndsWith(".m3u8") || SkipKeywords.Any(item.Title.Contains); } private string SerializeRss(Rss rss) { var serializer = new XmlSerializer(typeof(Rss)); // Define the namespaces and add the newznab namespace var namespaces = new XmlSerializerNamespaces(); namespaces.Add("newznab", "http://www.newznab.com/DTD/2010/feeds/attributes/"); using var stringWriter = new StringWriter(); serializer.Serialize(stringWriter, rss, namespaces); // TODO quick fix string result = stringWriter.ToString(); result = result.Replace(":newznab_x003A_", ":"); return result; } private static DateTime 
UnixTimeStampToDateTime(long unixTimeStamp) { return DateTimeOffset.FromUnixTimeSeconds(unixTimeStamp).UtcDateTime; } private static DateTime ConvertToBerlinTimezone(DateTime utcDateTime) { var berlinTimeZone = TimeZoneInfo.FindSystemTimeZoneById("Europe/Berlin"); return TimeZoneInfo.ConvertTimeFromUtc(utcDateTime, berlinTimeZone); } [GeneratedRegex(@"[&]")] private static partial Regex TitleRegexUnd(); [GeneratedRegex(@"[/:;""'@#?$%^*+=!<>,()]")] private static partial Regex TitleRegexSymbols(); [GeneratedRegex(@"\s+")] private static partial Regex TitleRegexWhitespace(); [GeneratedRegex(@"Folge\s*\d+:\s*")] private static partial Regex EpisodeRegex(); [GeneratedRegex("[^a-zA-ZäöüÄÖÜß]")] private static partial Regex NormalizeRegex(); } } ================================================ FILE: MediathekArr/appsettings.Development.json ================================================ { "Logging": { "LogLevel": { "Default": "Information", "Microsoft.AspNetCore": "Warning" } } } ================================================ FILE: MediathekArr/appsettings.Production.json ================================================ { "Kestrel": { "Endpoints": { "Http": { "Url": "http://[::]:5007" } } } } ================================================ FILE: MediathekArr/appsettings.json ================================================ { "Logging": { "LogLevel": { "Default": "Information", "Microsoft.AspNetCore": "Warning" } }, "AllowedHosts": "*" } ================================================ FILE: MediathekArr.sln ================================================  Microsoft Visual Studio Solution File, Format Version 12.00 # Visual Studio Version 17 VisualStudioVersion = 17.9.34728.123 MinimumVisualStudioVersion = 10.0.40219.1 Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "MediathekArrDownloader", "MediathekArr\MediathekArrDownloader.csproj", "{325043A5-5585-4C48-B947-A1E69EAE8343}" EndProject Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "MediathekArrServer", 
"MediathekArrServer\MediathekArrServer.csproj", "{F6A03A18-04C6-4BC1-8969-A8BADDD64718}" EndProject Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "MediathekArrLib", "MediathekArrLib\MediathekArrLib.csproj", "{E6785AB3-92DA-4DF8-8EAC-362BD3DE8AE2}" EndProject Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Solution Items", "Solution Items", "{02EA681E-C7D8-13C7-8484-4AC65E1B71E8}" ProjectSection(SolutionItems) = preProject .env.example = .env.example .gitattributes = .gitattributes .gitignore = .gitignore build_and_push_docker_image.bat = build_and_push_docker_image.bat docker-compose.yml = docker-compose.yml Dockerfile = Dockerfile LICENSE = LICENSE README.md = README.md EndProjectSection EndProject Global GlobalSection(SolutionConfigurationPlatforms) = preSolution Debug|Any CPU = Debug|Any CPU Release|Any CPU = Release|Any CPU EndGlobalSection GlobalSection(ProjectConfigurationPlatforms) = postSolution {325043A5-5585-4C48-B947-A1E69EAE8343}.Debug|Any CPU.ActiveCfg = Debug|Any CPU {325043A5-5585-4C48-B947-A1E69EAE8343}.Debug|Any CPU.Build.0 = Debug|Any CPU {325043A5-5585-4C48-B947-A1E69EAE8343}.Release|Any CPU.ActiveCfg = Release|Any CPU {325043A5-5585-4C48-B947-A1E69EAE8343}.Release|Any CPU.Build.0 = Release|Any CPU {F6A03A18-04C6-4BC1-8969-A8BADDD64718}.Debug|Any CPU.ActiveCfg = Debug|Any CPU {F6A03A18-04C6-4BC1-8969-A8BADDD64718}.Debug|Any CPU.Build.0 = Debug|Any CPU {F6A03A18-04C6-4BC1-8969-A8BADDD64718}.Release|Any CPU.ActiveCfg = Release|Any CPU {F6A03A18-04C6-4BC1-8969-A8BADDD64718}.Release|Any CPU.Build.0 = Release|Any CPU {E6785AB3-92DA-4DF8-8EAC-362BD3DE8AE2}.Debug|Any CPU.ActiveCfg = Debug|Any CPU {E6785AB3-92DA-4DF8-8EAC-362BD3DE8AE2}.Debug|Any CPU.Build.0 = Debug|Any CPU {E6785AB3-92DA-4DF8-8EAC-362BD3DE8AE2}.Release|Any CPU.ActiveCfg = Release|Any CPU {E6785AB3-92DA-4DF8-8EAC-362BD3DE8AE2}.Release|Any CPU.Build.0 = Release|Any CPU EndGlobalSection GlobalSection(SolutionProperties) = preSolution HideSolutionNode = FALSE EndGlobalSection 
GlobalSection(ExtensibilityGlobals) = postSolution SolutionGuid = {F9EB85C0-479B-49B9-92A6-79FF03690BCA} EndGlobalSection EndGlobal ================================================ FILE: MediathekArr.slnLaunch ================================================ [ { "Name": "Downloader+Server", "Projects": [ { "Path": "MediathekArr\\MediathekArrDownloader.csproj", "Action": "Start", "DebugTarget": "http" }, { "Path": "MediathekArrServer\\MediathekArrServer.csproj", "Action": "Start", "DebugTarget": "http" } ] } ] ================================================ FILE: MediathekArrLib/MediathekArrLib.csproj ================================================  net9.0 enable enable ================================================ FILE: MediathekArrLib/Models/ApiResultItem.cs ================================================ using MediathekArrLib.Utilities; using System.Text.Json.Serialization; namespace MediathekArrLib.Models; public class ApiResultItem { [JsonPropertyName("channel")] public string Channel { get; set; } [JsonPropertyName("topic")] public string Topic { get; set; } [JsonPropertyName("title")] public string Title { get; set; } [JsonPropertyName("description")] public string Description { get; set; } [JsonPropertyName("filmlisteTimestamp")] [JsonConverter(typeof(NumberOrEmptyConverter))] public long Timestamp { get; set; } [JsonPropertyName("duration")] [JsonConverter(typeof(NumberOrEmptyConverter))] public int Duration { get; set; } [JsonPropertyName("size")] [JsonConverter(typeof(NumberOrEmptyConverter))] public long Size { get; set; } [JsonPropertyName("url_website")] public string UrlWebsite { get; set; } [JsonPropertyName("url_video")] public string UrlVideo { get; set; } [JsonPropertyName("url_video_low")] public string UrlVideoLow { get; set; } [JsonPropertyName("url_video_hd")] public string UrlVideoHd { get; set; } } ================================================ FILE: MediathekArrLib/Models/MediathekApiResponse.cs 
================================================ using System.Text.Json.Serialization; namespace MediathekArrLib.Models; public class MediathekApiResponse { [JsonPropertyName("result")] public MediathekApiResult Result { get; set; } [JsonPropertyName("err")] public object? Err { get; set; } } ================================================ FILE: MediathekArrLib/Models/MediathekApiResult.cs ================================================ using System.Text.Json.Serialization; namespace MediathekArrLib.Models; public class MediathekApiResult { [JsonPropertyName("results")] public List Results { get; set; } [JsonPropertyName("queryInfo")] public QueryInfo QueryInfo { get; set; } } ================================================ FILE: MediathekArrLib/Models/Newznab/Attribute.cs ================================================ using System.Xml; using System.Xml.Serialization; namespace MediathekArrLib.Models.Newznab; public class Attribute { [XmlAttribute("name")] public string Name { get; set; } [XmlAttribute("value")] public string Value { get; set; } } ================================================ FILE: MediathekArrLib/Models/Newznab/Channel.cs ================================================ using System.Xml; using System.Xml.Serialization; namespace MediathekArrLib.Models.Newznab; public class Channel { [XmlElement("title")] public string Title { get; set; } [XmlElement("description")] public string Description { get; set; } [XmlElement("newznab:response", Namespace = "http://www.newznab.com/DTD/2010/feeds/attributes/")] public Response Response { get; set; } [XmlElement("item")] public List Items { get; set; } = []; } ================================================ FILE: MediathekArrLib/Models/Newznab/Enclosure.cs ================================================ using System.Xml; using System.Xml.Serialization; namespace MediathekArrLib.Models.Newznab; public class Enclosure { [XmlAttribute("url")] public string Url { get; set; } [XmlAttribute("length")] 
public long Length { get; set; } [XmlAttribute("type")] public string Type { get; set; } } ================================================ FILE: MediathekArrLib/Models/Newznab/Guid.cs ================================================ using System.Xml; using System.Xml.Serialization; namespace MediathekArrLib.Models.Newznab; public class Guid { [XmlAttribute("isPermaLink")] public bool IsPermaLink { get; set; } [XmlText] public string Value { get; set; } } ================================================ FILE: MediathekArrLib/Models/Newznab/Item.cs ================================================ using System.Xml; using System.Xml.Serialization; namespace MediathekArrLib.Models.Newznab; public class Item { [XmlElement("title")] public string Title { get; set; } [XmlElement("guid")] public Guid Guid { get; set; } [XmlElement("link")] public string Link { get; set; } [XmlElement("comments")] public string Comments { get; set; } [XmlElement("pubDate")] public string PubDate { get; set; } [XmlElement("category")] public string Category { get; set; } [XmlElement("description")] public string Description { get; set; } [XmlElement("enclosure")] public Enclosure Enclosure { get; set; } [XmlElement("newznab:attr", Namespace = "http://www.newznab.com/DTD/2010/feeds/attributes/")] public List Attributes { get; set; } = []; } ================================================ FILE: MediathekArrLib/Models/Newznab/Response.cs ================================================ using System.Xml; using System.Xml.Serialization; namespace MediathekArrLib.Models.Newznab; public class Response { [XmlAttribute("offset")] public int Offset { get; set; } [XmlAttribute("total")] public int Total { get; set; } } ================================================ FILE: MediathekArrLib/Models/Newznab/Rss.cs ================================================ using System.Xml; using System.Xml.Serialization; namespace MediathekArrLib.Models.Newznab; [XmlRoot("rss")] public class Rss { 
[XmlAttribute("version")] public string Version { get; set; } = "2.0"; [XmlElement("channel")] public Channel Channel { get; set; } [XmlNamespaceDeclarations] public XmlSerializerNamespaces Xmlns { get; } = new XmlSerializerNamespaces( [ new XmlQualifiedName("newznab", "http://www.newznab.com/DTD/2010/feeds/attributes/") ]); } ================================================ FILE: MediathekArrLib/Models/QueryInfo.cs ================================================ using System.Text.Json.Serialization; namespace MediathekArrLib.Models; public class QueryInfo { [JsonPropertyName("filmlisteTimestamp")] public long FilmlisteTimestamp { get; set; } [JsonPropertyName("searchEngineTime")] public string SearchEngineTime { get; set; } [JsonPropertyName("resultCount")] public int ResultCount { get; set; } [JsonPropertyName("totalResults")] public int TotalResults { get; set; } } ================================================ FILE: MediathekArrLib/Models/Rulesets/EpisodeType.cs ================================================ namespace MediathekArrLib.Models.Rulesets; public enum EpisodeType { Standard, Daily, Anime } ================================================ FILE: MediathekArrLib/Models/Rulesets/Filter.cs ================================================ using System.Text.Json.Serialization; namespace MediathekArrLib.Models.Rulesets; public class Filter { [JsonPropertyName("attribute")] public string Attribute { get; set; } = string.Empty; [JsonPropertyName("type")] [JsonConverter(typeof(JsonStringEnumConverter))] public MatchType Type { get; set; } [JsonPropertyName("value")] public object Value { get; set; } = string.Empty; } ================================================ FILE: MediathekArrLib/Models/Rulesets/IdentificationResult.cs ================================================ namespace MediathekArrLib.Models.Rulesets; public record IdentificationResult(string UsedRuleset, string Name, string GermanName, int? SeasonNumber, int? 
EpisodeNumber, string ItemTitle, TvdbEpisode MatchedEpisode); ================================================ FILE: MediathekArrLib/Models/Rulesets/MatchType.cs ================================================ namespace MediathekArrLib.Models.Rulesets; public enum MatchType { ExactMatch, Contains, Regex, GreaterThan, LessThan } ================================================ FILE: MediathekArrLib/Models/Rulesets/MatchedEpisodeInfo.cs ================================================ namespace MediathekArrLib.Models.Rulesets; public record MatchedEpisodeInfo(TvdbEpisode Episode, ApiResultItem Item, string ShowName, string MatchedTitle); ================================================ FILE: MediathekArrLib/Models/Rulesets/MatchingStrategy.cs ================================================ namespace MediathekArrLib.Models.Rulesets; public enum MatchingStrategy { SeasonAndEpisodeNumber, // Use season + episode number for matching ItemTitleIncludes, // Match episodes where the tvdb episode name contains this title ItemTitleExact, // Match episodes with an exact itemTitle ItemTitleEqualsAirdate } ================================================ FILE: MediathekArrLib/Models/Rulesets/Media.cs ================================================ using System.Text.Json.Serialization; namespace MediathekArrLib.Models.Rulesets; public class Media { [JsonPropertyName("media_id")] public int Id { get; set; } [JsonPropertyName("media_name")] public string Name { get; set; } = string.Empty; [JsonPropertyName("media_type")] public string Type { get; set; } = string.Empty; [JsonPropertyName("media_tmdbId")] public int? TmdbId { get; set; } [JsonPropertyName("media_imdbId")] public string? ImdbId { get; set; } [JsonPropertyName("media_tvdbId")] public int? 
TvdbId { get; set; } } ================================================ FILE: MediathekArrLib/Models/Rulesets/Pagination.cs ================================================ using System.Text.Json.Serialization; namespace MediathekArrLib.Models.Rulesets; public class Pagination { [JsonPropertyName("currentPage")] public int CurrentPage { get; set; } [JsonPropertyName("totalPages")] public int TotalPages { get; set; } [JsonPropertyName("totalItems")] public int TotalItems { get; set; } [JsonPropertyName("itemsPerPage")] public int ItemsPerPage { get; set; } } ================================================ FILE: MediathekArrLib/Models/Rulesets/RegexRule.cs ================================================ using System.Text.Json.Serialization; namespace MediathekArrLib.Models.Rulesets; public class RegexRule { [JsonPropertyName("field")] public string Field { get; set; } = string.Empty; [JsonPropertyName("pattern")] public string Pattern { get; set; } = string.Empty; } ================================================ FILE: MediathekArrLib/Models/Rulesets/Ruleset.cs ================================================ using System.Text.Json; using System.Text.Json.Serialization; namespace MediathekArrLib.Models.Rulesets; public class Ruleset { [JsonPropertyName("id")] public int Id { get; set; } [JsonPropertyName("mediaId")] public int MediaId { get; set; } [JsonPropertyName("topic")] public string Topic { get; set; } = string.Empty; [JsonPropertyName("priority")] public int Priority { get; set; } [JsonPropertyName("filters")] public string FiltersJson { get; set; } = string.Empty; [JsonIgnore] public List Filters { get { return JsonSerializer.Deserialize>(FiltersJson) ?? 
[]; } } [JsonPropertyName("titleRegexRules")] public string TitleRegexRulesJson { get; set; } = string.Empty; [JsonIgnore] public List TitleRegexRules { get { var options = new JsonSerializerOptions { PropertyNameCaseInsensitive = true, Converters = { new JsonStringEnumConverter(JsonNamingPolicy.CamelCase) } }; return JsonSerializer.Deserialize>(TitleRegexRulesJson, options) ?? []; } } [JsonPropertyName("episodeRegex")] public string? EpisodeRegex { get; set; } = string.Empty; [JsonPropertyName("seasonRegex")] public string? SeasonRegex { get; set; } = string.Empty; [JsonPropertyName("matchingStrategy")] [JsonConverter(typeof(JsonStringEnumConverter))] public MatchingStrategy MatchingStrategy { get; set; } [JsonPropertyName("media")] public Media Media { get; set; } = new Media(); } ================================================ FILE: MediathekArrLib/Models/Rulesets/RulesetApiResponse.cs ================================================ using System.Text.Json.Serialization; namespace MediathekArrLib.Models.Rulesets; public class RulesetApiResponse { [JsonPropertyName("rulesets")] public List Rulesets { get; set; } = []; [JsonPropertyName("pagination")] public Pagination Pagination { get; set; } = new(); } ================================================ FILE: MediathekArrLib/Models/Rulesets/TitleRegexRule.cs ================================================ using System; using System.Collections.Generic; using System.Linq; using System.Text; using System.Text.Json.Serialization; using System.Threading.Tasks; namespace MediathekArrLib.Models.Rulesets; public class TitleRegexRule { [JsonPropertyName("type")] public TitleRegexRuleType Type { get; set; } [JsonPropertyName("value")] public string? Value { get; set; } // For static text [JsonPropertyName("field")] public string? Field { get; set; } // API field to extract from [JsonPropertyName("pattern")] public string? 
Pattern { get; set; } // Regex pattern } ================================================ FILE: MediathekArrLib/Models/Rulesets/TitleRegexRuleType.cs ================================================ namespace MediathekArrLib.Models.Rulesets; public enum TitleRegexRuleType { Static, // Static text to include in the title Regex // Regex to extract text from an API field } ================================================ FILE: MediathekArrLib/Models/TvdbAlias.cs ================================================ namespace MediathekArrLib.Models; public record TvdbAlias(string Language, string Name); ================================================ FILE: MediathekArrLib/Models/TvdbData.cs ================================================ using System.Text.Json.Serialization; namespace MediathekArrLib.Models; public record TvdbData(int Id, string Name, [property: JsonPropertyName("german_name")] string GermanName, List Aliases, List Episodes) { /// /// Finds an episode by its air date. /// /// The air date to search for. /// The TvdbEpisode if found, or null if not found. public TvdbEpisode? FindEpisodeByAirDate(DateTime airDate) { return Episodes?.FirstOrDefault(episode => episode.Aired?.Date == airDate.Date); } /// /// Finds episodes by their air month. /// /// The year of the episodes to search for. /// The month of the episodes to search for. /// A list of TvdbEpisode objects that aired in the specified year and month. public List? FindEpisodeByAirMonth(int year, int month) { return Episodes? .Where(episode => episode.Aired.HasValue && episode.Aired.Value.Year == year && episode.Aired.Value.Month == month) .ToList(); } /// /// Finds all episodes aired in a specified year. /// /// The year to search for. /// A list of TvdbEpisode objects aired in the specified year, or an empty list if none are found. public List FindEpisodesByAirYear(int year) { return Episodes? .Where(episode => episode.Aired?.Year == year) .ToList() ?? 
[]; } /// /// Finds all episodes from a given season. /// /// The season number to search for. /// A list of TvdbEpisode objects in the specified season, or an empty list if none are found. public List FindEpisodesBySeason(int seasonNumber) { return Episodes?.Where(episode => episode.SeasonNumber == seasonNumber).ToList() ?? []; } /// /// Finds all episodes from a given season. /// /// The season number to search for. /// A list of TvdbEpisode objects in the specified season, or an empty list if none are found. public List FindEpisodesBySeason(string? seasonNumber) { if (int.TryParse(seasonNumber, out int parsedSeason)) { return FindEpisodesBySeason(parsedSeason); } return []; } /// /// Finds a specific episode by season and episode number. /// /// The season number of the episode. /// The episode number within the season. /// The TvdbEpisode if found, or null if not found. public TvdbEpisode? FindEpisodeBySeasonAndNumber(int seasonNumber, int episodeNumber) { return Episodes?.FirstOrDefault(episode => episode.SeasonNumber == seasonNumber && episode.EpisodeNumber == episodeNumber); } /// /// Finds a specific episode by season and episode number. /// /// The season number of the episode. /// The episode number within the season. /// The TvdbEpisode if found, or null if not found. public TvdbEpisode? FindEpisodeBySeasonAndNumber(string? seasonNumber, string? episodeNumber) { if (int.TryParse(seasonNumber, out int parsedSeason) && int.TryParse(episodeNumber, out int parsedEpisode)) { return FindEpisodeBySeasonAndNumber(parsedSeason, parsedEpisode); } return null; } } ================================================ FILE: MediathekArrLib/Models/TvdbEpisode.cs ================================================ namespace MediathekArrLib.Models; public record TvdbEpisode(string Name, DateTime? Aired, int? 
Runtime, int SeasonNumber, int EpisodeNumber) { public string PaddedSeason => SeasonNumber.ToString("D2"); public string PaddedEpisode => EpisodeNumber.ToString("D2"); }; ================================================ FILE: MediathekArrLib/Models/TvdbInfoResponse.cs ================================================ namespace MediathekArrLib.Models; public record TvdbInfoResponse(string Status, TvdbData Data); ================================================ FILE: MediathekArrLib/Utilities/JsonConverter.cs ================================================ using System.Text.Json.Serialization; using System.Text.Json; namespace MediathekArrLib.Utilities; public class NumberOrEmptyConverter : JsonConverter where T : struct, IConvertible { public override T Read(ref Utf8JsonReader reader, Type typeToConvert, JsonSerializerOptions options) { if (reader.TokenType == JsonTokenType.Null || (reader.TokenType == JsonTokenType.String && reader.GetString() == "")) { return default; // Return default value, which will be 0 for int, long, etc. } // Convert to the target numeric type (int, long, etc.) try { if (reader.TokenType == JsonTokenType.Number) { // Handle numeric values directly if (typeof(T) == typeof(int)) { return (T)(object)reader.GetInt32(); } else if (typeof(T) == typeof(long)) { return (T)(object)reader.GetInt64(); } } else if (reader.TokenType == JsonTokenType.String) { // Try parsing string as a number string? 
stringValue = reader.GetString(); if (!string.IsNullOrEmpty(stringValue)) { if (typeof(T) == typeof(int) && int.TryParse(stringValue, out int intValue)) { return (T)(object)intValue; } else if (typeof(T) == typeof(long) && long.TryParse(stringValue, out long longValue)) { return (T)(object)longValue; } } } } catch (Exception ex) { throw new JsonException($"Error converting value to type {typeof(T)}: {ex.Message}", ex); } throw new NotSupportedException($"The converter does not support type {typeof(T)}."); } public override void Write(Utf8JsonWriter writer, T value, JsonSerializerOptions options) { writer.WriteNumberValue(Convert.ToDouble(value)); } } ================================================ FILE: MediathekArrLib/Utilities/NewznabUtils.cs ================================================ using MediathekArrLib.Models; using MediathekArrLib.Models.Newznab; using System.Xml.Serialization; namespace MediathekArrLib.Utilities; public static class NewznabUtils { public static List GenerateAttributes(string? 
season, string[] categoryValues) { var attributes = new List(); foreach (var categoryValue in categoryValues) { attributes.Add(new Models.Newznab.Attribute { Name = "category", Value = categoryValue }); } if (season != null) { attributes.Add(new Models.Newznab.Attribute { Name = "season", Value = season }); } return attributes; } public static string SerializeRss(Rss rss) { var serializer = new XmlSerializer(typeof(Rss)); // Define the namespaces and add the newznab namespace var namespaces = new XmlSerializerNamespaces(); namespaces.Add("newznab", "http://www.newznab.com/DTD/2010/feeds/attributes/"); using var stringWriter = new StringWriter(); serializer.Serialize(stringWriter, rss, namespaces); // TODO quick fix string result = stringWriter.ToString(); result = result.Replace(":newznab_x003A_", ":"); return result; } public static Rss GetEmptyRssResult() { return new Rss { Channel = new Channel { Title = "MediathekArr", Description = "MediathekArr API results", Response = new Response { Offset = 0, Total = 0 }, Items = [] } }; } } ================================================ FILE: MediathekArrServer/Controllers/TController.cs ================================================ using MediathekArrServer.Services; using Microsoft.AspNetCore.Mvc; using System.Text; namespace MediathekArrServer.Controllers; [ApiController] [Route("api")] public class TController(MediathekSearchService mediathekSearchService, ItemLookupService itemLookupService) : ControllerBase { private readonly MediathekSearchService _mediathekSearchService = mediathekSearchService; private readonly ItemLookupService _itemLookupService = itemLookupService; [HttpGet] public async Task GetCapsXml([FromQuery] string t) { var limit = int.TryParse(HttpContext.Request.Query["limit"], out var parsedLimit) ? parsedLimit : 100; var offset = int.TryParse(HttpContext.Request.Query["offset"], out var parsedOffset) ? 
parsedOffset: 0; string q = HttpContext.Request.Query["q"]; string imdbid = HttpContext.Request.Query["imdbid"]; string tvdbid = HttpContext.Request.Query["tvdbid"]; string tmdbid = HttpContext.Request.Query["tmdbid"]; string season = HttpContext.Request.Query["season"]; string episode = HttpContext.Request.Query["ep"]; string cat = HttpContext.Request.Query["cat"]; if (t == "caps") { string xmlContent = @" "; return Content(xmlContent, "application/xml", Encoding.UTF8); } else if (t == "tvsearch" || t == "search" || t == "movie") { try { if (!string.IsNullOrEmpty(tvdbid) && int.TryParse(tvdbid, out var parsedTvdbid)) { var tvdbData = await _itemLookupService.GetShowInfoByTvdbId(parsedTvdbid); string searchResults = await _mediathekSearchService.FetchSearchResultsFromApiById(tvdbData, season, episode, limit, offset); return Content(searchResults, "application/xml", Encoding.UTF8); } else if (q is null && season is null && imdbid is null && tvdbid is null && tmdbid is null) { string searchResults = await _mediathekSearchService.FetchSearchResultsForRssSync(limit, offset); return Content(searchResults, "application/xml", Encoding.UTF8); } else { string searchResults = await _mediathekSearchService.FetchSearchResultsFromApiByString(q, season, limit, offset); return Content(searchResults, "application/xml", Encoding.UTF8); } } catch (HttpRequestException ex) { return BadRequest(new { error = ex.Message }); } } return NotFound(); } [HttpGet("fake_nzb_download")] public IActionResult FakeNzbDownload([FromQuery] string encodedUrl, [FromQuery] string encodedTitle) { string decodedUrl; string decodedTitle; try { var base64EncodedBytesUrl = Convert.FromBase64String(encodedUrl); decodedUrl = Encoding.UTF8.GetString(base64EncodedBytesUrl); var base64EncodedBytesTitle = Convert.FromBase64String(encodedTitle); decodedTitle = Encoding.UTF8.GetString(base64EncodedBytesTitle); } catch (FormatException) { return BadRequest("Invalid base64 string."); } // Define a basic NZB XML 
structure with the comment and encoded URL. var nzbContent = $@" a.b.zdf ExampleSegmentID@news.example.com "; // Convert the NZB XML content to byte array var fileContent = Encoding.UTF8.GetBytes(nzbContent); // Set the .nzb file name var nzbFileName = $"mediathek-{DateTime.Now:yyyy-MM-dd_HH-mm-ss}.nzb"; return File(fileContent, "application/x-nzb", nzbFileName); } } ================================================ FILE: MediathekArrServer/MediathekArrServer.csproj ================================================  net9.0 enable enable linux-x64 True mcr.microsoft.com/dotnet/aspnet:9.0 6f9f5643-8dc2-4efe-8c30-608b5c2bb8c5 PreserveNewest true PreserveNewest PreserveNewest true PreserveNewest PreserveNewest true PreserveNewest ================================================ FILE: MediathekArrServer/Program.cs ================================================ using MediathekArrServer.Services; using Scalar.AspNetCore; var builder = WebApplication.CreateBuilder(args); builder.Services.AddControllers(); builder.Services.AddOpenApi(); builder.Services.AddMemoryCache(); builder.Services.AddHttpClient("MediathekClient", client => { client.DefaultRequestHeaders.UserAgent.ParseAdd("Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:131.0) Gecko/20100101 Firefox/131.0"); client.DefaultRequestHeaders.AcceptEncoding.ParseAdd("gzip"); client.DefaultRequestHeaders.Accept.ParseAdd("application/json"); }) .ConfigurePrimaryHttpMessageHandler(() => new HttpClientHandler { AutomaticDecompression = System.Net.DecompressionMethods.GZip | System.Net.DecompressionMethods.Deflate }); builder.Services.AddHostedService(); builder.Services.AddSingleton(); builder.Services.AddSingleton(); var app = builder.Build(); // Middleware to log all incoming requests app.Use(async (context, next) => { // Log the incoming request details var logger = app.Services.GetRequiredService>(); var request = context.Request; logger.LogInformation("Incoming Request: {method} {url}", request.Method, request.Path + 
request.QueryString); // Check if the request is a POST and has a body if (request.Method == HttpMethods.Post && request.ContentLength > 0) { // Enable buffering so the request can be read multiple times request.EnableBuffering(); } // Call the next middleware in the pipeline await next.Invoke(); }); // Configure the HTTP request pipeline. if (app.Environment.IsDevelopment()) { app.MapOpenApi(); app.MapScalarApiReference(); } app.UseHttpsRedirection(); app.UseAuthorization(); app.MapControllers(); app.Run(); ================================================ FILE: MediathekArrServer/Properties/launchSettings.json ================================================ { "profiles": { "http": { "commandName": "Project", "launchBrowser": true, "launchUrl": "scalar/v1", "environmentVariables": { "ASPNETCORE_ENVIRONMENT": "Development", "ASPNETCORE_URLS": "http://localhost:5008" }, "dotnetRunMessages": true, "applicationUrl": "http://localhost:5008" }, "https": { "commandName": "Project", "launchBrowser": true, "launchUrl": "scalar/v1", "environmentVariables": { "ASPNETCORE_ENVIRONMENT": "Development", "ASPNETCORE_URLS": "https://localhost:5008" }, "dotnetRunMessages": true, "applicationUrl": "https://localhost:5008" }, "Container (.NET SDK)": { "commandName": "SdkContainer", "launchBrowser": true, "launchUrl": "{Scheme}://{ServiceHost}:{ServicePort}/scalar/v1", "environmentVariables": { "ASPNETCORE_HTTPS_PORTS": "8081", "ASPNETCORE_HTTP_PORTS": "8080" }, "publishAllPorts": true, "useSSL": true } }, "$schema": "http://json.schemastore.org/launchsettings.json" } ================================================ FILE: MediathekArrServer/Services/ItemLookupService.cs ================================================ using MediathekArrLib.Models; using Microsoft.Extensions.Caching.Memory; using System.Text.Json; namespace MediathekArrServer.Services; public class ItemLookupService(IHttpClientFactory httpClientFactory, IConfiguration configuration, IMemoryCache memoryCache) { private 
readonly HttpClient _httpClient = httpClientFactory.CreateClient();
// Base URL for the MediathekArr metadata API; overridable via the
// MEDIATHEKARR_API_BASE_URL configuration key.
private readonly string _apiBaseUrl = configuration["MEDIATHEKARR_API_BASE_URL"] ?? "https://mediathekarr.pcjones.de/api/v1";
private readonly IMemoryCache _memoryCache = memoryCache;

// Case-insensitive property matching so the API's JSON casing does not have to
// match the C# model property names exactly.
private static JsonSerializerOptions GetJsonSerializerOptions()
{
    return new JsonSerializerOptions { PropertyNameCaseInsensitive = true };
}

/// <summary>
/// Fetches show metadata for a TVDB id from the MediathekArr API.
/// Successful lookups are cached for 12 hours. Returns null when
/// <paramref name="tvdbid"/> is null; throws <see cref="HttpRequestException"/>
/// on HTTP failure or when the payload does not report "success".
/// </summary>
public async Task GetShowInfoByTvdbId(int? tvdbid)
{
    if (tvdbid == null)
    {
        return null;
    }
    var cacheKey = $"TvdbInfo_{tvdbid}";
    // Serve from cache when a prior lookup succeeded (failures are never cached).
    if (_memoryCache.TryGetValue(cacheKey, out TvdbData? cachedTvdbInfo))
    {
        if (cachedTvdbInfo != null)
        {
            return cachedTvdbInfo;
        }
    }
    var requestUrl = $"{_apiBaseUrl}/get_show.php?tvdbid={tvdbid}";
    var response = await _httpClient.GetAsync(requestUrl);
    if (!response.IsSuccessStatusCode)
    {
        var errorContent = await response.Content.ReadAsStringAsync();
        throw new HttpRequestException($"Error fetching data: {errorContent}");
    }
    var jsonResponse = await response.Content.ReadAsStringAsync();
    var tvdbInfo = JsonSerializer.Deserialize(jsonResponse, GetJsonSerializerOptions());
    if (tvdbInfo == null || tvdbInfo.Status != "success" || tvdbInfo.Data == null)
    {
        throw new HttpRequestException($"Failed to fetch TVDB data. Response: {jsonResponse}"); // TODO log and return null instead of throwing
    }
    _memoryCache.Set(cacheKey, tvdbInfo.Data, TimeSpan.FromHours(12));
    return tvdbInfo.Data;
}
}
================================================ FILE: MediathekArrServer/Services/MediathekSearchFallbackHandler.cs ================================================
using MediathekArrLib.Models;
using MediathekArrLib.Models.Newznab;
using MediathekArrLib.Utilities;
using System.Globalization;
using System.Text;
using System.Text.Json;
using System.Text.RegularExpressions;
using Guid = MediathekArrLib.Models.Newznab.Guid;

namespace MediathekArrServer.Services;

// Best-effort ("fallback") matching of MediathekView results against TVDB episodes,
// used when the ruleset-based matching in MediathekSearchService produced no hit.
public partial class MediathekSearchFallbackHandler
{
    /// <summary>
    /// Builds Newznab items for one desired TVDB episode by heuristically filtering
    /// a raw MediathekView API response (see <see cref="ApplyFilters"/>).
    /// Returns an empty list for a null/blank response.
    /// </summary>
    public static List GetFallbackSearchResultItemsById(string? apiResponse, TvdbEpisode episode, TvdbData tvdbData)
    {
        if (string.IsNullOrWhiteSpace(apiResponse))
        {
            return [];
        }
        var filteredResponse = ApplyFilters(apiResponse, episode);
        var seasonNumber = episode.SeasonNumber.ToString();
        var episodeNumber = episode.EpisodeNumber.ToString();
        return filteredResponse?.Result.Results.SelectMany(item => GenerateRssItems(item, seasonNumber, episodeNumber, tvdbData)).ToList() ?? [];
    }

    /// <summary>
    /// Builds Newznab items for results that no ruleset matched (free-text/RSS path).
    /// Season may be null; episode numbers are taken from the item titles themselves.
    /// </summary>
    public static List GetFallbackSearchResultItemsByString(List? unmatchedFilteredResultItems, string? season)
    {
        if (unmatchedFilteredResultItems is null || unmatchedFilteredResultItems.Count == 0)
        {
            return [];
        }
        // NOTE(review): "?? []" is unreachable here — ToList() never returns null.
        return unmatchedFilteredResultItems.SelectMany(item => GenerateRssItems(item, season, null)).ToList() ?? [];
    }

    // Emits up to three quality variants (1080p/720p/480p) per result, one per
    // available video URL. Size multipliers scale the reported size per quality;
    // category ids are Newznab TV categories (5000/2000 plus HD/SD subcategories).
    private static List GenerateRssItems(ApiResultItem item, string? season, string? episode, TvdbData? tvdbData = null)
    {
        var items = new List();
        string[] categories = ["5000", "2000"];
        if (!string.IsNullOrEmpty(item.UrlVideoHd))
        {
            items.AddRange(CreateRssItems(item, season, episode, tvdbData, "1080p", 1.6, "TV > HD", [.. categories, "5040", "2040"], item.UrlVideoHd));
        }
        if (!string.IsNullOrEmpty(item.UrlVideo))
        {
            items.AddRange(CreateRssItems(item, season, episode, tvdbData, "720p", 1.0, "TV > HD", [.. categories, "5040", "2040"], item.UrlVideo));
        }
        if (!string.IsNullOrEmpty(item.UrlVideoLow))
        {
            items.AddRange(CreateRssItems(item, season, episode, tvdbData, "480p", 0.4, "TV > SD", [.. categories, "5030", "2030"], item.UrlVideoLow));
        }
        return items;
    }

    // Creates one date-based item (when a date is found in the title) plus one
    // season-based item for the same result/quality.
    private static List CreateRssItems(ApiResultItem item, string? season, string? episode, TvdbData? tvdbData, string quality, double sizeMultiplier, string category, string[] categoryValues, string url)
    {
        var items = new List();
        // Generate title with season and formatted date
        var formattedDate = ExtractDate(item.Title);
        // Create two items if both season and formatted date are present
        if (!string.IsNullOrEmpty(formattedDate))
        {
            // Title with formattedDate in it.
            // NOTE(review): this inner check duplicates the outer condition and is
            // always true here — redundant.
            if (!string.IsNullOrEmpty(formattedDate))
            {
                // Year of the air date (yyyy from "yyyy-MM-dd") doubles as the "season".
                items.Add(CreateRssItem(item, formattedDate.Split('-')[0], null, episode, tvdbData, quality, sizeMultiplier, category, categoryValues, url, formattedDate));
            }
        }
        items.Add(CreateRssItem(item, null, season, episode, tvdbData, quality, sizeMultiplier, category, categoryValues, url));
        return items;
    }

    // Builds a single Newznab <item>. The enclosure points at the local
    // fake_nzb_download endpoint with the real video URL and release title
    // base64-encoded into the query string.
    private static Item CreateRssItem(ApiResultItem item, string? yearSeason, string? season, string? episode, TvdbData? tvdbData, string quality, double sizeMultiplier, string category, string[] categoryValues, string url, string? formattedDate = null)
    {
        var adjustedSize = (long)(item.Size * sizeMultiplier);
        var parsedTitle = GenerateTitle(item.Topic, item.Title, quality, formattedDate, season, episode);
        var formattedTitle = FormatTitle(parsedTitle);
        //var translatedTitle = TranslateTitle(formattedTitle, tvdbData);
        var translatedTitle = formattedTitle; // TODO see if translation is needed
        var encodedTitle = Convert.ToBase64String(Encoding.UTF8.GetBytes(translatedTitle));
        var encodedUrl = Convert.ToBase64String(Encoding.UTF8.GetBytes(url));
        // Generate the full URL for the fake_nzb_download endpoint
        var fakeDownloadUrl = $"/api/fake_nzb_download?encodedUrl={encodedUrl}&encodedTitle={encodedTitle}";
        return new Item
        {
            Title = translatedTitle,
            // "-a" suffix keeps the date-based variant's GUID distinct from the
            // season-based variant for the same URL/quality.
            Guid = new Guid { IsPermaLink = true, Value = $"{item.UrlWebsite}#{quality}{(string.IsNullOrEmpty(formattedDate) ? "" : "-a")}", },
            Link = url,
            Comments = item.UrlWebsite,
            PubDate = DateTimeOffset.FromUnixTimeSeconds(item.Timestamp).ToString("R"),
            Category = category,
            Description = item.Description,
            Enclosure = new Enclosure { Url = fakeDownloadUrl, Length = adjustedSize, Type = "application/x-nzb" },
            Attributes = NewznabUtils.GenerateAttributes(yearSeason ?? season, categoryValues)
        };
    }

    // Builds a scene-style release name from topic/title. Three paths:
    //  1. a date was extracted from the title  -> "Topic.yyyy-MM-dd...."
    //  2. the title contains "Sxx/Eyy"         -> season/episode from title or overrides
    //  3. neither                              -> overrides if given, else a NO.MATCH name
    // "MATCH.UNCERTAIN" marks all fallback results so users can spot heuristic matches.
    // TODO refactor and make this look good, It's too late right now:D
    // TODO now it's even worse :D oh god
    private static string GenerateTitle(string topic, string title, string quality, string? formattedDate, string? seasonOverride, string? episodeOverride)
    {
        if (!string.IsNullOrEmpty(formattedDate))
        {
            var cleanedTitle = EpisodeRegex().Replace(title, "").Trim();
            if (cleanedTitle == topic)
            {
                // Title adds nothing beyond the topic — drop it.
                cleanedTitle = null;
            }
            return $"{topic}.{formattedDate}.{(cleanedTitle != null ? $"{cleanedTitle}." : "")}GERMAN.{quality}.WEB.h264.MATCH.UNCERTAIN-MEDiATHEK".Replace(" ", ".");
        }
        var episodePattern = @"S\d{1,4}/E\d{1,4}";
        var match = Regex.Match(title, episodePattern);
        if (match.Success)
        {
            var seasonAndEpisode = match.Value.Replace("/", "");
            var cleanedTitle = EpisodeRegex().Replace(title, "").Replace($"({match.Value})", "").Trim();
            if (cleanedTitle == topic)
            {
                cleanedTitle = null;
            }
            if (seasonOverride is null || episodeOverride is null)
            {
                // use data from mediathek
                return $"{topic}.{seasonAndEpisode}.{(cleanedTitle != null ? $"{cleanedTitle}." : "")}GERMAN.{quality}.WEB.h264.MATCH.UNCERTAIN-MEDiATHEK".Replace(" ", ".");
            }
            // use overwrite data; pad single-digit season/episode to two digits
            var zeroBasedSeason = seasonOverride.Length >= 2 ? seasonOverride : $"0{seasonOverride}";
            var zeroBasedEpisode = episodeOverride.Length >= 2 ? episodeOverride : $"0{episodeOverride}";
            return $"{topic}.S{zeroBasedSeason}E{zeroBasedEpisode}.{(cleanedTitle != null ? $"{cleanedTitle}." : "")}GERMAN.{quality}.WEB.h264.MATCH.UNCERTAIN-MEDiATHEK".Replace(" ", ".");
        }
        if (seasonOverride is null || episodeOverride is null)
        {
            return $"{topic} - {title}.GERMAN.{quality}.WEB.h264.NO.MATCH-MEDiATHEK";
        }
        else
        {
            var cleanedTitle = EpisodeRegex().Replace(title, "").Trim();
            if (cleanedTitle == topic)
            {
                cleanedTitle = null;
            }
            var zeroBasedSeason = seasonOverride.Length >= 2 ? seasonOverride : $"0{seasonOverride}";
            var zeroBasedEpisode = episodeOverride.Length >= 2 ? episodeOverride : $"0{episodeOverride}";
            // NOTE(review): the ": title" fallback here differs from the ": \"\"" used in the
            // two branches above and re-inserts the raw title without a trailing dot —
            // looks unintentional; confirm against expected release naming.
            return $"{topic}.S{zeroBasedSeason}E{zeroBasedEpisode}.{(cleanedTitle != null ? $"{cleanedTitle}." : title)}GERMAN.{quality}.WEB.h264.MATCH.UNCERTAIN-MEDiATHEK".Replace(" ", ".");
        }
    }

    // Makes a release title filesystem/scene safe: transliterate umlauts, replace
    // "&" with "und", strip symbols, collapse whitespace to dots.
    private static string FormatTitle(string title)
    {
        // Replace German Umlaute and special characters
        title = title.Replace("ä", "ae")
            .Replace("ö", "oe")
            .Replace("ü", "ue")
            .Replace("ß", "ss")
            .Replace("Ä", "Ae")
            .Replace("Ö", "Oe")
            .Replace("Ü", "Ue");
        // Remove unwanted characters
        title = TitleRegexUnd().Replace(title, "und");
        title = TitleRegexSymbols().Replace(title, ""); // Remove various symbols
        title = TitleRegexWhitespace().Replace(title, ".").Replace("..", ".");
        return title;
    }

    // Heuristic filter cascade for one desired episode:
    //  - plausibility filter on runtime (0.65x..1.35x of the TVDB runtime),
    //  - then independent candidate sets by air date, date-in-title,
    //    date-in-description, episode-title substring, and SxxEyy in the title,
    //  - priority logic between the sets, union-deduplicated at the end.
    private static MediathekApiResponse? ApplyFilters(string apiResponse, TvdbEpisode episode)
    {
        var responseObject = JsonSerializer.Deserialize(apiResponse);
        if (responseObject?.Result?.Results == null)
        {
            return null;
        }
        var initialResults = responseObject.Result.Results;
        var resultsFilteredByRuntime = FilterByRuntime(initialResults, episode.Runtime);
        // NOTE(review): episode.Aired!.Value assumes Aired is non-null here — callers
        // must guarantee that; verify upstream.
        var resultsByAiredDate = FilterByAiredDate(resultsFilteredByRuntime, episode.Aired!.Value).Where(item => !MediathekSearchService.ShouldSkipItem(item)).ToList();
        var resultsByTitleDate = FilterByTitleDate(resultsFilteredByRuntime, episode.Aired.Value).Where(item => !MediathekSearchService.ShouldSkipItem(item)).ToList();
        var resultsByDescriptionDate = FilterByDescriptionDate(resultsFilteredByRuntime, episode.Aired.Value).Where(item => !MediathekSearchService.ShouldSkipItem(item)).ToList();
        var resultsByEpisodeTitleMatch = FilterByEpisodeTitleMatch(resultsFilteredByRuntime, episode.Name).Where(item => !MediathekSearchService.ShouldSkipItem(item)).ToList();
        List resultsBySeasonEpisodeMatch = [];
        // if more than 3 results we assume episode title match wasn't correct
        if (resultsByEpisodeTitleMatch.Count > 3)
        {
            resultsByEpisodeTitleMatch.Clear();
        }
        // if we have episode title match that is the best we got
        if (resultsByEpisodeTitleMatch.Count > 0)
        {
            // we ignore air date in this case as it is not as reliable
            resultsByAiredDate.Clear();
        }
        if (resultsByAiredDate.Count == 0 && resultsByTitleDate.Count == 0 && resultsByDescriptionDate.Count == 0 && resultsByEpisodeTitleMatch.Count == 0)
        {
            // Only trust Mediathek season/episode if no other match:
            // (NOTE(review): stray empty statement ";" after ToList() below.)
            resultsBySeasonEpisodeMatch = FilterBySeasonEpisodeMatch(resultsFilteredByRuntime, episode.SeasonNumber.ToString(), episode.EpisodeNumber.ToString()) .Where(item => !MediathekSearchService.ShouldSkipItem(item)).ToList(); ;
        }
        // HashSet to remove duplicates
        HashSet filteredResults = [.. resultsByAiredDate, .. resultsByTitleDate, .. resultsByDescriptionDate, .. resultsByEpisodeTitleMatch, .. resultsBySeasonEpisodeMatch];
        // Create a filtered API response
        var filteredApiResponse = new MediathekApiResponse
        {
            Result = new MediathekApiResult
            {
                Results = [.. filteredResults],
                QueryInfo = responseObject.Result.QueryInfo
            },
            Err = responseObject.Err
        };
        return filteredApiResponse;
    }

    // Keeps items whose duration (seconds) is within 0.65x..1.35x of the expected
    // runtime (minutes), with a 5-minute lower floor. No runtime -> no filtering.
    private static List FilterByRuntime(List results, int? runtime)
    {
        if (runtime is null || runtime is 0)
        {
            return results;
        }
        var minRuntime = Math.Max(5, (int)(runtime * 0.65)) * 60;
        var maxRuntime = (int)(runtime * 1.35) * 60;
        return results.Where(item => item.Duration >= minRuntime && item.Duration <= maxRuntime)
            .ToList();
    }

    // Matches the item's publish timestamp (converted to Europe/Berlin) against the
    // episode's air date.
    private static List FilterByAiredDate(List results, DateTime airedDate)
    {
        return results.Where(item => ConvertToBerlinTimezone(UnixTimeStampToDateTime(item.Timestamp)).Date == airedDate)
            .ToList();
    }

    // Matches a date embedded in the item title against the air date.
    private static List FilterByTitleDate(List results, DateTime airedDate)
    {
        var formattedAiredDate = airedDate.ToString("yyyy-MM-dd");
        return results.Where(item =>
        {
            var extractedDate = ExtractDate(item.Title);
            return !string.IsNullOrEmpty(extractedDate) && extractedDate == formattedAiredDate;
        }).ToList();
    }

    // Matches a date embedded in the item description against the air date.
    private static List FilterByDescriptionDate(List results, DateTime airedDate)
    {
        var formattedAiredDate = airedDate.ToString("yyyy-MM-dd");
        return results.Where(item =>
        {
            var extractedDate = ExtractDate(item.Description);
            return !string.IsNullOrEmpty(extractedDate) && extractedDate == formattedAiredDate;
        }).ToList();
    }

    // Substring match of the normalized TVDB episode name within the normalized
    // item title (case-insensitive; see NormalizeString).
    private static List FilterByEpisodeTitleMatch(List results, string episodeName)
    {
        var normalizedEpisodeName = NormalizeString(episodeName);
        return results.Where(item =>
        {
            var normalizedTitle = NormalizeString(item.Title);
            return normalizedTitle.Contains(normalizedEpisodeName, StringComparison.OrdinalIgnoreCase);
        }).ToList();
    }

    // Looks for zero-padded "Sxx" and "Eyy" markers anywhere in the title.
    private static List FilterBySeasonEpisodeMatch(List results, string season, string episode)
    {
        var zeroBasedSeason = season.Length >= 2 ? season : $"0{season}";
        var zeroBasedEpisode = episode.Length >= 2 ? episode : $"0{episode}";
        return results.Where(item =>
        {
            return item.Title.Contains($"S{zeroBasedSeason}") && item.Title.Contains($"E{zeroBasedEpisode}");
        }).ToList();
    }

    // Normalize a string to remove special characters and retain only A-Z, äöüÄÖÜß
    private static string NormalizeString(string input)
    {
        var regex = NormalizeRegex();
        return regex.Replace(input, "").ToLowerInvariant();
    }

    // Extracts the first date found in the text and returns it as "yyyy-MM-dd",
    // or string.Empty when no date is recognized. Two-digit years map to 20xx.
    // NOTE(review): an impossible numeric date (e.g. "31.02.2024") makes the
    // DateTime constructor throw — verify inputs or confirm this is acceptable.
    private static string ExtractDate(string title)
    {
        // Numeric format pattern (e.g., "24.10.2024" or "24.10.24")
        var numericDatePattern = @"(\d{1,2})\.(\d{1,2})\.(\d{2}|\d{4})";
        // Month name format pattern (e.g., "16. Juli 2024")
        var germanMonthPattern = @"(\d{1,2})\.\s*(\w+)\s+(\d{4})";
        var numericDateMatch = Regex.Match(title, numericDatePattern);
        if (numericDateMatch.Success)
        {
            int day = int.Parse(numericDateMatch.Groups[1].Value);
            int month = int.Parse(numericDateMatch.Groups[2].Value);
            int year = int.Parse(numericDateMatch.Groups[3].Value);
            if (year < 100)
            {
                year += 2000;
            }
            DateTime date = new(year, month, day);
            return date.ToString("yyyy-MM-dd");
        }
        var longMonthMatch = Regex.Match(title, germanMonthPattern);
        if (longMonthMatch.Success)
        {
            int day = int.Parse(longMonthMatch.Groups[1].Value);
            string monthName = longMonthMatch.Groups[2].Value;
            int year = int.Parse(longMonthMatch.Groups[3].Value);
            var germanCulture = new CultureInfo("de-DE");
            if (DateTime.TryParseExact($"{day} {monthName} {year}", "d MMMM yyyy", germanCulture, DateTimeStyles.None, out DateTime date))
            {
                return date.ToString("yyyy-MM-dd");
            }
        }
        return string.Empty;
    }

    // MediathekView timestamps are Unix epoch seconds (UTC).
    private static DateTime UnixTimeStampToDateTime(long unixTimeStamp)
    {
        return DateTimeOffset.FromUnixTimeSeconds(unixTimeStamp).UtcDateTime;
    }

    // German broadcasters publish on Berlin local time; convert before comparing dates.
    private static DateTime ConvertToBerlinTimezone(DateTime utcDateTime)
    {
        var berlinTimeZone = TimeZoneInfo.FindSystemTimeZoneById("Europe/Berlin");
        return TimeZoneInfo.ConvertTimeFromUtc(utcDateTime, berlinTimeZone);
    }

    [GeneratedRegex(@"[&]")]
    private static partial Regex TitleRegexUnd();
[GeneratedRegex(@"[/:;""'@#?$%^*+=!<>],()")] private static partial Regex TitleRegexSymbols(); [GeneratedRegex(@"\s+")] private static partial Regex TitleRegexWhitespace(); [GeneratedRegex(@"Folge\s*\d+:\s*")] private static partial Regex EpisodeRegex(); [GeneratedRegex("[^a-zA-ZäöüÄÖÜß]")] private static partial Regex NormalizeRegex(); } ================================================ FILE: MediathekArrServer/Services/MediathekSearchService.cs ================================================ using System.Collections.Concurrent; using System.Globalization; using System.Text; using System.Text.Json; using System.Text.RegularExpressions; using MediathekArrLib.Models; using MediathekArrLib.Models.Newznab; using MediathekArrLib.Models.Rulesets; using MediathekArrLib.Utilities; using Microsoft.Extensions.Caching.Memory; using Guid = MediathekArrLib.Models.Newznab.Guid; using MatchType = MediathekArrLib.Models.Rulesets.MatchType; namespace MediathekArrServer.Services; public partial class MediathekSearchService(IHttpClientFactory httpClientFactory, IMemoryCache cache, ItemLookupService itemLookupService) { private readonly IMemoryCache _cache = cache; private readonly ItemLookupService _itemLookupService = itemLookupService; private readonly HttpClient _httpClient = httpClientFactory.CreateClient("MediathekClient"); private readonly TimeSpan _cacheTimeSpan = TimeSpan.FromMinutes(55); private static readonly string[] _skipKeywords = ["Audiodeskription", "Hörfassung", "(klare Sprache)", "(Gebärdensprache)", "Trailer", "Outtakes:"]; private static readonly string[] _queryFields = ["topic", "title"]; private readonly ConcurrentDictionary> _rulesetsByTopic = new(); public async Task UpdateRulesetsAsync() { var allRulesets = new List(); int currentPage = 1; while (true && currentPage < 100) { var response = await _httpClient.GetAsync($"https://mediathekarr.pcjones.de/metadata/api/rulesets.php?page={currentPage++}"); if (response.IsSuccessStatusCode) { var responseContent = 
await response.Content.ReadAsStringAsync(); var rulesetResponse = JsonSerializer.Deserialize(responseContent); if (rulesetResponse?.Rulesets != null) { allRulesets.AddRange(rulesetResponse.Rulesets); } if (rulesetResponse?.Pagination?.CurrentPage >= rulesetResponse?.Pagination.TotalPages) { break; } } else { // Exit if the request fails Console.WriteLine("Failed to fetch rulesets from the API."); break; } } _rulesetsByTopic.Clear(); foreach (var group in allRulesets.GroupBy(r => r.Topic)) { // Sort each group by priority before adding it _rulesetsByTopic[group.Key] = [.. group.OrderBy(ruleset => ruleset.Priority)]; } } private async Task FetchMediathekViewApiResponseAsync(List queries, int size) { var requestBody = new { queries, sortBy = "filmlisteTimestamp", sortOrder = "desc", future = true, offset = 0, size }; var requestContent = new StringContent(JsonSerializer.Serialize(requestBody), Encoding.UTF8); var response = await _httpClient.PostAsync("https://mediathekviewweb.de/api/query", requestContent); if (response.IsSuccessStatusCode) { return await response.Content.ReadAsStringAsync(); } return string.Empty; } public async Task FetchSearchResultsFromApiById(TvdbData tvdbData, string? season, string? episodeNumber, int limit, int offset) { var cacheKey = $"tvdb_{tvdbData.Id}_{season ?? "null"}_{episodeNumber ?? "null"}_{limit}_{offset}"; if (_cache.TryGetValue(cacheKey, out string? cachedResponse)) { return cachedResponse ?? ""; } List? desiredEpisodes = GetDesiredEpisodes(tvdbData, season, episodeNumber); if (season != null && desiredEpisodes?.Count == 0) { var response = NewznabUtils.SerializeRss(NewznabUtils.GetEmptyRssResult()); _cache.Set(cacheKey, response, _cacheTimeSpan); return response; } var mediathekViewRequestCacheKey = $"mediathekapi_{tvdbData.Id}"; string apiResponse; if (_cache.TryGetValue(mediathekViewRequestCacheKey, out string? cachedApiResponse)) { apiResponse = cachedApiResponse ?? 
string.Empty; } else { var queries = new List { new { fields = _queryFields, query = tvdbData.GermanName ?? tvdbData.Name } }; apiResponse = await FetchMediathekViewApiResponseAsync(queries, 10000); if (string.IsNullOrEmpty(apiResponse)) { return NewznabUtils.SerializeRss(NewznabUtils.GetEmptyRssResult()); } _cache.Set(mediathekViewRequestCacheKey, apiResponse, _cacheTimeSpan); } var results = JsonSerializer.Deserialize(apiResponse)?.Result.Results ?? []; var (matchedEpisodes, _) = await ApplyRulesetFilters(results, tvdbData); var matchedDesiredEpisodes = ApplyDesiredEpisodeFilter(matchedEpisodes, desiredEpisodes); List? newznabItems; if (matchedDesiredEpisodes.Count == 0 && desiredEpisodes?.Count > 0) { // Fallback to best effort matching newznabItems = desiredEpisodes .SelectMany(episode => MediathekSearchFallbackHandler.GetFallbackSearchResultItemsById(apiResponse, episode, tvdbData)) .ToList(); } else { newznabItems = matchedDesiredEpisodes.SelectMany(GenerateRssItems).ToList(); } var newznabRssResponse = ConvertNewznabItemsToRss(newznabItems, limit, offset); _cache.Set(cacheKey, newznabRssResponse, _cacheTimeSpan); return newznabRssResponse; } private static List? GetDesiredEpisodes(TvdbData tvdbData, string? season, string? episodeNumber) { List? desiredEpisodes; if (season != null) { desiredEpisodes = []; if (episodeNumber is null) { desiredEpisodes.AddRange(tvdbData.FindEpisodesBySeason(season)); if (season.Length == 4 && int.TryParse(season, out var year)) { if (year >= 1900 && year <= 2100) { desiredEpisodes.AddRange(tvdbData.FindEpisodesByAirYear(year)); desiredEpisodes = desiredEpisodes.Distinct().ToList(); } } } else { TvdbEpisode? 
desiredEpisode; if (season?.Length == 4 && episodeNumber.Contains('/')) { var episodeNumberSplitted = episodeNumber?.Split('/'); if (episodeNumberSplitted?.Length == 2 && DateTime.TryParse($"{season}-{episodeNumberSplitted[0]}-{episodeNumberSplitted[1]}", out DateTime searchAirDate)) { desiredEpisode = tvdbData.FindEpisodeByAirDate(searchAirDate); } else { desiredEpisode = null; } } else { desiredEpisode = tvdbData.FindEpisodeBySeasonAndNumber(season, episodeNumber); } if (desiredEpisode != null) { desiredEpisodes.Add(desiredEpisode); } } } else { desiredEpisodes = null; } return desiredEpisodes; } private static string ConvertNewznabItemsToRss(List items, int limit, int offset) { if (items == null || items.Count == 0) { return NewznabUtils.SerializeRss(NewznabUtils.GetEmptyRssResult()); } var paginatedItems = items.Skip(offset).Take(limit).ToList(); var rss = new Rss { Channel = new Channel { Title = "MediathekArr", Description = "MediathekArr API results", Response = new Response { Offset = offset, Total = items.Count }, Items = paginatedItems, } }; return NewznabUtils.SerializeRss(rss); } private static List ApplyDesiredEpisodeFilter(List matchedEpisodes, List? desiredEpisodes) { if (desiredEpisodes is null) { return matchedEpisodes; } return matchedEpisodes.Where(matched => desiredEpisodes.Any(desiredEpisode => desiredEpisode.SeasonNumber == matched.Episode.SeasonNumber && desiredEpisode.EpisodeNumber == matched.Episode.EpisodeNumber ) ).ToList(); } private async Task MatchesSeasonAndEpisode(ApiResultItem item, Ruleset ruleset) { // Fetch TVDB episode information var tvdbData = await _itemLookupService.GetShowInfoByTvdbId(ruleset.Media.TvdbId); if (tvdbData?.Episodes == null || tvdbData.Episodes.Count == 0) { return null; } // Extract season and episode from the item using the ruleset string? season = ExtractValueUsingRegex(item, ruleset.SeasonRegex); string? 
episode = ExtractValueUsingRegex(item, ruleset.EpisodeRegex); if (string.IsNullOrEmpty(season) || string.IsNullOrEmpty(episode)) { return null; } if (!int.TryParse(season, out var seasonNumber) || !int.TryParse(episode, out var episodeNumber)) { return null; // Invalid season or episode format } // Find the matching episode in the TVDB data var matchedEpisode = tvdbData.FindEpisodeBySeasonAndNumber(seasonNumber, episodeNumber); if (matchedEpisode == null) { return null; // No matching episode found } return new MatchedEpisodeInfo( Episode: matchedEpisode, Item: item, ShowName: string.IsNullOrEmpty(tvdbData.Name) ? tvdbData.GermanName : tvdbData.Name, MatchedTitle: $"S{season}E{episode}" ); } /// /// Extracts a value from the item using the specified regex rule. /// /// The API result item. /// The regex rule. /// The extracted value, or null if not found. private static string? ExtractValueUsingRegex(ApiResultItem item, string? pattern) { if (string.IsNullOrEmpty(pattern)) { return null; } string fieldValue = GetFieldValue(item, "title"); if (string.IsNullOrEmpty(fieldValue)) { return null; } var match = Regex.Match(fieldValue, pattern); return match.Success && match.Groups.Count > 1 ? 
match.Groups[1].Value : null; } private async Task MatchesItemTitleIncludes(ApiResultItem item, Ruleset ruleset) { // Fetch TVDB episode information var tvdbData = await _itemLookupService.GetShowInfoByTvdbId(ruleset.Media.TvdbId); if (tvdbData?.Episodes == null || tvdbData.Episodes.Count == 0) { return null; } // Construct the title based on ruleset var constructedTitle = BuildTitleFromRegexRules(item, ruleset.TitleRegexRules); if (constructedTitle is null) { return null; } // Check if the constructed title is included in any episode title var matchedEpisode = tvdbData.Episodes .FirstOrDefault(episode => FormatTitle(episode.Name) .Contains(FormatTitle(constructedTitle), StringComparison.OrdinalIgnoreCase)); if (matchedEpisode is null) { return null; } return new MatchedEpisodeInfo( Episode: matchedEpisode, Item: item, ShowName: string.IsNullOrEmpty(tvdbData.Name) ? tvdbData.GermanName : tvdbData.Name, MatchedTitle: constructedTitle ); } private async Task MatchesItemTitleExact(ApiResultItem item, Ruleset ruleset) { // Fetch TVDB episode information var tvdbData = await _itemLookupService.GetShowInfoByTvdbId(ruleset.Media.TvdbId); if (tvdbData?.Episodes == null || tvdbData.Episodes.Count == 0) { return null; } // Construct the title based on ruleset var constructedTitle = BuildTitleFromRegexRules(item, ruleset.TitleRegexRules); if (constructedTitle is null) { return null; } var formattedConstructedTitle = FormatTitle(constructedTitle); // Check if the constructed title matches any episode title exactly var matchedEpisodes = tvdbData.Episodes .Where(episode => FormatTitle(episode.Name) .Equals(formattedConstructedTitle, StringComparison.OrdinalIgnoreCase)) .ToArray(); TvdbEpisode? matchedEpisode = GuessCorrectMatch(item, matchedEpisodes); if (matchedEpisode != null) { return new MatchedEpisodeInfo( Episode: matchedEpisode, Item: item, ShowName: string.IsNullOrEmpty(tvdbData.Name) ? 
tvdbData.GermanName : tvdbData.Name, MatchedTitle: constructedTitle ); } return null; } private static TvdbEpisode? GuessCorrectMatch(ApiResultItem item, TvdbEpisode[] matchedEpisodes) { if (matchedEpisodes.Length == 1) { return matchedEpisodes[0]; } else // multiple matched episodes found, we try to guess which one is the best { // Try to match by aired date var matchedEpisodeByAirDate = matchedEpisodes.FirstOrDefault(episode => episode.Aired == DateTimeOffset.FromUnixTimeSeconds(item.Timestamp).UtcDateTime.Date); if (matchedEpisodeByAirDate != null) { return matchedEpisodeByAirDate; } // chose the newest one return matchedEpisodes.OrderByDescending(episode => episode.Aired).FirstOrDefault(); } } private async Task MatchesItemTitleEqualsAirdate(ApiResultItem item, Ruleset ruleset) { // Fetch TVDB episode information var tvdbData = await _itemLookupService.GetShowInfoByTvdbId(ruleset.Media.TvdbId); if (tvdbData?.Episodes == null || tvdbData.Episodes.Count == 0) { return null; } // Construct the title based on ruleset var constructedTitle = BuildTitleFromRegexRules(item, ruleset.TitleRegexRules); if (constructedTitle is null) { return null; } if (TryParseDate(constructedTitle, out var parsedDate)) { // Find the episode by airdate var matchedEpisode = tvdbData.FindEpisodeByAirDate(parsedDate); if (matchedEpisode != null) { return new MatchedEpisodeInfo( Episode: matchedEpisode, Item: item, ShowName: string.IsNullOrEmpty(tvdbData.Name) ? tvdbData.GermanName : tvdbData.Name, MatchedTitle: constructedTitle ); } } return null; } private static bool TryParseDate(string dateString, out DateTime date) { // Attempt parsing with various formats var formats = new[] { "d. MMMM yyyy", // e.g., "7. Juni 2024" "dd.MM.yyyy", // e.g., "31.12.2017" "yyyy-MM-dd", // e.g., "2017-12-01" "yyyyMMdd", // e.g., "20171201" "dd. MMMM yyyy", // e.g., "07. 
Juni 2024" }; return DateTime.TryParseExact( dateString, formats, CultureInfo.GetCultureInfo("de-DE"), DateTimeStyles.None, out date ); } private static string? BuildTitleFromRegexRules(ApiResultItem item, List titleRegexRules) { var stringBuilder = new StringBuilder(); foreach (var rule in titleRegexRules) { switch (rule.Type) { case TitleRegexRuleType.Static: // Append the static value directly if (!string.IsNullOrEmpty(rule.Value)) { stringBuilder.Append(rule.Value); } break; case TitleRegexRuleType.Regex: // Extract substring using the regex pattern from the specified field if (!string.IsNullOrEmpty(rule.Pattern) && !string.IsNullOrEmpty(rule.Field)) { var fieldValue = GetFieldValue(item, rule.Field); if (!string.IsNullOrEmpty(fieldValue)) { var match = Regex.Match(fieldValue, rule.Pattern); if (match.Success && match.Groups[^1].Length > 0) { // Use the last group stringBuilder.Append(match.Groups[^1].Value); } else { // abort if regex match failed return null; } } } break; } } return stringBuilder.ToString(); } private static string GetFieldValue(ApiResultItem item, string fieldName) { return fieldName switch { "channel" => item.Channel, "topic" => item.Topic, "title" => item.Title, "description" => item.Description, "timestamp" => item.Timestamp.ToString(), "duration" => item.Duration.ToString(), "size" => item.Size.ToString(), "url_website" => item.UrlWebsite, "url_video" => item.UrlVideo, "url_video_low" => item.UrlVideoLow, "url_video_hd" => item.UrlVideoHd, _ => string.Empty }; } private static bool FilterMatches(ApiResultItem item, Filter filter) { string? 
attributeValue = GetFieldValue(item, filter.Attribute); return filter.Type switch { MatchType.ExactMatch => attributeValue.Equals(filter.Value.ToString(), StringComparison.OrdinalIgnoreCase), MatchType.Contains => attributeValue.Contains(filter.Value.ToString(), StringComparison.OrdinalIgnoreCase), MatchType.Regex => Regex.IsMatch(attributeValue, filter.Value.ToString()), MatchType.GreaterThan => double.TryParse(attributeValue, out var attrValue) && double.TryParse(filter.Value.ToString(), out var filterValue) && attrValue > filterValue * 60, MatchType.LessThan => double.TryParse(attributeValue, out var attrValue) && double.TryParse(filter.Value.ToString(), out var filterValue) && attrValue < filterValue * 60, _ => false, }; } private List GetRulesetsForTopic(string topic) { return _rulesetsByTopic.TryGetValue(topic, out var rulesets) ? rulesets : []; } private async Task<(List matchedEpisodes, List unmatchedFilteredResultItems)> ApplyRulesetFilters(List results, TvdbData? tvdbData = null) { var matchedFilteredResults = new List(); var unmatchedFilteredResults = new List(results); foreach (var item in results) { if(ShouldSkipItem(item)) { unmatchedFilteredResults.Remove(item); continue; } // Get applicable rulesets for the topic or specific TVDB data var rulesets = tvdbData is null ? GetRulesetsForTopic(item.Topic) : GetRulesetsForTopic(item.Topic).Where(r => r.Media?.TvdbId == tvdbData.Id).ToList(); foreach (var ruleset in rulesets) { if (!ruleset.Filters.All(filter => FilterMatches(item, filter))) { unmatchedFilteredResults.Remove(item); continue; // Skip this ruleset if any filter fails } MatchedEpisodeInfo? 
matchInfo = null; switch (ruleset.MatchingStrategy) { case MatchingStrategy.SeasonAndEpisodeNumber: matchInfo = await MatchesSeasonAndEpisode(item, ruleset); break; case MatchingStrategy.ItemTitleIncludes: matchInfo = await MatchesItemTitleIncludes(item, ruleset); break; case MatchingStrategy.ItemTitleExact: matchInfo = await MatchesItemTitleExact(item, ruleset); break; case MatchingStrategy.ItemTitleEqualsAirdate: matchInfo = await MatchesItemTitleEqualsAirdate(item, ruleset); break; } if (matchInfo != null) { matchedFilteredResults.Add(matchInfo); break; } else { unmatchedFilteredResults.Remove(item); } } } return (matchedFilteredResults, unmatchedFilteredResults); } public async Task FetchSearchResultsForRssSync(int limit, int offset) { var cacheKey = $"rss_{limit}_{offset}"; // Return cached response if it exists if (_cache.TryGetValue(cacheKey, out string? cachedResponse)) { return cachedResponse ?? ""; } var mediathekViewRequestCacheKey = "rss_mediathekview_results"; List results; if (_cache.TryGetValue(mediathekViewRequestCacheKey, out List? cachedResults)) { results = cachedResults ?? []; } else { var queries = new List(); var apiResponse = await FetchMediathekViewApiResponseAsync(queries, 6000); if (string.IsNullOrEmpty(apiResponse)) { return NewznabUtils.SerializeRss(NewznabUtils.GetEmptyRssResult()); } results = JsonSerializer.Deserialize(apiResponse)?.Result.Results ?? []; _cache.Set(mediathekViewRequestCacheKey, results, TimeSpan.FromMinutes(20)); } // Deserialize the API response and apply ruleset filters var (matchedEpisodes, unmatchedFilteredResultItems) = await ApplyRulesetFilters(results); List? newznabItemsByRuleset = matchedEpisodes.SelectMany(GenerateRssItems).ToList(); List? newznabItemsByFallback = MediathekSearchFallbackHandler.GetFallbackSearchResultItemsByString(unmatchedFilteredResultItems, null); // Combine the results from ruleset matching and fallback handler var newznabRssResponse = ConvertNewznabItemsToRss([.. 
newznabItemsByRuleset, .. newznabItemsByFallback], limit, offset); // Cache the response and return it _cache.Set(cacheKey, newznabRssResponse, _cacheTimeSpan); return newznabRssResponse; } public async Task FetchSearchResultsFromApiByString(string? q, string? season, int limit, int offset) { var cacheKey = $"q_{q ?? "null"}_{season ?? "null"}_{limit}_{offset}"; // Return cached response if it exists if (_cache.TryGetValue(cacheKey, out string? cachedResponse)) { return cachedResponse ?? ""; } var mediathekViewRequestCacheKey = $"mediathekapi_{q ?? "null"}_{season ?? "null"}"; string apiResponse; if (_cache.TryGetValue(mediathekViewRequestCacheKey, out string? cachedApiResponse)) { apiResponse = cachedApiResponse ?? string.Empty; } else { var queries = new List(); if (q != null) { queries.Add(new { fields = _queryFields, query = q }); } if (!string.IsNullOrEmpty(season)) { var zeroBasedSeason = season.Length >= 2 ? season : $"0{season}"; queries.Add(new { fields = new[] { "title" }, query = $"S{zeroBasedSeason}" }); } apiResponse = await FetchMediathekViewApiResponseAsync(queries, 1500); if (string.IsNullOrEmpty(apiResponse)) { return NewznabUtils.SerializeRss(NewznabUtils.GetEmptyRssResult()); } _cache.Set(mediathekViewRequestCacheKey, apiResponse, _cacheTimeSpan); } // Deserialize the API response and apply ruleset filters var results = JsonSerializer.Deserialize(apiResponse)?.Result.Results ?? []; var (matchedEpisodes, unmatchedFilteredResultItems) = await ApplyRulesetFilters(results); List? newznabItemsByRuleset = matchedEpisodes.SelectMany(GenerateRssItems).ToList(); List? newznabItemsByFallback = MediathekSearchFallbackHandler.GetFallbackSearchResultItemsByString(unmatchedFilteredResultItems, season); // Combine the results from ruleset matching and fallback handler var newznabRssResponse = ConvertNewznabItemsToRss([.. newznabItemsByRuleset, .. 
newznabItemsByFallback], limit, offset);

        // Cache the response and return it
        _cache.Set(cacheKey, newznabRssResponse, _cacheTimeSpan);
        return newznabRssResponse;
    }

    /// <summary>
    /// Builds newznab RSS items for every video quality the Mediathek item offers.
    /// NOTE(review): the generic type arguments in this region were stripped by the
    /// extraction; restored as List&lt;Item&gt; from CreateRssItem's return type — confirm.
    /// </summary>
    private List<Item> GenerateRssItems(MatchedEpisodeInfo matchedEpisodeInfo)
    {
        var items = new List<Item>();

        // Newznab base categories shared by every quality variant.
        string[] categories = ["5000", "2000"];

        if (!string.IsNullOrEmpty(matchedEpisodeInfo.Item.UrlVideoHd))
        {
            items.AddRange(CreateRssItems(matchedEpisodeInfo, "1080p", 1.6, "TV > HD", [.. categories, "5040", "2040"], matchedEpisodeInfo.Item.UrlVideoHd));
        }

        if (!string.IsNullOrEmpty(matchedEpisodeInfo.Item.UrlVideo))
        {
            items.AddRange(CreateRssItems(matchedEpisodeInfo, "720p", 1.0, "TV > HD", [.. categories, "5040", "2040"], matchedEpisodeInfo.Item.UrlVideo));
        }

        if (!string.IsNullOrEmpty(matchedEpisodeInfo.Item.UrlVideoLow))
        {
            items.AddRange(CreateRssItems(matchedEpisodeInfo, "480p", 0.4, "TV > SD", [.. categories, "5030", "2030"], matchedEpisodeInfo.Item.UrlVideoLow));
        }

        return items;
    }

    /// <summary>
    /// Creates one RSS item for the given quality, plus a second daily-style item
    /// when the season number looks like a year (daily/soap shows).
    /// </summary>
    private List<Item> CreateRssItems(MatchedEpisodeInfo matchedEpisodeInfo, string quality, double sizeMultiplier, string category, string[] categoryValues, string url)
    {
        var items = new List<Item>
        {
            CreateRssItem(matchedEpisodeInfo, quality, sizeMultiplier, category, categoryValues, url, EpisodeType.Standard)
        };

        // also create daily type if season is a year
        if (matchedEpisodeInfo.Episode.SeasonNumber > 1950)
        {
            items.Add(CreateRssItem(matchedEpisodeInfo, quality, sizeMultiplier, category, categoryValues, url, EpisodeType.Daily));
        }

        return items;
    }

    /// <summary>
    /// Normalizes a release title: transliterates German umlauts, replaces "&amp;"
    /// with "and", strips punctuation, and collapses whitespace into dots.
    /// </summary>
    private static string FormatTitle(string title)
    {
        // Replace German Umlaute and special characters
        title = title.Replace("ä", "ae")
            .Replace("ö", "oe")
            .Replace("ü", "ue")
            .Replace("ß", "ss")
            .Replace("Ä", "Ae")
            .Replace("Ö", "Oe")
            .Replace("Ü", "Ue");

        // Remove unwanted characters
        title = TitleRegexUnd().Replace(title, "and");
        title = TitleRegexSymbols().Replace(title, ""); // Remove various symbols

        // Collapse whitespace into dots.
        // FIX: a single Replace("..", ".") left "..." as ".."; loop until no
        // consecutive dots remain.
        title = TitleRegexWhitespace().Replace(title, ".");
        while (title.Contains(".."))
        {
            title = title.Replace("..", ".");
        }

        return title;
    }

    /// <summary>
    /// Builds a single newznab item for one quality/episode-type combination.
    /// The enclosure points at the fake_nzb_download endpoint, which the downloader
    /// decodes back into the direct Mediathek video URL.
    /// </summary>
    private static Item CreateRssItem(MatchedEpisodeInfo matchedEpisodeInfo, string quality, double sizeMultiplier, string category, string[] categoryValues, string url, EpisodeType episodeType)
    {
        // Only one size is reported; approximate the other qualities by multiplier.
        var adjustedSize = (long)(matchedEpisodeInfo.Item.Size * sizeMultiplier);
        var parsedTitle = GenerateTitle(matchedEpisodeInfo, quality, episodeType);
        var formattedTitle = FormatTitle(parsedTitle);
        var translatedTitle = formattedTitle;

        var encodedTitle = Convert.ToBase64String(Encoding.UTF8.GetBytes(translatedTitle));
        var encodedUrl = Convert.ToBase64String(Encoding.UTF8.GetBytes(url));

        // Generate the full URL for the fake_nzb_download endpoint
        var fakeDownloadUrl = $"/api/fake_nzb_download?encodedUrl={encodedUrl}&encodedTitle={encodedTitle}";
        var item = matchedEpisodeInfo.Item;

        return new Item
        {
            Title = translatedTitle,
            Guid = new Guid
            {
                IsPermaLink = true,
                // FIX: the "-d" disambiguation suffix was appended to the Standard
                // variant instead of the Daily one; condition inverted so the
                // Daily item's GUID carries the suffix.
                Value = $"{item.UrlWebsite}#{quality}{(episodeType == EpisodeType.Daily ? "-d" : "")}",
            },
            Link = url,
            Comments = item.UrlWebsite,
            PubDate = DateTimeOffset.FromUnixTimeSeconds(item.Timestamp).ToString("R"),
            Category = category,
            Description = item.Description,
            Enclosure = new Enclosure
            {
                Url = fakeDownloadUrl,
                Length = adjustedSize,
                Type = "application/x-nzb"
            },
            Attributes = NewznabUtils.GenerateAttributes(matchedEpisodeInfo.Episode.PaddedSeason, categoryValues)
        };
    }

    /// <summary>
    /// Builds the scene-style release title: daily shows use the air date,
    /// standard shows use SxxEyy numbering.
    /// </summary>
    private static string GenerateTitle(MatchedEpisodeInfo matchedEpisodeInfo, string quality, EpisodeType episodeType)
    {
        var episode = matchedEpisodeInfo.Episode;
        if (episodeType == EpisodeType.Daily)
        {
            return $"{matchedEpisodeInfo.ShowName}.{episode.Aired:yyyy-MM-dd}.{episode.Name}.GERMAN.{quality}.WEB.h264-MEDiATHEK".Replace(" ", ".");
        }

        return $"{matchedEpisodeInfo.ShowName}.S{episode.PaddedSeason}E{episode.PaddedEpisode}.{episode.Name}.GERMAN.{quality}.WEB.h264-MEDiATHEK".Replace(" ", ".");
    }

    /// <summary>
    /// Skips HLS playlists (.m3u8 cannot be downloaded as a single file) and items
    /// whose title contains one of the configured skip keywords.
    /// </summary>
    public static bool ShouldSkipItem(ApiResultItem item)
    {
        return item.UrlVideo.EndsWith(".m3u8") || _skipKeywords.Any(item.Title.Contains);
    }

    [GeneratedRegex(@"[&]")]
    private static partial
Regex TitleRegexUnd();

    [GeneratedRegex(@"[/:;,""'’@#?$%^*+=!|<>,()]")]
    private static partial Regex TitleRegexSymbols();

    [GeneratedRegex(@"\s+")]
    private static partial Regex TitleRegexWhitespace();
}

================================================ FILE: MediathekArrServer/Services/RulesetBackgroundService.cs ================================================

namespace MediathekArrServer.Services;

using Microsoft.Extensions.Hosting;
using System;
using System.Threading;
using System.Threading.Tasks;

/// <summary>
/// Hosted service that refreshes the identification rulesets every 30 minutes.
/// A failed refresh is logged and retried at the next interval; it never stops the loop.
/// NOTE(review): the generic type arguments were stripped by the extraction; restored
/// as ILogger&lt;RulesetBackgroundService&gt; / GetRequiredService&lt;MediathekSearchService&gt;
/// (the dumped code does not compile without them) — confirm against the repository.
/// </summary>
public class RulesetBackgroundService(IServiceProvider serviceProvider, ILogger<RulesetBackgroundService> logger) : BackgroundService
{
    private readonly TimeSpan _refreshInterval = TimeSpan.FromMinutes(30);

    protected override async Task ExecuteAsync(CancellationToken stoppingToken)
    {
        while (!stoppingToken.IsCancellationRequested)
        {
            // New scope per iteration so the scoped search service is disposed each round.
            using (var scope = serviceProvider.CreateScope())
            {
                var searchService = scope.ServiceProvider.GetRequiredService<MediathekSearchService>();
                try
                {
                    logger.LogInformation("Starting ruleset update at {Time}", DateTime.UtcNow);
                    await searchService.UpdateRulesetsAsync();
                    logger.LogInformation("Ruleset update completed successfully at {Time}", DateTime.UtcNow);
                }
                catch (Exception ex)
                {
                    logger.LogError(ex, "Error updating rulesets at {Time}", DateTime.UtcNow);
                }
            }
            await Task.Delay(_refreshInterval, stoppingToken);
        }
    }
}

================================================ FILE: MediathekArrServer/appsettings.Development.json ================================================

{ "Logging": { "LogLevel": { "Default": "Information", "Microsoft.AspNetCore": "Warning" } } }

================================================ FILE: MediathekArrServer/appsettings.Production.json ================================================

{ "Kestrel": { "Endpoints": { "Http": { "Url": "http://[::]:5008" } } } }

================================================ FILE: MediathekArrServer/appsettings.json ================================================

{ "Logging": { "LogLevel": { "Default": "Information", "Microsoft.AspNetCore":
"Warning" } }, "AllowedHosts": "*" } ================================================ FILE: README.md ================================================ mediathekarr # MediathekArr work in progress, please report bugs and ideas Thanks to https://github.com/mediathekview/mediathekviewweb for the Mediathek API Thanks to https://github.com/PCJones/UmlautAdaptarr for the German Title API Thanks to https://thetvdb.com for the metadata API Example screenshot: ![grafik](https://github.com/user-attachments/assets/654c42fa-4eab-4b6e-b1c7-9b23192c7a98) ## Features | Feature | Status | |-------------------------------------------------------------------|---------------| | Prowlarr & NZB Hydra Support |✓ | | Sonarr (TV Show) Support |✓ | | Radarr (Movie) Support* |limited*, WIP | | Subtitle Support |✓ | | MKV Creation |✓ | | Web-Interface with installation wizard |✓ | | Advanced filter and matching system for TV shows, seasons and episodes... due to the horrendous lack of consistency and metadata in ARD/ZDF Mediatheken|✓ | | Ideas? | Wishes? | \* You can find a few movies via interactive search, but not a lot. You can however find all movies via a text search in prowlarr and send the result to radarr. ## Installation using docker ## Important Note: **You should use the beta image until 1.0 is released. Latest/Main is not working.** 1. Configure docker-compose.yml - you can find the most recent beta docker compose [here](https://github.com/PCJones/MediathekArr/releases/latest) 2. Find out your wizard URL: Depending on your docker network setup either `http://localhost:5007`, `http://mediathekarr:5007` or `http://YOUR_HOST_IP:5007` 3. Open the wizard and follow the wizard's instructions :-) 4. You are done! In case you encounter any problems please don't hesitate to create an issue or to [contact me](https://github.com/PCJones/MediathekArr/tree/main?tab=readme-ov-file#kontakt--support).
## How does it work - Indexer: MediathekArr is pretending to be a usenet indexer, but is actually just fetching and parsing search results from MediathekViewWeb - Downloader: MediathekArr is pretending to be a SABnzbd usenet downloader but is actually just downloading the video and subtitles via HTTP directly from the Mediatheken ## Kontakt & Support - Öffne gerne ein Issue auf GitHub falls du Unterstützung benötigst. - [Telegram](https://t.me/pc_jones) - [UsenetDE Discord Server](https://discord.gg/src6zcH4rr) -> #mediathekarr Channel ## Spenden Über eine Spende freue ich mich natürlich immer :D Buy Me A Coffee Coindrop.to me Für andere Spendenmöglichkeiten gerne auf Discord oder Telegram melden - danke! ## Star History [![Star History Chart](https://api.star-history.com/svg?repos=pcjones/mediathekarr&type=Date)](https://star-history.com/#pcjones/mediathekarr&Date) ================================================ FILE: api/v1/db.php ================================================ setAttribute(PDO::ATTR_ERRMODE, PDO::ERRMODE_EXCEPTION); if ($isFirstRun) { createTables($db); displayApiKeyForm($db); } return $db; } function createTables($db) { // Create table to store the API key $createApiKeyTableQuery = "CREATE TABLE IF NOT EXISTS api_key ( id INTEGER PRIMARY KEY, key TEXT NOT NULL )"; // Create table to store the API token and its expiration $createTokenTableQuery = "CREATE TABLE IF NOT EXISTS api_token ( id INTEGER PRIMARY KEY, token TEXT NOT NULL, expiration_date TEXT NOT NULL )"; $createSeriesCacheTableQuery = "CREATE TABLE IF NOT EXISTS series_cache ( series_id INTEGER PRIMARY KEY, name TEXT, german_name TEXT, aliases TEXT, last_updated TEXT, next_aired TEXT, last_aired TEXT, cache_expiry TEXT )"; $createEpisodesTableQuery = "CREATE TABLE IF NOT EXISTS episodes ( id INTEGER PRIMARY KEY, series_id INTEGER, name TEXT, aired TEXT, runtime INTEGER, season_number INTEGER, episode_number INTEGER, FOREIGN KEY(series_id) REFERENCES series_cache(series_id) )";
$db->exec($createApiKeyTableQuery);
    $db->exec($createTokenTableQuery);
    $db->exec($createSeriesCacheTableQuery);
    $db->exec($createEpisodesTableQuery);
}

// Shows (and on POST, stores) the one-time TVDB API key form, then ends the request.
function displayApiKeyForm($db) {
    if ($_SERVER['REQUEST_METHOD'] === 'POST' && isset($_POST['api_key'])) {
        $apiKey = trim($_POST['api_key']);
        if ($apiKey) {
            // Store the API key in the database
            $stmt = $db->prepare("INSERT INTO api_key (id, key) VALUES (1, :key)");
            $stmt->execute(['key' => $apiKey]);
            echo "API key saved successfully. You can now use the application.";
            exit;
        } else {
            echo "Please enter a valid API key.";
        }
    }
    // NOTE(review): the HTML form markup inside this string appears to have been
    // stripped by the extraction; only its text nodes remain. Do not edit the
    // string here — restore it from the original repository.
    echo ' Set TVDB API Key

Enter TVDB API Key

';
    exit;
}

// Returns the stored TVDB API key, or falls through to the setup form (which exits).
function getApiKey($db) {
    // Retrieve the API key from the database
    $stmt = $db->query("SELECT key FROM api_key WHERE id = 1");
    $result = $stmt->fetch(PDO::FETCH_ASSOC);
    if ($result) {
        return $result['key'];
    } else {
        // Show API key form if not set
        displayApiKeyForm($db);
    }
}
?>

================================================ FILE: api/v1/get_show.php ================================================

$cacheExpiry;
    } catch (Exception $e) {
        return true; // If date parsing fails, consider cache expired
    }
}

// Main function to fetch series information
// Serves from the series_cache table when the cached row is still fresh,
// otherwise delegates to fetchAndCacheSeriesData(). Returns an associative
// array ready for json_encode (status/data[/debug]).
function getSeriesData($db, $tvdbId, $apiKey, $debug = false) {
    try {
        // Fetch from cache
        $stmt = $db->prepare("SELECT * FROM series_cache WHERE series_id = :tvdb_id");
        $stmt->bindValue(':tvdb_id', (int)$tvdbId, PDO::PARAM_INT);
        $stmt->execute();
        $seriesData = $stmt->fetch(PDO::FETCH_ASSOC);
        $cached = false;
        $cacheExpiry = null;
        if ($seriesData) {
            $cached = !isCacheExpired($seriesData);
            $cacheExpiry = $seriesData['cache_expiry'];
        }
        // Return cached data if available and not expired
        if ($cached) {
            $episodesStmt = $db->prepare("SELECT * FROM episodes WHERE series_id = :tvdb_id");
            $episodesStmt->bindValue(':tvdb_id', (int)$tvdbId, PDO::PARAM_INT);
            $episodesStmt->execute();
            $episodes = $episodesStmt->fetchAll(PDO::FETCH_ASSOC);
            $response = [
                "status" => "success",
                "data" => [
                    "id" => $tvdbId,
                    "name" => $seriesData['name'],
                    "german_name" => $seriesData['german_name'],
                    "aliases" => json_decode($seriesData['aliases']),
                    // Map snake_case DB columns back to the camelCase API shape.
                    "episodes" => array_map(function ($episode) {
                        return [
                            "name" => $episode['name'],
                            "aired" => $episode['aired'],
                            "runtime" => $episode['runtime'],
                            "seasonNumber" => $episode['season_number'],
                            "episodeNumber" => $episode['episode_number'],
                        ];
                    }, $episodes)
                ]
            ];
            if ($debug) {
                $response['debug'] = [
                    "cached" => true,
                    "cache_expiry" => $cacheExpiry
                ];
            }
            return $response;
        } else {
            // Fetch new data if cache is expired or unavailable
            return fetchAndCacheSeriesData($db, $tvdbId, $apiKey, $debug);
        }
    } catch
(Exception $e) {
        return ["status" => "error", "message" => "Error retrieving series data: " . $e->getMessage()];
    }
}

// Function to fetch and cache data from TVDB
// Downloads the extended series record (incl. episodes), derives a cache expiry,
// rewrites the series_cache/episodes rows in one transaction, and returns the
// same response shape as the cached path in getSeriesData().
function fetchAndCacheSeriesData($db, $tvdbId, $apiKey, $debug = false) {
    $token = getToken($db, $apiKey);
    if (!$token) {
        return ["status" => "error", "message" => "Failed to retrieve valid token from TVDB"];
    }

    $curl = curl_init("https://api4.thetvdb.com/v4/series/$tvdbId/extended?meta=episodes&short=true");
    curl_setopt($curl, CURLOPT_RETURNTRANSFER, true);
    curl_setopt($curl, CURLOPT_HTTPHEADER, [
        "Authorization: Bearer $token",
        "Accept: application/json"
    ]);
    $response = curl_exec($curl);

    // Check for Curl errors
    if (curl_errno($curl)) {
        $error_msg = curl_error($curl);
        curl_close($curl);
        return ["status" => "error", "message" => "Curl error: " . $error_msg];
    }
    curl_close($curl);

    // Decode response and check for errors
    $data = json_decode($response, true);
    if (!$data || $data['status'] !== 'success') {
        return ["status" => "error", "message" => "Failed to fetch data from TVDB"];
    }

    try {
        $series = $data['data'];
        // NOTE(review): assumes nameTranslations is keyed by language code ('deu')
        // — confirm against the TVDB v4 extended-series payload.
        $germanName = $series['nameTranslations']['deu'] ?? $series['name'];
        $rawAliases = $series['aliases'] ?? [];

        // Normalize aliases into an array
        $germanAliases = [];
        if (is_array($rawAliases)) {
            foreach ($rawAliases as $alias) {
                if (isset($alias['language']) && $alias['language'] === 'deu') {
                    $germanAliases[] = $alias;
                }
            }
        } elseif (is_object($rawAliases)) {
            foreach ((array)$rawAliases as $alias) {
                if (isset($alias['language']) && $alias['language'] === 'deu') {
                    $germanAliases[] = $alias;
                }
            }
        }
        // If neither, default to an empty array
        $germanAliases = $germanAliases ?: [];

        $nextAired = !empty($series['nextAired']) ? new DateTime($series['nextAired']) : new DateTime('1970-01-01');
        $lastAired = !empty($series['lastAired']) ? new DateTime($series['lastAired']) : new DateTime('1970-01-01');
        $lastUpdated = new DateTime($series['lastUpdated']);

        // Recently-updated or currently-airing shows get a shorter cache window.
        $cacheExpiry = new DateTime();
        if ($lastUpdated->diff($cacheExpiry)->days < 7 || ($nextAired != new DateTime('1970-01-01') && $nextAired->diff($cacheExpiry)->days < 6) || ($lastAired != new DateTime('1970-01-01') && $lastAired->diff($cacheExpiry)->days < 3)) {
            $cacheExpiry->modify('+2 days');
        } else {
            $cacheExpiry->modify('+6 days');
        }

        // Cache series data
        $db->beginTransaction();
        // FIX: bind $tvdbId instead of interpolating it into the SQL, consistent
        // with every other statement in this file.
        $deleteSeriesStmt = $db->prepare("DELETE FROM series_cache WHERE series_id = :tvdb_id");
        $deleteSeriesStmt->execute(['tvdb_id' => $tvdbId]);
        $stmt = $db->prepare("INSERT INTO series_cache (series_id, name, german_name, aliases, last_updated, next_aired, last_aired, cache_expiry) VALUES (:tvdb_id, :name, :german_name, :aliases, :last_updated, :next_aired, :last_aired, :cache_expiry)");
        $stmt->execute([
            'tvdb_id' => $tvdbId,
            'name' => $series['name'],
            'german_name' => $germanName,
            'aliases' => json_encode($germanAliases),
            'last_updated' => $series['lastUpdated'],
            'next_aired' => $nextAired->format('Y-m-d H:i:s'),
            'last_aired' => $lastAired->format('Y-m-d H:i:s'),
            'cache_expiry' => $cacheExpiry->format('Y-m-d H:i:s')
        ]);
        $deleteEpisodesStmt = $db->prepare("DELETE FROM episodes WHERE series_id = :tvdb_id");
        $deleteEpisodesStmt->execute(['tvdb_id' => $tvdbId]);
        $episodesStmt = $db->prepare("INSERT INTO episodes (id, series_id, name, aired, runtime, season_number, episode_number) VALUES (:id, :tvdb_id, :name, :aired, :runtime, :season_number, :episode_number)");
        foreach ($series['episodes'] as $episode) {
            $episodesStmt->execute([
                'id' => $episode['id'],
                'tvdb_id' => $tvdbId,
                'name' => $episode['name'],
                'aired' => $episode['aired'],
                'runtime' => $episode['runtime'],
                'season_number' => $episode['seasonNumber'],
                'episode_number' => $episode['number']
            ]);
        }
        $db->commit();

        $response = [
            "status" => "success",
            "data" => [
                "id" => $tvdbId,
                "name" => $series['name'],
                "german_name" => $germanName,
                "aliases" => $germanAliases,
                "episodes" => array_map(function ($episode) {
                    return [
                        "name" => $episode['name'],
                        "aired" => $episode['aired'],
                        "runtime" => $episode['runtime'],
                        "seasonNumber" => $episode['seasonNumber'],
                        "episodeNumber" => $episode['number'],
                    ];
                }, $series['episodes'])
            ]
        ];
        if ($debug) {
            $response['debug'] = [
                "cached" => false,
                "cache_expiry" => $cacheExpiry->format('Y-m-d H:i:s')
            ];
        }
        return $response;
    } catch (Exception $e) {
        // FIX: only roll back when a transaction is actually open. The DateTime
        // parsing above can throw before beginTransaction(), and calling
        // rollBack() then raises a PDOException that masks the real error.
        if ($db->inTransaction()) {
            $db->rollBack();
        }
        return ["status" => "error", "message" => "Database error: " . $e->getMessage()];
    }
}

// Process request
$tvdbId = filter_input(INPUT_GET, 'tvdbid', FILTER_VALIDATE_INT);
$debug = filter_input(INPUT_GET, 'debug', FILTER_VALIDATE_BOOLEAN);
if ($tvdbId) {
    echo json_encode(getSeriesData($db, $tvdbId, $apiKey, $debug));
} else {
    echo json_encode(["status" => "error", "message" => "TVDB ID is required and must be an integer"]);
}
?>

================================================ FILE: api/v1/token_manager.php ================================================

query("SELECT token, expiration_date FROM api_token WHERE id = 1");
    $result = $stmt->fetch(PDO::FETCH_ASSOC);
    if ($result && new DateTime() < new DateTime($result['expiration_date'])) {
        return $result['token'];
    } else {
        // If no valid token, refresh the token
        $apiKey = getApiKey($db);
        return refreshToken($db, $apiKey);
    }
}

// Logs in to TVDB v4 with the stored API key and caches the bearer token.
function refreshToken($db, $apiKey) {
    $curl = curl_init('https://api4.thetvdb.com/v4/login');
    curl_setopt($curl, CURLOPT_RETURNTRANSFER, true);
    curl_setopt($curl, CURLOPT_POST, true);
    curl_setopt($curl, CURLOPT_HTTPHEADER, ['Content-Type: application/json']);
    curl_setopt($curl, CURLOPT_POSTFIELDS, json_encode(['apikey' => $apiKey]));
    $response = curl_exec($curl);
    $data = json_decode($response, true);
    if ($data && $data['status'] == 'success') {
        $token = $data['data']['token'];
        $expirationDate = date('Y-m-d H:i:s', time() + 86400); // Assuming token expires after 24 hours
        // Update or insert the new token and expiration into the api_token table
        $db->exec("DELETE FROM api_token WHERE id = 1"); // Clear existing token
        $stmt = $db->prepare("INSERT INTO api_token (id, token,
expiration_date) VALUES (1, :token, :expiration_date)");
        $stmt->execute(['token' => $token, 'expiration_date' => $expirationDate]);
        return $token;
    } else {
        // Handle error or retry logic
        // NOTE(review): login failures currently return null silently; callers
        // must treat a null token as "not authenticated".
        return null;
    }
}
?>

================================================ FILE: build_and_push_docker_image.bat ================================================

@echo off
REM Builds the MediathekArr image under a user-supplied version tag and pushes
REM both the versioned tag and "latest" to Docker Hub.
SET IMAGE_NAME=pcjones/mediathekarr
echo Enter the version number for the Docker image:
set /p VERSION="Version: "
echo Building Docker image with version %VERSION%...
docker build -t %IMAGE_NAME%:%VERSION% .
docker tag %IMAGE_NAME%:%VERSION% %IMAGE_NAME%:latest
echo Pushing Docker image with version %VERSION%...
docker push %IMAGE_NAME%:%VERSION%
echo Pushing Docker image with tag latest...
docker push %IMAGE_NAME%:latest
echo Done.
pause

================================================ FILE: docker-compose.yml ================================================

services:
  mediathekarr:
    image: pcjones/mediathekarr:latest
    container_name: mediathekarr
    environment:
      - TZ=Europe/Berlin
      - DOWNLOAD_FOLDER_PATH_MAPPING=/downloads/completed # Change right side for correct path mapping
      # - MEDIATHEKARR_API_BASE_URL=https://mediathekarr.pcjones.de/api/v1 # Only change this if you are hosting your own API. Not needed for 99% of users
    volumes:
      - ./your_temp_downloads_folder/:/app/downloads # Change left side to your temp download folder location
    ports:
      - "127.0.0.1:5007:5007" # Port on the right side can be changed to any value you like
    restart: unless-stopped