commit a4403ce17b
Author: Cameron
Date: 2024-08-23 23:52:36 -05:00

API mostly working, starting to work on webapp

26 changed files with 1725 additions and 0 deletions

.gitignore (vendored, new file)

@@ -0,0 +1,398 @@
## Ignore Visual Studio temporary files, build results, and
## files generated by popular Visual Studio add-ons.
##
## Get latest from https://github.com/github/gitignore/blob/main/VisualStudio.gitignore
# User-specific files
*.rsuser
*.suo
*.user
*.userosscache
*.sln.docstates
# User-specific files (MonoDevelop/Xamarin Studio)
*.userprefs
# Mono auto generated files
mono_crash.*
# Build results
[Dd]ebug/
[Dd]ebugPublic/
[Rr]elease/
[Rr]eleases/
x64/
x86/
[Ww][Ii][Nn]32/
[Aa][Rr][Mm]/
[Aa][Rr][Mm]64/
bld/
[Bb]in/
[Oo]bj/
[Ll]og/
[Ll]ogs/
# Visual Studio 2015/2017 cache/options directory
.vs/
# Uncomment if you have tasks that create the project's static files in wwwroot
#wwwroot/
# Visual Studio 2017 auto generated files
Generated\ Files/
# MSTest test Results
[Tt]est[Rr]esult*/
[Bb]uild[Ll]og.*
# NUnit
*.VisualState.xml
TestResult.xml
nunit-*.xml
# Build Results of an ATL Project
[Dd]ebugPS/
[Rr]eleasePS/
dlldata.c
# Benchmark Results
BenchmarkDotNet.Artifacts/
# .NET Core
project.lock.json
project.fragment.lock.json
artifacts/
# ASP.NET Scaffolding
ScaffoldingReadMe.txt
# StyleCop
StyleCopReport.xml
# Files built by Visual Studio
*_i.c
*_p.c
*_h.h
*.ilk
*.meta
*.obj
*.iobj
*.pch
*.pdb
*.ipdb
*.pgc
*.pgd
*.rsp
*.sbr
*.tlb
*.tli
*.tlh
*.tmp
*.tmp_proj
*_wpftmp.csproj
*.log
*.tlog
*.vspscc
*.vssscc
.builds
*.pidb
*.svclog
*.scc
# Chutzpah Test files
_Chutzpah*
# Visual C++ cache files
ipch/
*.aps
*.ncb
*.opendb
*.opensdf
*.sdf
*.cachefile
*.VC.db
*.VC.VC.opendb
# Visual Studio profiler
*.psess
*.vsp
*.vspx
*.sap
# Visual Studio Trace Files
*.e2e
# TFS 2012 Local Workspace
$tf/
# Guidance Automation Toolkit
*.gpState
# ReSharper is a .NET coding add-in
_ReSharper*/
*.[Rr]e[Ss]harper
*.DotSettings.user
# TeamCity is a build add-in
_TeamCity*
# DotCover is a Code Coverage Tool
*.dotCover
# AxoCover is a Code Coverage Tool
.axoCover/*
!.axoCover/settings.json
# Coverlet is a free, cross platform Code Coverage Tool
coverage*.json
coverage*.xml
coverage*.info
# Visual Studio code coverage results
*.coverage
*.coveragexml
# NCrunch
_NCrunch_*
.*crunch*.local.xml
nCrunchTemp_*
# MightyMoose
*.mm.*
AutoTest.Net/
# Web workbench (sass)
.sass-cache/
# Installshield output folder
[Ee]xpress/
# DocProject is a documentation generator add-in
DocProject/buildhelp/
DocProject/Help/*.HxT
DocProject/Help/*.HxC
DocProject/Help/*.hhc
DocProject/Help/*.hhk
DocProject/Help/*.hhp
DocProject/Help/Html2
DocProject/Help/html
# Click-Once directory
publish/
# Publish Web Output
*.[Pp]ublish.xml
*.azurePubxml
# Note: Comment the next line if you want to checkin your web deploy settings,
# but database connection strings (with potential passwords) will be unencrypted
*.pubxml
*.publishproj
# Microsoft Azure Web App publish settings. Comment the next line if you want to
# checkin your Azure Web App publish settings, but sensitive information contained
# in these scripts will be unencrypted
PublishScripts/
# NuGet Packages
*.nupkg
# NuGet Symbol Packages
*.snupkg
# The packages folder can be ignored because of Package Restore
**/[Pp]ackages/*
# except build/, which is used as an MSBuild target.
!**/[Pp]ackages/build/
# Uncomment if necessary however generally it will be regenerated when needed
#!**/[Pp]ackages/repositories.config
# NuGet v3's project.json files produces more ignorable files
*.nuget.props
*.nuget.targets
# Microsoft Azure Build Output
csx/
*.build.csdef
# Microsoft Azure Emulator
ecf/
rcf/
# Windows Store app package directories and files
AppPackages/
BundleArtifacts/
Package.StoreAssociation.xml
_pkginfo.txt
*.appx
*.appxbundle
*.appxupload
# Visual Studio cache files
# files ending in .cache can be ignored
*.[Cc]ache
# but keep track of directories ending in .cache
!?*.[Cc]ache/
# Others
ClientBin/
~$*
*~
*.dbmdl
*.dbproj.schemaview
*.jfm
*.pfx
*.publishsettings
orleans.codegen.cs
# Including strong name files can present a security risk
# (https://github.com/github/gitignore/pull/2483#issue-259490424)
#*.snk
# Since there are multiple workflows, uncomment next line to ignore bower_components
# (https://github.com/github/gitignore/pull/1529#issuecomment-104372622)
#bower_components/
# RIA/Silverlight projects
Generated_Code/
# Backup & report files from converting an old project file
# to a newer Visual Studio version. Backup files are not needed,
# because we have git ;-)
_UpgradeReport_Files/
Backup*/
UpgradeLog*.XML
UpgradeLog*.htm
ServiceFabricBackup/
*.rptproj.bak
# SQL Server files
*.mdf
*.ldf
*.ndf
# Business Intelligence projects
*.rdl.data
*.bim.layout
*.bim_*.settings
*.rptproj.rsuser
*- [Bb]ackup.rdl
*- [Bb]ackup ([0-9]).rdl
*- [Bb]ackup ([0-9][0-9]).rdl
# Microsoft Fakes
FakesAssemblies/
# GhostDoc plugin setting file
*.GhostDoc.xml
# Node.js Tools for Visual Studio
.ntvs_analysis.dat
node_modules/
# Visual Studio 6 build log
*.plg
# Visual Studio 6 workspace options file
*.opt
# Visual Studio 6 auto-generated workspace file (contains which files were open etc.)
*.vbw
# Visual Studio 6 auto-generated project file (contains which files were open etc.)
*.vbp
# Visual Studio 6 workspace and project file (working project files containing files to include in project)
*.dsw
*.dsp
# Visual Studio 6 technical files
*.ncb
*.aps
# Visual Studio LightSwitch build output
**/*.HTMLClient/GeneratedArtifacts
**/*.DesktopClient/GeneratedArtifacts
**/*.DesktopClient/ModelManifest.xml
**/*.Server/GeneratedArtifacts
**/*.Server/ModelManifest.xml
_Pvt_Extensions
# Paket dependency manager
.paket/paket.exe
paket-files/
# FAKE - F# Make
.fake/
# CodeRush personal settings
.cr/personal
# Python Tools for Visual Studio (PTVS)
__pycache__/
*.pyc
# Cake - Uncomment if you are using it
# tools/**
# !tools/packages.config
# Tabs Studio
*.tss
# Telerik's JustMock configuration file
*.jmconfig
# BizTalk build output
*.btp.cs
*.btm.cs
*.odx.cs
*.xsd.cs
# OpenCover UI analysis results
OpenCover/
# Azure Stream Analytics local run output
ASALocalRun/
# MSBuild Binary and Structured Log
*.binlog
# NVidia Nsight GPU debugger configuration file
*.nvuser
# MFractors (Xamarin productivity tool) working folder
.mfractor/
# Local History for Visual Studio
.localhistory/
# Visual Studio History (VSHistory) files
.vshistory/
# BeatPulse healthcheck temp database
healthchecksdb
# Backup folder for Package Reference Convert tool in Visual Studio 2017
MigrationBackup/
# Ionide (cross platform F# VS Code tools) working folder
.ionide/
# Fody - auto-generated XML schema
FodyWeavers.xsd
# VS Code files for those working on multiple tools
.vscode/*
!.vscode/settings.json
!.vscode/tasks.json
!.vscode/launch.json
!.vscode/extensions.json
*.code-workspace
# Local History for Visual Studio Code
.history/
# Windows Installer files from build outputs
*.cab
*.msi
*.msix
*.msm
*.msp
# JetBrains Rider
*.sln.iml

Background/ComicAnalyzer.cs (new file)

@@ -0,0 +1,211 @@
using Microsoft.AspNetCore.Routing.Constraints;
using Microsoft.AspNetCore.StaticFiles;
using SharpCompress.Archives;
using SharpCompress.Archives.Rar;
using SharpCompress.Archives.SevenZip;
using System.Diagnostics;
using System.IO.Compression;
using System.IO.Hashing;
using System.Linq;
namespace ComiServ.Background
{
public record class ComicAnalysis
(
long FileSizeBytes,
int PageCount,
Int64 Xxhash
);
public record class ComicPage
(
string Filename,
string Mime,
byte[] Data
);
public interface IComicAnalyzer
{
public static readonly IReadOnlyList<string> ZIP_EXTS = [".cbz", ".zip"];
public static readonly IReadOnlyList<string> RAR_EXTS = [".cbr", ".rar"];
public static readonly IReadOnlyList<string> ZIP7_EXTS = [".cb7", ".7z"];
//returns null on invalid filetype, throws on analysis error
public ComicAnalysis? AnalyzeComic(string filename);
public Task<ComicAnalysis?> AnalyzeComicAsync(string filename);
//returns null if out of range, throws for file error
public ComicPage? GetComicPage(string filepath, int page);
//based purely on filename, doesn't try to open file
//returns null for ALL UNRECOGNIZED OR NON-IMAGES
public static string? GetImageMime(string filename)
{
if (new FileExtensionContentTypeProvider().TryGetContentType(filename, out string _mime))
{
if (_mime.StartsWith("image"))
return _mime;
}
return null;
}
}
//async methods actually just block
public class SynchronousComicAnalyzer(ILogger<IComicAnalyzer>? logger)
: IComicAnalyzer
{
private readonly ILogger<IComicAnalyzer>? _logger = logger;
public ComicAnalysis? AnalyzeComic(string filepath)
{
_logger?.LogTrace($"Analyzing comic: {filepath}");
var ext = new FileInfo(filepath).Extension.ToLower();
if (IComicAnalyzer.ZIP_EXTS.Contains(ext))
return ZipAnalyze(filepath);
else if (IComicAnalyzer.RAR_EXTS.Contains(ext))
return RarAnalyze(filepath);
else if (IComicAnalyzer.ZIP7_EXTS.Contains(ext))
return Zip7Analyze(filepath);
else
//throw new ArgumentException("Cannot analyze this file type");
return null;
}
#pragma warning disable CS1998 // Async method lacks 'await' operators and will run synchronously
public async Task<ComicAnalysis?> AnalyzeComicAsync(string filename)
#pragma warning restore CS1998 // Async method lacks 'await' operators and will run synchronously
{
return AnalyzeComic(filename);
}
protected ComicAnalysis ZipAnalyze(string filepath)
{
var filedata = File.ReadAllBytes(filepath);
var hash = ComputeHash(filedata);
using var stream = new MemoryStream(filedata);
using var archive = new ZipArchive(stream, ZipArchiveMode.Read, false);
return new
(
FileSizeBytes: filedata.LongLength,
PageCount: archive.Entries.Count,
Xxhash: hash
);
}
protected ComicAnalysis RarAnalyze(string filepath)
{
var filedata = File.ReadAllBytes(filepath);
var hash = ComputeHash(filedata);
using var stream = new MemoryStream(filedata);
using var rar = RarArchive.Open(stream, new SharpCompress.Readers.ReaderOptions()
{
LeaveStreamOpen = false
});
return new
(
FileSizeBytes: filedata.LongLength,
PageCount: rar.Entries.Count,
Xxhash: hash
);
}
protected ComicAnalysis Zip7Analyze(string filepath)
{
var filedata = File.ReadAllBytes(filepath);
var hash = ComputeHash(filedata);
using var stream = new MemoryStream(filedata);
using var zip7 = SevenZipArchive.Open(stream, new SharpCompress.Readers.ReaderOptions()
{
LeaveStreamOpen = false
});
return new
(
FileSizeBytes: filedata.LongLength,
PageCount: zip7.Entries.Count,
Xxhash: hash
);
}
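//XxHash64 yields an unsigned 64-bit value; the unchecked cast reinterprets it as signed so it fits the long/Int64 columns used elsewhere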
protected static Int64 ComputeHash(ReadOnlySpan<byte> data)
=> unchecked((Int64)XxHash64.HashToUInt64(data));
public ComicPage? GetComicPage(string filepath, int page)
{
var fi = new FileInfo(filepath);
//normalize case to match the lowercase extension lists, as AnalyzeComic does
var ext = fi.Extension.ToLower();
if (IComicAnalyzer.ZIP_EXTS.Contains(ext))
return GetPageZip(filepath, page);
else if (IComicAnalyzer.RAR_EXTS.Contains(ext))
return GetPageRar(filepath, page);
else if (IComicAnalyzer.ZIP7_EXTS.Contains(ext))
return GetPage7Zip(filepath, page);
else return null;
}
protected ComicPage? GetPageZip(string filepath, int page)
{
Debug.Assert(page >= 1, "Page number must be positive");
try
{
using var fileStream = new FileStream(filepath, FileMode.Open);
using var arc = new ZipArchive(fileStream, ZipArchiveMode.Read, false);
(var entry, var mime) = arc.Entries
.Select((ZipArchiveEntry e) => (e, IComicAnalyzer.GetImageMime(e.Name)))
.Where(static pair => pair.Item2 is not null)
.OrderBy(static pair => pair.Item1.FullName)
.Skip(page - 1)
.FirstOrDefault();
if (entry is null || mime is null)
return null;
using var pageStream = entry.Open();
using var pageStream2 = new MemoryStream();
pageStream.CopyTo(pageStream2);
pageStream2.Seek(0, SeekOrigin.Begin);
var pageData = pageStream2.ToArray();
return new
(
Filename: entry.Name,
Mime: mime,
Data: pageData
);
}
catch (FileNotFoundException)
{
return null;
}
catch (DirectoryNotFoundException)
{
return null;
}
}
protected ComicPage? GetPageRar(string filepath, int page)
{
using var rar = RarArchive.Open(filepath);
(var entry, var mime) = rar.Entries
.Select((RarArchiveEntry e) => (e, IComicAnalyzer.GetImageMime(e.Key)))
.Where(static pair => pair.Item2 is not null)
.OrderBy(static pair => pair.Item1.Key)
.Skip(page - 1)
.FirstOrDefault();
if (entry is null || mime is null)
return null;
using var stream = new MemoryStream();
entry.WriteTo(stream);
var pageData = stream.ToArray();
return new
(
Filename: entry.Key ?? "",
Mime: mime,
Data: pageData
);
}
protected ComicPage? GetPage7Zip(string filepath, int page)
{
using var zip7 = SevenZipArchive.Open(filepath);
(var entry, var mime) = zip7.Entries
.Select((SevenZipArchiveEntry e) => (e, IComicAnalyzer.GetImageMime(e.Key)))
.Where(static pair => pair.Item2 is not null)
.OrderBy(static pair => pair.Item1.Key)
.Skip(page - 1)
.FirstOrDefault();
if (entry is null || mime is null)
return null;
using var stream = new MemoryStream();
entry.WriteTo(stream);
var pageData = stream.ToArray();
return new
(
Filename: entry.Key ?? "",
Mime: mime,
Data: pageData
);
}
}
}
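A minimal usage sketch of the analyzer above (illustrative only; the path ./Library/example.cbz and the console host are hypothetical, not part of the project files):
using System;
using ComiServ.Background;
//any supported archive (.cbz/.zip, .cbr/.rar, .cb7/.7z) would work here
var analyzer = new SynchronousComicAnalyzer(logger: null);
var analysis = analyzer.AnalyzeComic("./Library/example.cbz");
if (analysis is not null)
{
Console.WriteLine($"{analysis.PageCount} pages, {analysis.FileSizeBytes} bytes, xxhash {analysis.Xxhash:X16}");
//page numbers are 1-based; page 1 is what the scanner stores as the cover
var page = analyzer.GetComicPage("./Library/example.cbz", 1);
if (page is not null)
Console.WriteLine($"First page: {page.Filename} ({page.Mime}, {page.Data.Length} bytes)");
}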

Background/ComicScanner.cs (new file)

@@ -0,0 +1,168 @@
using System.Collections.Generic;
using System.Runtime.InteropServices;
using ComiServ.Controllers;
using ComiServ.Entities;
using Microsoft.OpenApi.Writers;
namespace ComiServ.Background
{
public record class ComicScanItem
(
string Filepath,
long FileSizeBytes,
Int64 Xxhash,
int PageCount
);
public interface IComicScanner : IDisposable
{
//TODO should be configurable
public static readonly IReadOnlyList<string> COMIC_EXTENSIONS = [
"cbz", "zip",
"cbr", "rar",
"cb7", "7zip",
];
public void TriggerLibraryScan();
public void ScheduleRepeatedLibraryScans(TimeSpan period);
public IDictionary<string, ComicScanItem> PerformLibraryScan(CancellationToken? token = null);
}
public class ComicScanner(
IServiceProvider provider
) : IComicScanner
{
//private readonly ComicsContext _context = context;
private readonly ITaskManager _manager = provider.GetRequiredService<ITaskManager>();
private readonly Configuration _config = provider.GetRequiredService<IConfigService>().Config;
private readonly IComicAnalyzer _analyzer = provider.GetRequiredService<IComicAnalyzer>();
private readonly IServiceProvider _provider = provider;
public IDictionary<string, ComicScanItem> PerformLibraryScan(CancellationToken? token = null)
{
return new DirectoryInfo(_config.LibraryRoot).EnumerateFiles("*", SearchOption.AllDirectories)
.Select(fi =>
{
token?.ThrowIfCancellationRequested();
var path = Path.GetRelativePath(_config.LibraryRoot, fi.FullName);
var analysis = _analyzer.AnalyzeComic(fi.FullName);
if (analysis is null)
//null will be filtered
return (path, null);
return (path, new ComicScanItem
(
Filepath: path,
FileSizeBytes: analysis.FileSizeBytes,
Xxhash: analysis.Xxhash,
PageCount: analysis.PageCount
));
})
//ignore files of the wrong extension
.Where(p => p.Item2 is not null)
.ToDictionary();
}
public void TriggerLibraryScan()
{
TaskItem ti = new(
TaskTypes.Scan,
"Library Scan",
token =>
{
var items = PerformLibraryScan(token);
token?.ThrowIfCancellationRequested();
UpdateDatabaseWithScanResults(items);
},
null);
_manager.StartTask(ti);
}
private CancellationTokenSource? RepeatedLibraryScanTokenSource = null;
public void ScheduleRepeatedLibraryScans(TimeSpan interval)
{
RepeatedLibraryScanTokenSource?.Cancel();
RepeatedLibraryScanTokenSource?.Dispose();
RepeatedLibraryScanTokenSource = new();
TaskItem ti = new(
TaskTypes.Scan,
"Scheduled Library Scan",
token =>
{
var items = PerformLibraryScan(token);
token?.ThrowIfCancellationRequested();
UpdateDatabaseWithScanResults(items);
},
RepeatedLibraryScanTokenSource.Token);
_manager.ScheduleTask(ti, interval);
}
public void UpdateDatabaseWithScanResults(IDictionary<string, ComicScanItem> items)
{
using var scope = _provider.CreateScope();
var services = scope.ServiceProvider;
using var context = services.GetRequiredService<ComicsContext>();
//not an ideal algorithm
//need to go through every comic in the database to update `Exists`
//also need to go through every discovered comic to add new ones
//and should make sure not to double up on the overlaps
//there should be a faster method than using ExceptBy but I don't think it's urgent
//TODO profile on large database
SortedSet<string> alreadyExistingFiles = [];
foreach (var comic in context.Comics)
{
ComicScanItem info;
if (items.TryGetValue(comic.Filepath, out info))
{
comic.FileXxhash64 = info.Xxhash;
comic.Exists = true;
comic.PageCount = info.PageCount;
comic.SizeBytes = info.FileSizeBytes;
alreadyExistingFiles.Add(comic.Filepath);
}
else
{
comic.Exists = false;
}
}
var newComics = items.ExceptBy(alreadyExistingFiles, p => p.Key).Select(p =>
new Comic()
{
Handle = context.CreateHandle(),
Exists = true,
Filepath = p.Value.Filepath,
Title = new FileInfo(p.Value.Filepath).Name,
Description = "",
SizeBytes = p.Value.FileSizeBytes,
FileXxhash64 = p.Value.Xxhash,
PageCount = p.Value.PageCount
}).ToList();
newComics.ForEach(c => _manager.StartTask(new(
TaskTypes.GetCover,
$"Get Cover: {c.Title}",
token => InsertCover(Path.Join(_config.LibraryRoot, c.Filepath), c.FileXxhash64)
)));
context.Comics.AddRange(newComics);
context.SaveChanges();
}
protected void InsertCover(string filepath, long hash)
{
using var scope = _provider.CreateScope();
var services = scope.ServiceProvider;
using var context = services.GetRequiredService<ComicsContext>();
var existing = context.Covers.SingleOrDefault(c => c.FileXxhash64 == hash);
//assuming no hash overlap
//if you already have a cover, assume it's correct
if (existing is not null)
return;
var page = _analyzer.GetComicPage(filepath, 1);
if (page is null)
return;
context.Covers.Add(new()
{
FileXxhash64 = hash,
Filename = page.Filename,
CoverFile = page.Data
});
context.SaveChanges();
}
public void Dispose()
{
RepeatedLibraryScanTokenSource?.Dispose();
}
}
}

Background/TaskManager.cs (new file)

@@ -0,0 +1,98 @@
using System.Collections.Concurrent;
namespace ComiServ.Background
{
public enum TaskTypes
{
Scan,
GetCover,
}
//task needs to use the token parameter rather than its own token, because it gets merged with the master token
public class TaskItem(TaskTypes type, string name, Action<CancellationToken?> action, CancellationToken? token = null)
{
public readonly TaskTypes Type = type;
public readonly string Name = name;
public readonly Action<CancellationToken?> Action = action;
public readonly CancellationToken Token = token ?? CancellationToken.None;
}
public interface ITaskManager : IDisposable
{
public void StartTask(TaskItem taskItem);
public void ScheduleTask(TaskItem taskItem, TimeSpan interval);
public string[] GetTasks(int limit);
public void CancelAll();
}
public class TaskManager(ILogger<ITaskManager>? logger)
: ITaskManager
{
private readonly ConcurrentDictionary<Task, TaskItem> ActiveTasks = [];
private readonly CancellationTokenSource MasterToken = new();
private readonly ILogger<ITaskManager>? _logger = logger;
private readonly ConcurrentDictionary<System.Timers.Timer,TaskItem> Scheduled = [];
public void StartTask(TaskItem taskItem)
{
_logger?.LogTrace($"Start Task: {taskItem.Name}");
var tokenSource = CancellationTokenSource.CreateLinkedTokenSource(MasterToken.Token, taskItem.Token);
var newTask = Task.Run(() => taskItem.Action(tokenSource.Token),
tokenSource.Token);
if (!ActiveTasks.TryAdd(newTask, taskItem))
{
//TODO better exception
throw new Exception("failed to add task");
}
//TODO should master token actually cancel followup?
newTask.ContinueWith(ManageFinishedTasks, MasterToken.Token);
}
public void ScheduleTask(TaskItem taskItem, TimeSpan interval)
{
//var timer = new Timer((_) => StartTask(taskItem), null, dueTime, period ?? Timeout.InfiniteTimeSpan);
var timer = new System.Timers.Timer(interval);
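//System.Timers.Timer defaults to AutoReset = true, so the Elapsed handler (and the task) fires every interval until its token or the master token is cancelled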
var token = CancellationTokenSource.CreateLinkedTokenSource(MasterToken.Token, taskItem.Token);
Scheduled.TryAdd(timer, taskItem);
token.Token.Register(() =>
{
timer.Stop();
Scheduled.TryRemove(timer, out var _);
});
timer.Elapsed += (_, _) => taskItem.Action(token.Token);
timer.Start();
}
public string[] GetTasks(int limit)
{
return ActiveTasks.Select(p => p.Value.Name).Take(limit).ToArray();
}
public void CancelAll()
{
MasterToken.Cancel();
}
public void ManageFinishedTasks()
{
ManageFinishedTasks(null);
}
private readonly object _TaskCleanupLock = new();
protected void ManageFinishedTasks(Task? cause = null)
{
//there shouldn't really be concerns with running multiple simultaneously but might as well
lock (_TaskCleanupLock)
{
//cache first because we're modifying the dictionary
foreach (var pair in ActiveTasks.ToArray())
{
if (pair.Key.IsCompleted)
{
bool taskRemoved = ActiveTasks.TryRemove(pair.Key, out _);
if (taskRemoved)
{
_logger?.LogTrace($"Removed Task: {pair.Value.Name}");
}
}
}
}
}
public void Dispose()
{
MasterToken?.Dispose();
}
}
}
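A rough sketch of driving the task manager directly (illustrative only; the real app resolves ITaskManager through DI in Program.cs below):
using System;
using ComiServ.Background;
using var manager = new TaskManager(logger: null);
//one-off task: runs on the thread pool and is removed from ActiveTasks once it completes
manager.StartTask(new TaskItem(
TaskTypes.Scan,
"Demo task",
token => Console.WriteLine("task body")));
//repeating task: re-runs every five minutes until cancelled
manager.ScheduleTask(new TaskItem(
TaskTypes.Scan,
"Repeating demo task",
token => Console.WriteLine("repeating task body")),
TimeSpan.FromMinutes(5));
Console.WriteLine(string.Join(", ", manager.GetTasks(10)));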

ComiServ.csproj (new file)

@@ -0,0 +1,23 @@
<Project Sdk="Microsoft.NET.Sdk.Web">
<PropertyGroup>
<TargetFramework>net8.0</TargetFramework>
<Nullable>enable</Nullable>
<ImplicitUsings>enable</ImplicitUsings>
<GenerateDocumentationFile>true</GenerateDocumentationFile>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="Microsoft.AspNetCore.Diagnostics.EntityFrameworkCore" Version="8.0.8" />
<PackageReference Include="Microsoft.EntityFrameworkCore.Sqlite" Version="8.0.8" />
<PackageReference Include="Microsoft.EntityFrameworkCore.Tools" Version="8.0.8">
<PrivateAssets>all</PrivateAssets>
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
</PackageReference>
<PackageReference Include="Microsoft.VisualStudio.Web.CodeGeneration.Design" Version="8.0.4" />
<PackageReference Include="SharpCompress" Version="0.37.2" />
<PackageReference Include="Swashbuckle.AspNetCore" Version="6.7.1" />
<PackageReference Include="System.IO.Hashing" Version="8.0.0" />
</ItemGroup>
</Project>

ComicsContext.cs (new file)

@@ -0,0 +1,67 @@
using Microsoft.EntityFrameworkCore;
using ComiServ.Entities;
namespace ComiServ
{
public class ComicsContext : DbContext
{
//TODO is this the best place for this to live?
public const int HANDLE_LENGTH = 12;
//relies on low probability of repeat handles in a short period of time
//duplicate handles could be created before either of them are committed
public string CreateHandle()
{
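//handles use the 35-symbol alphabet 0-9 plus A-Z without 'O', presumably to avoid confusion with zero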
char ToChar(int i)
{
if (i < 10)
return (char)('0' + i);
if (i - 10 + 'A' < 'O')
return (char)('A' + i - 10);
else
//skip 'O'
return (char)('A' + i - 9);
}
string handle = "";
do
{
handle = string.Join("", Enumerable.Repeat(0, HANDLE_LENGTH)
.Select(_ => ToChar(Random.Shared.Next(0, 35))));
} while (Comics.Any(c => c.Handle == handle));
return handle;
}
public DbSet<Comic> Comics { get; set; }
public DbSet<ComicTag> ComicTags { get; set; }
public DbSet<Tag> Tags { get; set; }
public DbSet<ComicAuthor> ComicAuthors { get; set; }
public DbSet<Author> Authors { get; set; }
public DbSet<Cover> Covers { get; set; }
public ComicsContext(DbContextOptions<ComicsContext> options)
: base(options)
{
}
protected override void OnModelCreating(ModelBuilder modelBuilder)
{
modelBuilder.Entity<Comic>().ToTable("Comics");
modelBuilder.Entity<ComicTag>().ToTable("ComicTags");
modelBuilder.Entity<Tag>().ToTable("Tags");
modelBuilder.Entity<ComicAuthor>().ToTable("ComicAuthors");
modelBuilder.Entity<Author>().ToTable("Authors");
modelBuilder.Entity<Cover>().ToTable("Covers");
}
/// <summary>
/// puts a user-provided handle into the proper form
/// </summary>
/// <param name="handle"></param>
/// <returns>formatted handle or null if invalid</returns>
public static string? CleanValidateHandle(string? handle)
{
if (handle is null)
return null;
handle = handle.Trim();
if (handle.Length != HANDLE_LENGTH)
return null;
return handle.ToUpper();
}
}
}

ConfigService.cs (new file)

@@ -0,0 +1,30 @@
using System.Text.Json;
namespace ComiServ
{
public class Configuration
{
public string LibraryRoot { get; set; } = null!;
public string DatabaseFile { get; set; } = null!;
public Configuration Copy()
=> MemberwiseClone() as Configuration
//this really shouldn't be possible
?? throw new Exception("Failed to clone configuration");
}
public interface IConfigService
{
public Configuration Config { get; }
}
public class ConfigService : IConfigService
{
private readonly Configuration _Config;
//hand out copies so callers cannot mutate the original
public Configuration Config => _Config.Copy();
public ConfigService(string filepath)
{
using var fileStream = File.OpenRead(filepath);
_Config = JsonSerializer.Deserialize<Configuration>(fileStream)
?? throw new ArgumentException("Failed to parse config file");
}
}
}


@@ -0,0 +1,284 @@
using Microsoft.AspNetCore.Http;
using Microsoft.AspNetCore.Mvc;
using ComiServ.Models;
using System.Linq;
using System.Reflection;
using Microsoft.EntityFrameworkCore;
using Microsoft.EntityFrameworkCore.Migrations;
using ComiServ.Entities;
using ComiServ.Background;
using System.ComponentModel;
namespace ComiServ.Controllers
{
[Route("api/v1/comics")]
[ApiController]
public class ComicController(ComicsContext context, ILogger<ComicController> logger, IConfigService config, IComicAnalyzer analyzer)
: ControllerBase
{
private readonly ComicsContext _context = context;
private readonly ILogger<ComicController> _logger = logger;
private readonly Configuration _config = config.Config;
private readonly IComicAnalyzer _analyzer = analyzer;
//TODO search parameters
[HttpGet]
[ProducesResponseType<Paginated<ComicData>>(StatusCodes.Status200OK)]
public IActionResult SearchComics(
[FromQuery(Name = "TitleSearch")]
string? titleSearch,
[FromQuery(Name = "DescriptionSearch")]
string? descSearch,
[FromQuery]
string[] authors,
[FromQuery]
string[] tags,
[FromQuery]
string? pages,
[FromQuery]
string? xxhash64Hex,
[FromQuery]
bool? exists,
[FromQuery]
[DefaultValue(0)]
int page,
[FromQuery]
[DefaultValue(20)]
int pageSize
)
{
//throw new NotImplementedException();
var results = _context.Comics
.Include("ComicAuthors.Author")
.Include("ComicTags.Tag");
if (exists is not null)
{
results = results.Where(c => c.Exists == exists);
}
foreach (var author in authors)
{
results = results.Where(c => c.ComicAuthors.Any(ca => EF.Functions.Like(ca.Author.Name, author)));
}
foreach (var tag in tags)
{
results = results.Where(c => c.ComicTags.Any(ct => EF.Functions.Like(ct.Tag.Name, tag)));
}
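//pages accepts "<N", "<=N", ">N", ">=N", "=N", or a bare number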
if (pages is not null)
{
pages = pages.Trim();
if (pages.StartsWith("<="))
{
var pageMax = int.Parse(pages.Substring(2));
results = results.Where(c => c.PageCount <= pageMax);
}
else if (pages.StartsWith('<'))
{
var pageMax = int.Parse(pages.Substring(1));
results = results.Where(c => c.PageCount < pageMax);
}
else if (pages.StartsWith(">="))
{
var pageMin = int.Parse(pages.Substring(2));
results = results.Where(c => c.PageCount >= pageMin);
}
else if (pages.StartsWith('>'))
{
var pageMin = int.Parse(pages.Substring(1));
results = results.Where(c => c.PageCount > pageMin);
}
else
{
if (pages.StartsWith('='))
pages = pages.Substring(1);
var pageExact = int.Parse(pages);
results = results.Where(c => c.PageCount == pageExact);
}
}
if (xxhash64Hex is not null)
{
xxhash64Hex = xxhash64Hex.Trim().ToUpper();
if (!xxhash64Hex.All(c => (c >= '0' && c <= '9') || (c >= 'A' && c <= 'F')))
return BadRequest();
Int64 hash = 0;
foreach (char c in xxhash64Hex)
{
if (c >= '0' && c <= '9')
hash = hash * 16 + (c - '0');
else if (c >= 'A' && c <= 'F')
hash = hash * 16 + (c - 'A' + 10);
else
throw new ArgumentException("Invalid hex character bypassed filter");
}
results = results.Where(c => c.FileXxhash64 == hash);
}
if (titleSearch is not null)
{
//results = results.Where(c => EF.Functions.Like(c.Title, $"*{titleSearch}*"));
results = results.Where(c => c.Title.Contains(titleSearch));
}
if (descSearch is not null)
{
//results = results.Where(c => EF.Functions.Like(c.Description, $"*{descSearch}*"));
results = results.Where(c => c.Description.Contains(descSearch));
}
int offset = page * pageSize;
//order by Id so Skip-based pagination is stable across requests
return Ok(new Paginated<ComicData>(pageSize, page, results.OrderBy(c => c.Id).Skip(offset)
.Select(c => new ComicData(c))));
}
[HttpDelete]
[ProducesResponseType(StatusCodes.Status200OK)]
public IActionResult DeleteComicsThatDontExist()
{
var search = _context.Comics.Where(c => !c.Exists);
//materialize the list before deleting so it can be returned afterwards
var nonExtant = search.ToList();
//ExecuteDelete runs immediately against the database; no SaveChanges needed
search.ExecuteDelete();
return Ok(nonExtant.Select(c => new ComicData(c)));
}
[HttpGet("{handle}")]
[ProducesResponseType<ComicData>(StatusCodes.Status200OK)]
[ProducesResponseType<RequestError>(StatusCodes.Status404NotFound)]
[ProducesResponseType<RequestError>(StatusCodes.Status400BadRequest)]
public IActionResult GetSingleComicInfo(string handle)
{
_logger.LogInformation("GetSingleComicInfo: {handle}", handle);
handle = handle.Trim().ToUpper();
if (handle.Length != ComicsContext.HANDLE_LENGTH)
return BadRequest(RequestError.InvalidHandle);
var comic = _context.Comics
.Include("ComicAuthors.Author")
.Include("ComicTags.Tag")
.SingleOrDefault(c => c.Handle == handle);
if (comic is Comic actualComic)
return Ok(new ComicData(actualComic));
else
return NotFound(RequestError.ComicNotFound);
}
[HttpPatch("{handle}")]
[ProducesResponseType(StatusCodes.Status200OK)]
[ProducesResponseType<RequestError>(StatusCodes.Status400BadRequest)]
[ProducesResponseType<RequestError>(StatusCodes.Status404NotFound)]
public IActionResult UpdateComicMetadata(string handle, [FromBody] ComicMetadataUpdate metadata)
{
//throw new NotImplementedException();
if (handle.Length != ComicsContext.HANDLE_LENGTH)
return BadRequest(RequestError.InvalidHandle);
//using var transaction = _context.Database.BeginTransaction();
var comic = _context.Comics.SingleOrDefault(c => c.Handle == handle);
if (comic is Comic actualComic)
{
if (metadata.Title != null)
actualComic.Title = metadata.Title;
if (metadata.Description != null)
actualComic.Description = metadata.Description;
if (metadata.Authors is List<string> authors)
{
//make sure all authors exist, without changing Id of pre-existing authors
//TODO try to batch these
authors.ForEach(author => _context.Database.ExecuteSql(
$"INSERT OR IGNORE INTO [Authors] (Name) VALUES ({author})"));
//get the Id of needed authors
var authorEntities = _context.Authors.Where(a => authors.Contains(a.Name)).ToList();
//delete existing author mappings
_context.ComicAuthors.RemoveRange(_context.ComicAuthors.Where(ca => ca.Comic.Id == comic.Id));
//add all author mappings
_context.ComicAuthors.AddRange(authorEntities.Select(a => new ComicAuthor { Comic = comic, Author = a }));
}
if (metadata.Tags is List<string> tags)
{
//make sure all tags exist, without changing Id of pre-existing tags
//TODO try to batch these
tags.ForEach(tag => _context.Database.ExecuteSql(
$"INSERT OR IGNORE INTO [Tags] (Name) VALUES ({tag})"));
//get the needed tags
var tagEntities = _context.Tags.Where(t => tags.Contains(t.Name)).ToList();
//delete existing tag mappings
_context.ComicTags.RemoveRange(_context.ComicTags.Where(ta => ta.Comic.Id == comic.Id));
//add all tag mappings
_context.ComicTags.AddRange(tagEntities.Select(t => new ComicTag { Comic = comic, Tag = t }));
}
_context.SaveChanges();
return Ok();
}
else
return NotFound(RequestError.ComicNotFound);
}
//[HttpDelete("{handle}")]
//public IActionResult DeleteComic(string handle)
//{
// throw new NotImplementedException();
//}
[HttpGet("{handle}/file")]
[ProducesResponseType<byte[]>(StatusCodes.Status200OK)]
[ProducesResponseType<RequestError>(StatusCodes.Status400BadRequest)]
[ProducesResponseType<RequestError>(StatusCodes.Status404NotFound)]
public IActionResult GetComicFile(string handle)
{
_logger.LogInformation($"{nameof(GetComicFile)}: {handle}");
handle = handle.Trim().ToUpper();
if (handle.Length != ComicsContext.HANDLE_LENGTH)
return BadRequest(RequestError.InvalidHandle);
var comic = _context.Comics.SingleOrDefault(c => c.Handle == handle);
if (comic is null)
return NotFound(RequestError.ComicNotFound);
var fullPath = Path.Join(_config.LibraryRoot, comic.Filepath);
if (!System.IO.File.Exists(fullPath))
return NotFound(RequestError.FileNotFound);
var data = System.IO.File.ReadAllBytes(fullPath);
return File(data, "application/octet-stream", new FileInfo(comic.Filepath).Name);
}
[HttpGet("{handle}/cover")]
[ProducesResponseType(StatusCodes.Status200OK)]
[ProducesResponseType<RequestError>(StatusCodes.Status400BadRequest)]
[ProducesResponseType<RequestError>(StatusCodes.Status404NotFound)]
public IActionResult GetComicCover(string handle)
{
_logger.LogInformation($"{nameof(GetComicCover)}: {handle}");
var validated = ComicsContext.CleanValidateHandle(handle);
if (validated is null)
return BadRequest(RequestError.InvalidHandle);
var comic = _context.Comics
.SingleOrDefault(c => c.Handle == validated);
if (comic is null)
return NotFound(RequestError.ComicNotFound);
var cover = _context.Covers
.SingleOrDefault(cov => cov.FileXxhash64 == comic.FileXxhash64);
if (cover is null)
return NotFound(RequestError.CoverNotFound);
var mime = IComicAnalyzer.GetImageMime(cover.Filename);
if (mime is null)
return File(cover.CoverFile, "application/octet-stream", cover.Filename);
return File(cover.CoverFile, mime);
}
[HttpGet("{handle}/page/{page}")]
[ProducesResponseType(StatusCodes.Status200OK)]
[ProducesResponseType<RequestError>(StatusCodes.Status400BadRequest)]
[ProducesResponseType<RequestError>(StatusCodes.Status404NotFound)]
public IActionResult GetComicPage(string handle, int page)
{
_logger.LogInformation($"{nameof(GetComicPage)}: {handle} {page}");
var validated = ComicsContext.CleanValidateHandle(handle);
if (validated is null)
return BadRequest(RequestError.InvalidHandle);
var comic = _context.Comics.SingleOrDefault(c => c.Handle == validated);
if (comic is null)
return NotFound(RequestError.ComicNotFound);
var comicPage = _analyzer.GetComicPage(Path.Join(_config.LibraryRoot, comic.Filepath), page);
if (comicPage is null)
//TODO rethink error code
return NotFound(RequestError.PageNotFound);
return File(comicPage.Data, comicPage.Mime);
}
[HttpPost("cleandb")]
[ProducesResponseType(StatusCodes.Status200OK)]
public IActionResult CleanUnusedTagAuthors()
{
//ComicAuthors/ComicTags rows reference these by foreign key, so only
//authors and tags with no remaining mappings are deleted
//Include is unnecessary here; the navigation Count translates directly
_context.Authors
.Where(a => a.ComicAuthors.Count == 0)
.ExecuteDelete();
_context.Tags
.Where(t => t.ComicTags.Count == 0)
.ExecuteDelete();
//ExecuteDelete runs immediately against the database; SaveChanges is not needed
return Ok();
}
}
}
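For reference, a rough sketch of exercising these endpoints from a console client (the base address, query values, and handle are placeholders, not taken from this project):
using System;
using System.Net.Http;
using System.Net.Http.Json;
using System.Text.Json;
using var http = new HttpClient { BaseAddress = new Uri("https://localhost:5001/") };
//search comics whose title contains "one", five results per page
var results = await http.GetFromJsonAsync<JsonElement>("api/v1/comics?TitleSearch=one&pageSize=5");
Console.WriteLine(results);
//fetch page 1 of a comic by its 12-character handle
var pageBytes = await http.GetByteArrayAsync("api/v1/comics/ABCDEFGHJKLM/page/1");
Console.WriteLine($"page 1: {pageBytes.Length} bytes");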


@@ -0,0 +1,41 @@
using ComiServ.Background;
using ComiServ.Models;
using Microsoft.AspNetCore.Http;
using Microsoft.AspNetCore.Mvc;
using System.ComponentModel;
namespace ComiServ.Controllers
{
[Route("api/v1/tasks")]
[ApiController]
public class TaskController(
ComicsContext context
,ITaskManager manager
,IComicScanner scanner
,ILogger<TaskController> logger
) : ControllerBase
{
private readonly ComicsContext _context = context;
private readonly ITaskManager _manager = manager;
private readonly IComicScanner _scanner = scanner;
private readonly ILogger<TaskController> _logger = logger;
private readonly CancellationTokenSource cancellationToken = new();
[HttpGet]
[ProducesResponseType<Truncated<string>>(StatusCodes.Status200OK)]
public IActionResult GetTasks(
[FromQuery]
[DefaultValue(20)]
int limit
)
{
return Ok(new Truncated<string>(limit, _manager.GetTasks(limit+1)));
}
[HttpPost("scan")]
[ProducesResponseType(StatusCodes.Status200OK)]
public IActionResult StartScan()
{
_scanner.TriggerLibraryScan();
return Ok();
}
}
}


@@ -0,0 +1,12 @@
using Microsoft.AspNetCore.Mvc;
namespace ComiServ.Controllers
{
[Route("app")]
[Controller]
public class WebappController
: ControllerBase
{
}
}

Entities/Author.cs (new file)

@@ -0,0 +1,17 @@
using Microsoft.EntityFrameworkCore;
using Swashbuckle.AspNetCore.Annotations;
using System.ComponentModel.DataAnnotations;
using System.ComponentModel.DataAnnotations.Schema;
namespace ComiServ.Entities
{
[Index(nameof(Name), IsUnique = true)]
public class Author
{
public int Id { get; set; }
[Required]
public string Name { get; set; } = null!;
//must be a property (not a field) for EF Core to map it as a navigation
public ICollection<ComicAuthor> ComicAuthors { get; set; } = [];
}
}

Entities/Comic.cs (new file)

@@ -0,0 +1,32 @@
using ComiServ.Controllers;
using Microsoft.EntityFrameworkCore;
using System.ComponentModel.DataAnnotations;
using System.ComponentModel.DataAnnotations.Schema;
namespace ComiServ.Entities
{
[Index(nameof(Handle), IsUnique = true)]
[Index(nameof(Filepath), IsUnique = true)]
public class Comic
{
public int Id { get; set; }
public bool Exists { get; set; }
//id exposed through the API
[Required]
[StringLength(ComicsContext.HANDLE_LENGTH)]
public string Handle { get; set; } = null!;
[Required]
public string Filepath { get; set; } = null!;
[Required]
public string Title { get; set; } = null!;
[Required]
public string Description { get; set; } = null!;
public int PageCount { get; set; }
public long SizeBytes { get; set; }
public long FileXxhash64 { get; set; }
[InverseProperty("Comic")]
public ICollection<ComicTag> ComicTags { get; set; } = [];
[InverseProperty("Comic")]
public ICollection<ComicAuthor> ComicAuthors { get; set; } = [];
}
}

Entities/ComicAuthor.cs (new file)

@@ -0,0 +1,21 @@
using Microsoft.EntityFrameworkCore;
using System.ComponentModel.DataAnnotations;
using System.ComponentModel.DataAnnotations.Schema;
namespace ComiServ.Entities
{
[PrimaryKey("ComicId", "AuthorId")]
[Index("ComicId")]
[Index("AuthorId")]
public class ComicAuthor
{
[ForeignKey(nameof(Comic))]
public int ComicId { get; set; }
[Required]
public Comic Comic { get; set; } = null!;
[ForeignKey(nameof(Author))]
public int AuthorId { get; set; }
[Required]
public Author Author { get; set; } = null!;
}
}

Entities/ComicTag.cs (new file)

@@ -0,0 +1,15 @@
using Microsoft.EntityFrameworkCore;
namespace ComiServ.Entities
{
[PrimaryKey("ComicId", "TagId")]
[Index("ComicId")]
[Index("TagId")]
public class ComicTag
{
public int ComicId { get; set; }
public Comic Comic { get; set; } = null!;
public int TagId { get; set; }
public Tag Tag { get; set; } = null!;
}
}

Entities/Cover.cs (new file)

@@ -0,0 +1,12 @@
using Microsoft.EntityFrameworkCore;
namespace ComiServ.Entities
{
[PrimaryKey("FileXxhash64")]
public class Cover
{
public long FileXxhash64 { get; set; }
public string Filename { get; set; } = null!;
public byte[] CoverFile { get; set; } = null!;
}
}


@@ -0,0 +1,35 @@
using Microsoft.EntityFrameworkCore.Metadata.Internal;
using Microsoft.OpenApi.Models;
using Swashbuckle.AspNetCore.SwaggerGen;
namespace ComiServ.Entities
{
/// <summary>
/// This was originally made to remove Entity types that were being added to the Swagger schema.
/// I found that there was a bug in a `ProducesResponseTypeAttribute` that caused it, and this is
/// no longer necessary. I changed Apply to a nop but am keeping this around as an example and
/// in case I actually need something like this in the future.
/// </summary>
public class EntitySwaggerFilter : ISchemaFilter
{
public readonly static string[] FILTER = [
nameof(Author),
nameof(Comic),
nameof(ComicAuthor),
nameof(ComicTag),
nameof(Cover),
nameof(Tag)
];
public void Apply(OpenApiSchema schema, SchemaFilterContext context)
{
return;
foreach (var item in context.SchemaRepository.Schemas.Keys)
{
if (FILTER.Contains(item))
{
context.SchemaRepository.Schemas.Remove(item);
}
}
}
}
}

Entities/Tag.cs (new file)

@@ -0,0 +1,16 @@
using Microsoft.EntityFrameworkCore;
using System.ComponentModel.DataAnnotations;
using System.ComponentModel.DataAnnotations.Schema;
namespace ComiServ.Entities
{
[Index(nameof(Name), IsUnique = true)]
public class Tag
{
//[DatabaseGenerated(DatabaseGeneratedOption.Identity)]
public int Id { get; set; }
[Required]
public string Name { get; set; } = null!;
//must be a property (not a field) for EF Core to map it as a navigation
public ICollection<ComicTag> ComicTags { get; set; } = [];
}
}

Logging/Events.cs (new file)

@@ -0,0 +1,7 @@
namespace ComiServ.Logging
{
public static class Events
{
}
}

Models/ComicData.cs (new file)

@@ -0,0 +1,40 @@
using ComiServ.Entities;
namespace ComiServ.Models
{
public class ComicData
{
public string Handle { get; set; }
public bool Exists { get; set; }
public string Filepath { get; set; }
public string Title { get; set; }
public string Description { get; set; }
public int PageCount { get; set; }
public long SizeBytes { get; set; }
public string FileXxhash64 { get; set; }
public List<string> Authors { get; set; }
public List<string> Tags { get; set; }
public ComicData(Comic comic)
{
Handle = comic.Handle;
Exists = comic.Exists;
Filepath = comic.Filepath;
Title = comic.Title;
Description = comic.Description;
PageCount = comic.PageCount;
SizeBytes = comic.SizeBytes;
//render the full 64-bit hash as 16 uppercase hex digits, most significant first
FileXxhash64 = ((UInt64)comic.FileXxhash64).ToString("X16");
Authors = comic.ComicAuthors.Select(a => a.Author.Name).ToList();
Tags = comic.ComicTags.Select(a => a.Tag.Name).ToList();
}
}
}


@@ -0,0 +1,10 @@
namespace ComiServ.Models
{
public class ComicMetadataUpdate
{
public string? Title { get; set; }
public string? Description { get; set; }
public List<string>? Tags { get; set; }
public List<string>? Authors { get; set; }
}
}

Models/Paginated.cs (new file)

@@ -0,0 +1,35 @@
namespace ComiServ.Models
{
public class Paginated<T>
{
public int Max { get; }
public int Page { get;}
public bool Last { get; }
public int Count { get; }
public List<T> Items { get; }
public Paginated(int max, int page, IEnumerable<T> iter)
{
Max = max;
Page = page;
if (max <= 0)
{
throw new ArgumentOutOfRangeException(nameof(max), max, "must be greater than 0");
}
if (page < 0)
{
throw new ArgumentOutOfRangeException(nameof(page), page, "must be greater than or equal to 0");
}
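//take one extra item so the presence of a following page can be detected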
Items = iter.Take(max + 1).ToList();
if (Items.Count > max)
{
Last = false;
Items.RemoveAt(max);
}
else
{
Last = true;
}
Count = Items.Count;
}
}
}

Models/RequestError.cs (new file)

@@ -0,0 +1,34 @@
namespace ComiServ.Models
{
public class RequestError
{
public static RequestError InvalidHandle => new("Invalid handle");
public static RequestError ComicNotFound => new("Comic not found");
public static RequestError CoverNotFound => new("Cover not found");
public static RequestError PageNotFound => new("Page not found");
public static RequestError FileNotFound => new("File not found");
public string[] Errors { get; }
public RequestError(string ErrorMessage)
{
Errors = [ErrorMessage];
}
public RequestError(IEnumerable<string> ErrorMessages)
{
Errors = ErrorMessages.ToArray();
}
public RequestError And(RequestError other)
{
return new RequestError(Errors.Concat(other.Errors));
}
public RequestError And(string other)
{
return new RequestError(Errors.Append(other));
}
public RequestError And(IEnumerable<string> other)
{
return new RequestError(Errors.Concat(other))
;
}
}
}

Models/Truncated.cs (new file)

@@ -0,0 +1,32 @@
namespace ComiServ.Models
{
public class Truncated<T>
{
public int Max { get; }
public int Count { get; }
public bool Complete { get; }
public List<T> Items { get; }
public Truncated(int max, IEnumerable<T> items)
{
if (max <= 0)
{
throw new ArgumentOutOfRangeException(nameof(max), max, "must be greater than 0");
}
Max = max;
//take one extra item to detect whether the source was truncated
Items = items.Take(max + 1).ToList();
if (Items.Count > max)
{
Complete = false;
Items.RemoveAt(max);
}
else
{
Complete = true;
}
Count = Items.Count;
}
}
}

Program.cs (new file)

@@ -0,0 +1,75 @@
using ComiServ;
using Microsoft.EntityFrameworkCore;
using Microsoft.AspNetCore.Builder;
using Microsoft.Extensions.DependencyInjection;
using ComiServ.Background;
using Swashbuckle.AspNetCore.SwaggerGen;
using ComiServ.Entities;
var builder = WebApplication.CreateBuilder(args);
var CONFIG_FILEPATH = "config.json";
var configService = new ConfigService(CONFIG_FILEPATH);
var config = configService.Config;
var ConnectionString = $"Data Source={config.DatabaseFile};Mode=ReadWriteCreate";
// Add services to the container.
builder.Services.AddControllers();
// Learn more about configuring Swagger/OpenAPI at https://aka.ms/aspnetcore/swashbuckle
builder.Services.AddEndpointsApiExplorer();
builder.Services.AddSwaggerGen(c =>
{
c.SchemaFilter<EntitySwaggerFilter>();
});
builder.Services.AddSingleton<IConfigService>(configService);
builder.Services.AddDbContext<ComicsContext>(options =>
options.UseSqlite(ConnectionString));
builder.Services.AddDatabaseDeveloperPageExceptionFilter();
builder.Services.AddSingleton<ITaskManager>(sp =>
new TaskManager(sp.GetService<ILogger<ITaskManager>>()));
builder.Services.AddSingleton<IComicAnalyzer>(sp =>
new SynchronousComicAnalyzer(
logger: sp.GetRequiredService<ILogger<IComicAnalyzer>>()));
builder.Services.AddSingleton<IComicScanner>(sp =>
new ComicScanner(provider: sp));
builder.Services.AddHttpLogging(o => { });
//builder.Services.AddRazorPages().AddRazorPagesOptions(o =>
//{
// o.RootDirectory = "/Pages";
//});
builder.Services.AddLogging(config =>
{
config.AddConsole();
config.AddDebug();
});
var app = builder.Build();
app.UseHttpLogging();
app.UseHttpsRedirection();
app.UseStaticFiles();
// Configure the HTTP request pipeline.
if (app.Environment.IsDevelopment())
{
app.UseSwagger();
app.UseSwaggerUI();
app.UseDeveloperExceptionPage();
app.UseMigrationsEndPoint();
}
using (var scope = app.Services.CreateScope())
{
var services = scope.ServiceProvider;
using var context = services.GetRequiredService<ComicsContext>();
context.Database.EnsureCreated();
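//EnsureCreated builds the schema directly from the model and does not use EF migrations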
}
var scanner = app.Services.GetRequiredService<IComicScanner>();
scanner.TriggerLibraryScan();
scanner.ScheduleRepeatedLibraryScans(TimeSpan.FromDays(1));
app.UseAuthorization();
app.MapControllers();
app.Run();


@@ -0,0 +1,8 @@
{
"Logging": {
"LogLevel": {
"Default": "Information",
"Microsoft.AspNetCore": "Warning"
}
}
}

config.json (new file)

@@ -0,0 +1,4 @@
{
"LibraryRoot": "./Library",
"DatabaseFile": "ComiServ.db"
}