Compare commits

...

11 Commits

Author SHA1 Message Date
Robert McRackan  f1aacd92ad  Bugfix: decrypt file conflict  2019-12-02 14:39:46 -05:00
Robert McRackan  b1b426427c  Bugfix: initial bottom counts can throw error when a book was moved since Libation was last run  2019-11-27 16:57:35 -05:00
Robert McRackan  0683e5f55b  Optimize tag persistence  2019-11-27 15:36:34 -05:00
Robert McRackan  5c81441f83  Bugfix: decrypt book with no author  2019-11-27 09:27:43 -05:00
Robert McRackan  57bc74cd23  Improved logging for file decrypt  2019-11-26 13:13:16 -05:00
Robert McRackan  1cecd4ba2e  Improved logging. Updated nuget packages  2019-11-26 10:42:38 -05:00
Robert McRackan  7a4bd639fb  Add notes for v3.1-beta.5  2019-11-25 14:12:07 -05:00
Robert McRackan  87e6a46808  remove db file  2019-11-25 14:09:53 -05:00
Robert McRackan  a2e30df51f  Improved importing  2019-11-25 13:45:29 -05:00
Robert McRackan  c8e759c067  update notes  2019-11-24 21:47:03 -05:00
Robert McRackan  6c9074169a  Added beta-specific logging  2019-11-24 21:45:35 -05:00
36 changed files with 554 additions and 399 deletions

1
.gitignore vendored
View File

@@ -332,3 +332,4 @@ ASALocalRun/
# manually ignored files
/__TODO.txt
/DataLayer/LibationContext.db

View File

@@ -72,8 +72,9 @@ namespace AaxDecrypter
}
private AaxToM4bConverter(string inputFile, string decryptKey)
{
if (string.IsNullOrWhiteSpace(inputFile)) throw new ArgumentNullException(nameof(inputFile), "Input file may not be null or whitespace");
if (!File.Exists(inputFile)) throw new ArgumentNullException(nameof(inputFile), "File does not exist");
ArgumentValidator.EnsureNotNullOrWhiteSpace(inputFile, nameof(inputFile));
if (!File.Exists(inputFile))
throw new ArgumentNullException(nameof(inputFile), "File does not exist");
steps = new StepSequence
{
@@ -89,22 +90,24 @@ namespace AaxDecrypter
["End: Create Nfo"] = End_CreateNfo
};
this.inputFileName = inputFile;
inputFileName = inputFile;
this.decryptKey = decryptKey;
}
private async Task prelimProcessing()
{
this.tags = new Tags(this.inputFileName);
this.encodingInfo = new EncodingInfo(this.inputFileName);
this.chapters = new Chapters(this.inputFileName, this.tags.duration.TotalSeconds);
tags = new Tags(inputFileName);
encodingInfo = new EncodingInfo(inputFileName);
chapters = new Chapters(inputFileName, tags.duration.TotalSeconds);
var defaultFilename = Path.Combine(
Path.GetDirectoryName(this.inputFileName),
getASCIITag(this.tags.author),
getASCIITag(this.tags.title) + ".m4b"
Path.GetDirectoryName(inputFileName),
getASCIITag(tags.author),
getASCIITag(tags.title) + ".m4b"
);
SetOutputFilename(defaultFilename);
// set default name
SetOutputFilename(defaultFilename);
await Task.Run(() => saveCover(inputFileName));
}
@@ -118,7 +121,7 @@ namespace AaxDecrypter
private void saveCover(string aaxFile)
{
using var file = TagLib.File.Create(aaxFile, "audio/mp4", TagLib.ReadStyle.Average);
this.coverBytes = file.Tag.Pictures[0].Data.Data;
coverBytes = file.Tag.Pictures[0].Data.Data;
}
private void printPrelim()
@@ -156,21 +159,24 @@ namespace AaxDecrypter
public void SetOutputFilename(string outFileName)
{
this.outputFileName = outFileName;
outputFileName = outFileName;
if (Path.GetExtension(this.outputFileName) != ".m4b")
this.outputFileName = outputFileWithNewExt(".m4b");
if (Path.GetExtension(outputFileName) != ".m4b")
outputFileName = outputFileWithNewExt(".m4b");
this.outDir = Path.GetDirectoryName(this.outputFileName);
if (File.Exists(outputFileName))
File.Delete(outputFileName);
outDir = Path.GetDirectoryName(outputFileName);
}
private string outputFileWithNewExt(string extension)
=> Path.Combine(this.outDir, Path.GetFileNameWithoutExtension(this.outputFileName) + '.' + extension.Trim('.'));
=> Path.Combine(outDir, Path.GetFileNameWithoutExtension(outputFileName) + '.' + extension.Trim('.'));
public bool Step1_CreateDir()
{
ProcessRunner.WorkingDir = this.outDir;
Directory.CreateDirectory(this.outDir);
ProcessRunner.WorkingDir = outDir;
Directory.CreateDirectory(outDir);
return true;
}
@@ -178,7 +184,7 @@ namespace AaxDecrypter
{
DecryptProgressUpdate?.Invoke(this, 0);
var tempRipFile = Path.Combine(this.outDir, "funny.aac");
var tempRipFile = Path.Combine(outDir, "funny.aac");
var fail = "WARNING-Decrypt failure. ";
@@ -193,7 +199,7 @@ namespace AaxDecrypter
if (returnCode == -99)
{
Console.WriteLine($"{fail}Incorrect decrypt key: {decryptKey}");
this.decryptKey = null;
decryptKey = null;
returnCode = getKey_decrypt(tempRipFile);
}
}
@@ -232,7 +238,7 @@ namespace AaxDecrypter
Console.WriteLine("Cracking activation bytes");
var activation_bytes = BytesCracker.GetActivationBytes(checksum);
this.decryptKey = activation_bytes;
decryptKey = activation_bytes;
Console.WriteLine("Activation bytes cracked. Decrypt key: " + activation_bytes);
}
@@ -243,10 +249,10 @@ namespace AaxDecrypter
Console.WriteLine("Decrypting with key " + decryptKey);
var returnCode = 100;
var thread = new Thread(() => returnCode = this.ngDecrypt());
var thread = new Thread(() => returnCode = ngDecrypt());
thread.Start();
double fileLen = new FileInfo(this.inputFileName).Length;
double fileLen = new FileInfo(inputFileName).Length;
while (thread.IsAlive && returnCode == 100)
{
Thread.Sleep(500);
@@ -266,7 +272,7 @@ namespace AaxDecrypter
var info = new ProcessStartInfo
{
FileName = DecryptSupportLibraries.mp4trackdumpPath,
Arguments = "-c " + this.encodingInfo.channels + " -r " + this.encodingInfo.sampleRate + " \"" + this.inputFileName + "\""
Arguments = "-c " + encodingInfo.channels + " -r " + encodingInfo.sampleRate + " \"" + inputFileName + "\""
};
info.EnvironmentVariables["VARIABLE"] = decryptKey;
@@ -279,21 +285,22 @@ namespace AaxDecrypter
return exitCode;
}
// temp file names for steps 3, 4, 5
string tempChapsPath => Path.Combine(this.outDir, "tempChaps.mp4");
// temp file names for steps 3, 4, 5
string tempChapsGuid { get; } = Guid.NewGuid().ToString().ToUpper().Replace("-", "");
string tempChapsPath => Path.Combine(outDir, $"tempChaps_{tempChapsGuid}.mp4");
string mp4_file => outputFileWithNewExt(".mp4");
string ff_txt_file => mp4_file + ".ff.txt";
public bool Step3_Chapterize()
{
string str1 = "";
if (this.chapters.FirstChapterStart != 0.0)
if (chapters.FirstChapterStart != 0.0)
{
str1 = " -ss " + this.chapters.FirstChapterStart.ToString("0.000", CultureInfo.InvariantCulture) + " -t " + (this.chapters.LastChapterStart - 1.0).ToString("0.000", CultureInfo.InvariantCulture) + " ";
str1 = " -ss " + chapters.FirstChapterStart.ToString("0.000", CultureInfo.InvariantCulture) + " -t " + (chapters.LastChapterStart - 1.0).ToString("0.000", CultureInfo.InvariantCulture) + " ";
}
string ffmpegTags = this.tags.GenerateFfmpegTags();
string ffmpegChapters = this.chapters.GenerateFfmpegChapters();
string ffmpegTags = tags.GenerateFfmpegTags();
string ffmpegChapters = chapters.GenerateFfmpegChapters();
File.WriteAllText(ff_txt_file, ffmpegTags + ffmpegChapters);
var tagAndChapterInfo = new ProcessStartInfo
@@ -309,8 +316,8 @@ namespace AaxDecrypter
public bool Step4_InsertCoverArt()
{
// save cover image as temp file
var coverPath = Path.Combine(this.outDir, "cover-" + Guid.NewGuid() + ".jpg");
FileExt.CreateFile(coverPath, this.coverBytes);
var coverPath = Path.Combine(outDir, "cover-" + Guid.NewGuid() + ".jpg");
FileExt.CreateFile(coverPath, coverBytes);
var insertCoverArtInfo = new ProcessStartInfo
{
@@ -329,26 +336,26 @@ namespace AaxDecrypter
{
FileExt.SafeDelete(mp4_file);
FileExt.SafeDelete(ff_txt_file);
FileExt.SafeMove(tempChapsPath, this.outputFileName);
FileExt.SafeMove(tempChapsPath, outputFileName);
return true;
}
public bool Step6_AddTags()
{
this.tags.AddAppleTags(this.outputFileName);
tags.AddAppleTags(outputFileName);
return true;
}
public bool End_CreateCue()
{
File.WriteAllText(outputFileWithNewExt(".cue"), this.chapters.GetCuefromChapters(Path.GetFileName(this.outputFileName)));
File.WriteAllText(outputFileWithNewExt(".cue"), chapters.GetCuefromChapters(Path.GetFileName(outputFileName)));
return true;
}
public bool End_CreateNfo()
{
File.WriteAllText(outputFileWithNewExt(".nfo"), NFO.CreateNfoContents(AppName, this.tags, this.encodingInfo, this.chapters));
File.WriteAllText(outputFileWithNewExt(".nfo"), NFO.CreateNfoContents(AppName, tags, encodingInfo, chapters));
return true;
}
}
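
The dedicated tempChaps GUID above is what fixes the "decrypt file conflict" from the newest commit: the old fixed name tempChaps.mp4 collided whenever two conversions shared an output folder. A minimal sketch of the idea, with illustrative names rather than the converter's real members:

using System;
using System.IO;

// Generate one random suffix when the object is created and reuse it for every
// temp path, so two conversions writing to the same directory can never collide.
class TempNames
{
    // computed once per instance; "N" format drops the dashes, like Replace("-", "")
    private string suffix { get; } = Guid.NewGuid().ToString("N").ToUpper();

    public string TempChapsPath(string outDir)
        => Path.Combine(outDir, $"tempChaps_{suffix}.mp4");
}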

View File

@@ -28,7 +28,7 @@ namespace AaxDecrypter
using TagLib.File tagLibFile = TagLib.File.Create(file, "audio/mp4", ReadStyle.Average);
this.title = tagLibFile.Tag.Title.Replace(" (Unabridged)", "");
this.album = tagLibFile.Tag.Album.Replace(" (Unabridged)", "");
this.author = tagLibFile.Tag.FirstPerformer;
this.author = tagLibFile.Tag.FirstPerformer ?? "[unknown]";
this.year = tagLibFile.Tag.Year.ToString();
this.comments = tagLibFile.Tag.Comment;
this.duration = tagLibFile.Properties.Duration;

View File

@@ -9,30 +9,49 @@ namespace ApplicationServices
{
public static class LibraryCommands
{
public static async Task<(int totalCount, int newCount)> IndexLibraryAsync(ILoginCallback callback)
public static async Task<(int totalCount, int newCount)> ImportLibraryAsync(ILoginCallback callback)
{
var audibleApiActions = new AudibleApiActions();
var items = await audibleApiActions.GetAllLibraryItemsAsync(callback);
var totalCount = items.Count;
try
{
var audibleApiActions = new AudibleApiActions();
var items = await audibleApiActions.GetAllLibraryItemsAsync(callback);
var totalCount = items.Count;
Serilog.Log.Logger.Debug($"GetAllLibraryItems: Total count {totalCount}");
var libImporter = new LibraryImporter();
var newCount = await Task.Run(() => libImporter.Import(items));
var libImporter = new LibraryImporter();
var newCount = await Task.Run(() => libImporter.Import(items));
Serilog.Log.Logger.Debug($"Import: New count {newCount}");
await Task.Run(() => SearchEngineCommands.FullReIndex());
await Task.Run(() => SearchEngineCommands.FullReIndex());
Serilog.Log.Logger.Debug("FullReIndex: success");
return (totalCount, newCount);
return (totalCount, newCount);
}
catch (Exception ex)
{
Serilog.Log.Logger.Error(ex, "Error importing library");
throw;
}
}
public static int UpdateTags(this LibationContext context, Book book, string newTags)
{
book.UserDefinedItem.Tags = newTags;
try
{
book.UserDefinedItem.Tags = newTags;
var qtyChanges = context.SaveChanges();
var qtyChanges = context.SaveChanges();
if (qtyChanges > 0)
SearchEngineCommands.UpdateBookTags(book);
if (qtyChanges > 0)
SearchEngineCommands.UpdateBookTags(book);
return qtyChanges;
return qtyChanges;
}
catch (Exception ex)
{
Serilog.Log.Logger.Error(ex, "Error updating tags");
throw;
}
}
}
}
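
ImportLibraryAsync now wraps its work in a log-and-rethrow block so failures are captured with context but still surface to the caller. A stripped-down sketch of that pattern using the same Serilog logger; doWorkAsync is a stand-in, not a real Libation method:

using System;
using System.Threading.Tasks;

public static class ImportSketch
{
    public static async Task<int> RunAsync(Func<Task<int>> doWorkAsync)
    {
        try
        {
            var count = await doWorkAsync();
            Serilog.Log.Logger.Debug($"Import: New count {count}");
            return count;
        }
        catch (Exception ex)
        {
            // record the failure where the most context is available...
            Serilog.Log.Logger.Error(ex, "Error importing library");
            // ...but don't swallow it; the UI layer decides how to present it
            throw;
        }
    }
}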

View File

@@ -12,13 +12,13 @@
</PropertyGroup>
<ItemGroup>
<PackageReference Include="Microsoft.EntityFrameworkCore.Design" Version="3.0.0">
<PackageReference Include="Microsoft.EntityFrameworkCore.Design" Version="3.0.1">
<PrivateAssets>all</PrivateAssets>
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
</PackageReference>
<PackageReference Include="Microsoft.EntityFrameworkCore.Sqlite" Version="3.0.0" />
<PackageReference Include="Microsoft.EntityFrameworkCore.SqlServer" Version="3.0.0" />
<PackageReference Include="Microsoft.EntityFrameworkCore.Tools" Version="3.0.0">
<PackageReference Include="Microsoft.EntityFrameworkCore.Sqlite" Version="3.0.1" />
<PackageReference Include="Microsoft.EntityFrameworkCore.SqlServer" Version="3.0.1" />
<PackageReference Include="Microsoft.EntityFrameworkCore.Tools" Version="3.0.1">
<PrivateAssets>all</PrivateAssets>
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
</PackageReference>

View File

Binary file not shown.

View File

@@ -9,7 +9,7 @@ using Microsoft.EntityFrameworkCore.Storage.ValueConversion;
namespace DataLayer.Migrations
{
[DbContext(typeof(LibationContext))]
[Migration("20191119144803_Fresh")]
[Migration("20191125182309_Fresh")]
partial class Fresh
{
protected override void BuildTargetModel(ModelBuilder modelBuilder)
@@ -129,6 +129,13 @@ namespace DataLayer.Migrations
b.HasIndex("Name");
b.ToTable("Contributors");
b.HasData(
new
{
ContributorId = -1,
Name = ""
});
});
modelBuilder.Entity("DataLayer.LibraryBook", b =>

View File

@@ -200,6 +200,11 @@ namespace DataLayer.Migrations
columns: new[] { "CategoryId", "AudibleCategoryId", "Name", "ParentCategoryCategoryId" },
values: new object[] { -1, "", "", null });
migrationBuilder.InsertData(
table: "Contributors",
columns: new[] { "ContributorId", "AudibleContributorId", "Name" },
values: new object[] { -1, null, "" });
migrationBuilder.CreateIndex(
name: "IX_BookContributor_BookId",
table: "BookContributor",

View File

@@ -127,6 +127,13 @@ namespace DataLayer.Migrations
b.HasIndex("Name");
b.ToTable("Contributors");
b.HasData(
new
{
ContributorId = -1,
Name = ""
});
});
modelBuilder.Entity("DataLayer.LibraryBook", b =>

View File

@@ -62,7 +62,8 @@ namespace DataLayer
string description,
int lengthInMinutes,
IEnumerable<Contributor> authors,
IEnumerable<Contributor> narrators)
IEnumerable<Contributor> narrators,
Category category)
{
// validate
ArgumentValidator.EnsureNotNull(audibleProductId, nameof(audibleProductId));
@@ -80,8 +81,7 @@ namespace DataLayer
_seriesLink = new HashSet<SeriesBook>();
_supplements = new HashSet<Supplement>();
// since category/id is never null, nullity means it hasn't been loaded
CategoryId = Category.GetEmpty().CategoryId;
Category = category;
// simple assigns
Title = title;
@@ -91,7 +91,7 @@ namespace DataLayer
// assigns with biz logic
ReplaceAuthors(authors);
ReplaceNarrators(narrators);
}
}
#region contributors, authors, narrators
// use uninitialised backing fields - this means we can detect if the collection was loaded
@@ -123,16 +123,10 @@ namespace DataLayer
ArgumentValidator.EnsureEnumerableNotNullOrEmpty(newContributors, nameof(newContributors));
// the edge cases of doing local-loaded vs remote-only got weird. just load it
if (_contributorsLink == null)
{
ArgumentValidator.EnsureNotNull(context, nameof(context));
if (!context.Entry(this).IsKeySet)
throw new InvalidOperationException("Could not add contributors");
if (_contributorsLink is null)
getEntry(context).Collection(s => s.ContributorsLink).Load();
context.Entry(this).Collection(s => s.ContributorsLink).Load();
}
var roleContributions = getContributions(role);
var roleContributions = getContributions(role);
var isIdentical = roleContributions.Select(c => c.Contributor).SequenceEqual(newContributors);
if (isIdentical)
return;
@@ -140,7 +134,8 @@ namespace DataLayer
_contributorsLink.RemoveWhere(bc => bc.Role == role);
addNewContributors(newContributors, role);
}
private void addNewContributors(IEnumerable<Contributor> newContributors, Role role)
private void addNewContributors(IEnumerable<Contributor> newContributors, Role role)
{
byte order = 0;
var newContributionsEnum = newContributors.Select(c => new BookContributor(this, c, role, order++));
@@ -155,6 +150,18 @@ namespace DataLayer
.ToList();
#endregion
private Microsoft.EntityFrameworkCore.ChangeTracking.EntityEntry<Book> getEntry(DbContext context)
{
ArgumentValidator.EnsureNotNull(context, nameof(context));
var entry = context.Entry(this);
if (!entry.IsKeySet)
throw new InvalidOperationException("Could not load a valid Book from database");
return entry;
}
#region series
private HashSet<SeriesBook> _seriesLink;
public IEnumerable<SeriesBook> SeriesLink => _seriesLink?.ToList();
@@ -186,16 +193,10 @@ namespace DataLayer
// our add() is conditional upon what's already included in the collection.
// therefore if not loaded, a trip is required. might as well just load it
if (_seriesLink == null)
{
ArgumentValidator.EnsureNotNull(context, nameof(context));
if (!context.Entry(this).IsKeySet)
throw new InvalidOperationException("Could not add series");
if (_seriesLink is null)
getEntry(context).Collection(s => s.SeriesLink).Load();
context.Entry(this).Collection(s => s.SeriesLink).Load();
}
var singleSeriesBook = _seriesLink.SingleOrDefault(sb => sb.Series == series);
var singleSeriesBook = _seriesLink.SingleOrDefault(sb => sb.Series == series);
if (singleSeriesBook == null)
_seriesLink.Add(new SeriesBook(series, this, index));
else
@@ -211,8 +212,7 @@ namespace DataLayer
public void AddSupplementDownloadUrl(string url)
{
// supplements are owned by Book, so no need to Load():
// OwnsMany: "Can only ever appear on navigation properties of other entity types.
// Are automatically loaded, and can only be tracked by a DbContext alongside their owner."
// Are automatically loaded, and can only be tracked by a DbContext alongside their owner.
ArgumentValidator.EnsureNotNullOrWhiteSpace(url, nameof(url));
@@ -232,19 +232,12 @@ namespace DataLayer
}
public void UpdateCategory(Category category, DbContext context = null)
{
// since category is never null, nullity means it hasn't been loaded
if (Category != null || CategoryId == Category.GetEmpty().CategoryId)
{
Category = category;
return;
}
{
// since category is never null, nullity means it hasn't been loaded
if (Category is null)
getEntry(context).Reference(s => s.Category).Load();
if (context == null)
throw new Exception("need context");
context.Entry(this).Reference(s => s.Category).Load();
Category = category;
Category = category;
}
public override string ToString() => $"[{AudibleProductId}] {Title}";
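
The new getEntry() helper centralizes EF Core explicit loading: when a backing collection or reference is still null, load it through the entity's entry before touching it. A hedged sketch of that shape with placeholder Blog/Post/Person entities, not Libation types:

using System;
using System.Collections.Generic;
using Microsoft.EntityFrameworkCore;

class Blog
{
    public int BlogId { get; set; }
    public List<Post> Posts { get; set; }
    public Person Owner { get; set; }
}
class Post { public int PostId { get; set; } }
class Person { public int PersonId { get; set; } }

static class ExplicitLoadSketch
{
    public static void EnsureNavigationsLoaded(DbContext context, Blog blog)
    {
        var entry = context.Entry(blog);

        // an entity that was never persisted has no key to query by
        if (!entry.IsKeySet)
            throw new InvalidOperationException("Could not load a valid Blog from database");

        // collection navigation: one targeted round trip, only when still unloaded
        if (!entry.Collection(b => b.Posts).IsLoaded)
            entry.Collection(b => b.Posts).Load();

        // reference navigations use the same shape
        if (!entry.Reference(b => b.Owner).IsLoaded)
            entry.Reference(b => b.Owner).Load();
    }
}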

View File

@@ -18,8 +18,7 @@ namespace DataLayer
public class Category
{
// Empty is a special case. use private ctor w/o validation
public static Category GetEmpty() => new Category { CategoryId = -1, AudibleCategoryId = "", Name = "", ParentCategory = null };
public bool IsEmpty() => string.IsNullOrWhiteSpace(AudibleCategoryId) || string.IsNullOrWhiteSpace(Name) || ParentCategory == null;
public static Category GetEmpty() => new Category { CategoryId = -1, AudibleCategoryId = "", Name = "" };
internal int CategoryId { get; private set; }
public string AudibleCategoryId { get; private set; }

View File

@@ -5,21 +5,24 @@ using Dinah.Core;
namespace DataLayer
{
public class Contributor
{
// contributors search links are just name with url-encoding. space can be + or %20
// author search link: /search?searchAuthor=Robert+Bevan
// narrator search link: /search?searchNarrator=Robert+Bevan
// can also search multiples. concat with comma before url encode
{
// Empty is a special case. use private ctor w/o validation
public static Contributor GetEmpty() => new Contributor { ContributorId = -1, Name = "" };
// id.s
// ----
// https://www.audible.com/author/Neil-Gaiman/B000AQ01G2 == https://www.audible.com/author/B000AQ01G2
// goes to summary page
// at bottom "See all titles by Neil Gaiman" goes to https://www.audible.com/search?searchAuthor=Neil+Gaiman
// some authors have no id. simply goes to https://www.audible.com/search?searchAuthor=Rufus+Fears
// all narrators have no id: https://www.audible.com/search?searchNarrator=Neil+Gaiman
// contributors search links are just name with url-encoding. space can be + or %20
// author search link: /search?searchAuthor=Robert+Bevan
// narrator search link: /search?searchNarrator=Robert+Bevan
// can also search multiples. concat with comma before url encode
internal int ContributorId { get; private set; }
// id.s
// ----
// https://www.audible.com/author/Neil-Gaiman/B000AQ01G2 == https://www.audible.com/author/B000AQ01G2
// goes to summary page
// at bottom "See all titles by Neil Gaiman" goes to https://www.audible.com/search?searchAuthor=Neil+Gaiman
// some authors have no id. simply goes to https://www.audible.com/search?searchAuthor=Rufus+Fears
// all narrators have no id: https://www.audible.com/search?searchNarrator=Neil+Gaiman
internal int ContributorId { get; private set; }
public string Name { get; private set; }
private HashSet<BookContributor> _booksLink;

View File

@@ -56,12 +56,16 @@ namespace DataLayer
modelBuilder.ApplyConfiguration(new SeriesBookConfig());
modelBuilder.ApplyConfiguration(new CategoryConfig());
// seeds go here. examples in scratch pad
modelBuilder
.Entity<Category>()
.HasData(Category.GetEmpty());
// seeds go here. examples in scratch pad
modelBuilder
.Entity<Category>()
.HasData(Category.GetEmpty());
modelBuilder
.Entity<Contributor>()
.HasData(Contributor.GetEmpty());
// views are now supported via "query types" (instead of "entity types"): https://docs.microsoft.com/en-us/ef/core/modeling/query-types
}
}
// views are now supported via "keyless entity types" (instead of "entity types" or the prev "query types"):
// https://docs.microsoft.com/en-us/ef/core/modeling/keyless-entity-types
}
}
}

View File

@@ -4,7 +4,6 @@ using System.Linq;
using Dinah.Core.Collections.Generic;
using Dinah.EntityFrameworkCore;
using Microsoft.EntityFrameworkCore;
using Microsoft.EntityFrameworkCore.ChangeTracking;
namespace DataLayer
{
@@ -14,25 +13,18 @@ namespace DataLayer
public void Executing(DbContext context)
{
// persist tags:
var modifiedEntities = context
var tagsCollection
= context
.ChangeTracker
.Entries()
.Where(p => p.State.In(EntityState.Modified, EntityState.Added))
.ToList();
persistTags(modifiedEntities);
}
private static void persistTags(List<EntityEntry> modifiedEntities)
{
var tagSets = modifiedEntities
.Where(e => e.State.In(EntityState.Modified, EntityState.Added))
.Select(e => e.Entity as UserDefinedItem)
// filter by null but NOT by blank. blank is the valid way to show the absence of tags
.Where(a => a != null)
.Where(udi => udi != null)
// do NOT filter out entires with blank tags. blank is the valid way to show the absence of tags
.Select(t => (t.Book.AudibleProductId, t.Tags))
.ToList();
foreach (var t in tagSets)
FileManager.TagsPersistence.Save(t.Book.AudibleProductId, t.Tags);
FileManager.TagsPersistence.Save(tagsCollection);
}
}
}
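
The rewritten Executing() gathers every added or modified UserDefinedItem from the ChangeTracker and hands the whole batch to TagsPersistence.Save in one call, which is the "Optimize tag persistence" commit. A reduced sketch, with placeholder types standing in for the real DataLayer classes:

using System;
using System.Collections.Generic;
using System.Linq;
using Microsoft.EntityFrameworkCore;

// placeholder shapes, not the real DataLayer types
class UserDefinedItem { public Book Book { get; set; } public string Tags { get; set; } }
class Book { public string AudibleProductId { get; set; } }

static class TagScanSketch
{
    public static void PersistPendingTags(DbContext context, Action<IEnumerable<(string productId, string tags)>> save)
    {
        var tagsCollection = context
            .ChangeTracker
            .Entries()
            .Where(e => e.State == EntityState.Modified || e.State == EntityState.Added)
            .Select(e => e.Entity as UserDefinedItem)
            // drop entries that aren't UserDefinedItem, but keep blank tags: blank is the valid "no tags" value
            .Where(udi => udi != null)
            .Select(udi => (udi.Book.AudibleProductId, udi.Tags))
            .ToList();

        // one batched write instead of one file write per book
        save(tagsCollection);
    }
}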

View File

@@ -7,15 +7,22 @@ nuget
Microsoft.EntityFrameworkCore.Tools (needed for using Package Manager Console)
Microsoft.EntityFrameworkCore.Sqlite
MIGRATIONS require standard, not core
using standard instead of core. edit 3 things in csproj
1of3: pluralize xml TargetFramework tag to TargetFrameworks
2of2: TargetFrameworks from: netstandard2.1
to: netcoreapp3.0;netstandard2.1
3of3: add
<PropertyGroup>
<GenerateRuntimeConfigurationFiles>true</GenerateRuntimeConfigurationFiles>
</PropertyGroup>
MIGRATIONS
require core, not standard
this can be a problem b/c standard and framework can only reference standard, not core
TO USE MIGRATIONS (core and/or standard)
add to csproj
<PropertyGroup>
<GenerateRuntimeConfigurationFiles>true</GenerateRuntimeConfigurationFiles>
</PropertyGroup>
TO USE MIGRATIONS AS *BOTH* CORE AND STANDARD
edit csproj
pluralize this xml tag
from: TargetFramework
to: TargetFrameworks
inside of TargetFrameworks
from: netstandard2.1
to: netcoreapp3.0;netstandard2.1
run. error
SQLite Error 1: 'no such table: Blogs'.

View File

@@ -3,7 +3,10 @@
"LibationContext_sqlserver": "Server=(LocalDb)\\MSSQLLocalDB;Database=DataLayer.LibationContext;Integrated Security=true;",
"LibationContext": "Data Source=LibationContext.db;Foreign Keys=False;",
"// on windows sqlite paths accept windows and/or unix slashes": "",
"// sqlite notes": "",
"// absolute path example": "Data Source=C:/foo/bar/sample.db",
"// relative path example": "Data Source=sample.db",
"// on windows: sqlite paths accept windows and/or unix slashes": "",
"MyTestContext": "Data Source=%DESKTOP%/sample.db"
}
}
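
For reference, a connection string like the ones above gets consumed via UseSqlite. The sketch below is illustrative (SampleContext is not Libation's real context): relative paths resolve against the working directory, and absolute paths or forward slashes also work on Windows, as the notes say.

using Microsoft.EntityFrameworkCore;

public class SampleContext : DbContext
{
    protected override void OnConfiguring(DbContextOptionsBuilder options)
        => options.UseSqlite("Data Source=LibationContext.db;Foreign Keys=False;");
}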

View File

@@ -1,127 +0,0 @@
using System;
using System.Collections.Generic;
using System.Linq;
using AudibleApiDTOs;
using DataLayer;
using InternalUtilities;
namespace DtoImporterService
{
public class BookImporter : ItemsImporterBase
{
public override IEnumerable<Exception> Validate(IEnumerable<Item> items) => new BookValidator().Validate(items);
protected override int DoImport(IEnumerable<Item> items, LibationContext context)
{
// pre-req.s
new ContributorImporter().Import(items, context);
new SeriesImporter().Import(items, context);
new CategoryImporter().Import(items, context);
// get distinct
var productIds = items.Select(i => i.ProductId).ToList();
// load db existing => .Local
loadLocal_books(productIds, context);
// upsert
var qtyNew = upsertBooks(items, context);
return qtyNew;
}
private void loadLocal_books(List<string> productIds, LibationContext context)
{
var localProductIds = context.Books.Local.Select(b => b.AudibleProductId);
var remainingProductIds = productIds
.Distinct()
.Except(localProductIds)
.ToList();
// GetBooks() eager loads Series, category, et al
if (remainingProductIds.Any())
context.Books.GetBooks(b => remainingProductIds.Contains(b.AudibleProductId)).ToList();
}
private int upsertBooks(IEnumerable<Item> items, LibationContext context)
{
var qtyNew = 0;
foreach (var item in items)
{
var book = context.Books.Local.SingleOrDefault(p => p.AudibleProductId == item.ProductId);
if (book is null)
{
// nested logic is required so order of names is retained. else, contributors may appear in the order they were inserted into the db
var authors = item
.Authors
.Select(a => context.Contributors.Local.Single(c => a.Name == c.Name))
.ToList();
// if no narrators listed, author is the narrator
if (item.Narrators is null || !item.Narrators.Any())
item.Narrators = item.Authors;
// nested logic is required so order of names is retained. else, contributors may appear in the order they were inserted into the db
var narrators = item
.Narrators
.Select(n => context.Contributors.Local.Single(c => n.Name == c.Name))
.ToList();
book = context.Books.Add(new Book(
new AudibleProductId(item.ProductId),
item.Title,
item.Description,
item.LengthInMinutes,
authors,
narrators)
).Entity;
var publisherName = item.Publisher;
if (!string.IsNullOrWhiteSpace(publisherName))
{
var publisher = context.Contributors.Local.Single(c => publisherName == c.Name);
book.ReplacePublisher(publisher);
}
qtyNew++;
}
// set/update book-specific info which may have changed
book.PictureId = item.PictureId;
book.UpdateProductRating(item.Product_OverallStars, item.Product_PerformanceStars, item.Product_StoryStars);
if (!string.IsNullOrWhiteSpace(item.SupplementUrl))
book.AddSupplementDownloadUrl(item.SupplementUrl);
// important to update user-specific info. this will have changed if user has rated/reviewed the book since last library import
book.UserDefinedItem.UpdateRating(item.MyUserRating_Overall, item.MyUserRating_Performance, item.MyUserRating_Story);
//
// this was round 1 when it was a 2 step process
//
//// update series even for existing books. these are occasionally updated
//var seriesIds = item.Series.Select(kvp => kvp.SeriesId).ToList();
//var allSeries = context.Series.Local.Where(c => seriesIds.Contains(c.AudibleSeriesId)).ToList();
//foreach (var series in allSeries)
// book.UpsertSeries(series);
// these will upsert over library-scraped series, but will not leave orphans
if (item.Series != null)
{
foreach (var seriesEntry in item.Series)
{
var series = context.Series.Local.Single(s => seriesEntry.SeriesId == s.AudibleSeriesId);
book.UpsertSeries(series, seriesEntry.Index);
}
}
// categories are laid out for a breadcrumb. category is 1st, subcategory is 2nd
var category = context.Categories.Local.SingleOrDefault(c => c.AudibleCategoryId == item.Categories.LastOrDefault().CategoryId);
if (category != null)
book.UpdateCategory(category, context);
book.UpdateBookDetails(item.IsAbridged, item.DatePublished);
}
return qtyNew;
}
}
}

View File

@@ -0,0 +1,137 @@
using System;
using System.Collections.Generic;
using System.Linq;
using AudibleApiDTOs;
using DataLayer;
using InternalUtilities;
namespace DtoImporterService
{
public class BookImporter : ItemsImporterBase
{
public override IEnumerable<Exception> Validate(IEnumerable<Item> items) => new BookValidator().Validate(items);
protected override int DoImport(IEnumerable<Item> items, LibationContext context)
{
// pre-req.s
new ContributorImporter().Import(items, context);
new SeriesImporter().Import(items, context);
new CategoryImporter().Import(items, context);
// get distinct
var productIds = items.Select(i => i.ProductId).ToList();
// load db existing => .Local
loadLocal_books(productIds, context);
// upsert
var qtyNew = upsertBooks(items, context);
return qtyNew;
}
private void loadLocal_books(List<string> productIds, LibationContext context)
{
var localProductIds = context.Books.Local.Select(b => b.AudibleProductId);
var remainingProductIds = productIds
.Distinct()
.Except(localProductIds)
.ToList();
// GetBooks() eager loads Series, category, et al
if (remainingProductIds.Any())
context.Books.GetBooks(b => remainingProductIds.Contains(b.AudibleProductId)).ToList();
}
private int upsertBooks(IEnumerable<Item> items, LibationContext context)
{
var qtyNew = 0;
foreach (var item in items)
{
var book = context.Books.Local.SingleOrDefault(p => p.AudibleProductId == item.ProductId);
if (book is null)
{
book = createNewBook(item, context);
qtyNew++;
}
updateBook(item, book, context);
}
return qtyNew;
}
private static Book createNewBook(Item item, LibationContext context)
{
// absence of authors is very rare, but possible
if (!item.Authors?.Any() ?? true)
item.Authors = new[] { new Person { Name = "", Asin = null } };
// nested logic is required so order of names is retained. else, contributors may appear in the order they were inserted into the db
var authors = item
.Authors
.Select(a => context.Contributors.Local.Single(c => a.Name == c.Name))
.ToList();
var narrators
= item.Narrators is null || !item.Narrators.Any()
// if no narrators listed, author is the narrator
? authors
// nested logic is required so order of names is retained. else, contributors may appear in the order they were inserted into the db
: item
.Narrators
.Select(n => context.Contributors.Local.Single(c => n.Name == c.Name))
.ToList();
// categories are laid out for a breadcrumb. category is 1st, subcategory is 2nd
// absence of categories is very rare, but possible
var lastCategory = item.Categories.LastOrDefault()?.CategoryId ?? "";
var category = context.Categories.Local.SingleOrDefault(c => c.AudibleCategoryId == lastCategory);
var book = context.Books.Add(new Book(
new AudibleProductId(item.ProductId),
item.Title,
item.Description,
item.LengthInMinutes,
authors,
narrators,
category)
).Entity;
var publisherName = item.Publisher;
if (!string.IsNullOrWhiteSpace(publisherName))
{
var publisher = context.Contributors.Local.Single(c => publisherName == c.Name);
book.ReplacePublisher(publisher);
}
book.UpdateBookDetails(item.IsAbridged, item.DatePublished);
if (!string.IsNullOrWhiteSpace(item.SupplementUrl))
book.AddSupplementDownloadUrl(item.SupplementUrl);
return book;
}
private static void updateBook(Item item, Book book, LibationContext context)
{
// set/update book-specific info which may have changed
book.PictureId = item.PictureId;
book.UpdateProductRating(item.Product_OverallStars, item.Product_PerformanceStars, item.Product_StoryStars);
// important to update user-specific info. this will have changed if user has rated/reviewed the book since last library import
book.UserDefinedItem.UpdateRating(item.MyUserRating_Overall, item.MyUserRating_Performance, item.MyUserRating_Story);
// update series even for existing books. these are occasionally updated
// these will upsert over library-scraped series, but will not leave orphans
if (item.Series != null)
{
foreach (var seriesEntry in item.Series)
{
var series = context.Series.Local.Single(s => seriesEntry.SeriesId == s.AudibleSeriesId);
book.UpsertSeries(series, seriesEntry.Index);
}
}
}
}
}
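
The importer relies on a "load matching rows into .Local, then upsert against .Local" pattern: one Where() query materializes existing rows into the change tracker, and every existence check after that is purely in-memory. A minimal sketch with a placeholder Widget entity, not a Libation type:

using System.Collections.Generic;
using System.Linq;
using Microsoft.EntityFrameworkCore;

public class Widget
{
    public int WidgetId { get; set; }
    public string ExternalId { get; set; }
}

public static class UpsertSketch
{
    public static int Upsert(DbSet<Widget> widgets, IEnumerable<string> incomingIds)
    {
        var ids = incomingIds.Distinct().ToList();

        // load db existing => .Local (the query result is discarded on purpose;
        // materializing it is what populates the change tracker)
        widgets.Where(w => ids.Contains(w.ExternalId)).ToList();

        var qtyNew = 0;
        foreach (var id in ids)
        {
            var existing = widgets.Local.SingleOrDefault(w => w.ExternalId == id);
            if (existing is null)
            {
                widgets.Add(new Widget { ExternalId = id });
                qtyNew++;
            }
            // existing rows would be updated in place here
        }
        return qtyNew;
    }
}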

View File

@@ -33,8 +33,11 @@ namespace DtoImporterService
.Except(localIds)
.ToList();
// load existing => local
// remember to include default/empty/missing
var emptyName = Contributor.GetEmpty().Name;
if (remainingCategoryIds.Any())
context.Categories.Where(c => remainingCategoryIds.Contains(c.AudibleCategoryId)).ToList();
context.Categories.Where(c => remainingCategoryIds.Contains(c.AudibleCategoryId) || c.Name == emptyName).ToList();
}
// only use after loading contributors => local

View File

@@ -19,10 +19,10 @@ namespace DtoImporterService
var publishers = items.GetPublishersDistinct().ToList();
// load db existing => .Local
var allNames = authors
.Select(a => a.Name)
var allNames = publishers
.Union(authors.Select(n => n.Name))
.Union(narrators.Select(n => n.Name))
.Union(publishers)
.Where(name => !string.IsNullOrWhiteSpace(name))
.ToList();
loadLocal_contributors(allNames, context);
@@ -36,9 +36,6 @@ namespace DtoImporterService
private void loadLocal_contributors(List<string> contributorNames, LibationContext context)
{
contributorNames.Remove(null);
contributorNames.Remove("");
//// BAD: very inefficient
// var x = context.Contributors.Local.Where(c => !contribNames.Contains(c.Name));
@@ -50,11 +47,10 @@ namespace DtoImporterService
.ToList();
// load existing => local
// remember to include default/empty/missing
var emptyName = Contributor.GetEmpty().Name;
if (remainingContribNames.Any())
context.Contributors.Where(c => remainingContribNames.Contains(c.Name)).ToList();
// _________________________________^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
// i tried to extract this pattern, but this part prohibits doing so
// wouldn't work anyway for Books.GetBooks()
context.Contributors.Where(c => remainingContribNames.Contains(c.Name) || c.Name == emptyName).ToList();
}
// only use after loading contributors => local

View File

@@ -20,12 +20,28 @@ namespace DtoImporterService
}
}
var exceptions = Validate(param);
if (exceptions != null && exceptions.Any())
throw new AggregateException($"Device Jobs Service configuration validation failed", exceptions);
try
{
var exceptions = Validate(param);
if (exceptions != null && exceptions.Any())
throw new AggregateException($"Importer validation failed", exceptions);
}
catch (Exception ex)
{
Serilog.Log.Logger.Error(ex, "Import error: validation");
throw;
}
var result = func(param, context);
return result;
try
{
var result = func(param, context);
return result;
}
catch (Exception ex)
{
Serilog.Log.Logger.Error(ex, "Import error: post-validation importing");
throw;
}
}
IEnumerable<Exception> Validate(T param);
}

View File

@@ -17,17 +17,17 @@ namespace DtoImporterService
var series = items.GetSeriesDistinct().ToList();
// load db existing => .Local
var seriesIds = series.Select(s => s.SeriesId).ToList();
loadLocal_series(seriesIds, context);
loadLocal_series(series, context);
// upsert
var qtyNew = upsertSeries(series, context);
return qtyNew;
}
private void loadLocal_series(List<string> seriesIds, LibationContext context)
private void loadLocal_series(List<AudibleApiDTOs.Series> series, LibationContext context)
{
var localIds = context.Series.Local.Select(s => s.AudibleSeriesId);
var seriesIds = series.Select(s => s.SeriesId).ToList();
var localIds = context.Series.Local.Select(s => s.AudibleSeriesId).ToList();
var remainingSeriesIds = seriesIds
.Distinct()
.Except(localIds)

View File

@@ -94,6 +94,7 @@ namespace FileLiberator
NarratorsDiscovered?.Invoke(this, converter.tags.narrator);
CoverImageFilepathDiscovered?.Invoke(this, converter.coverBytes);
// override default which was set in CreateAsync
converter.SetOutputFilename(proposedOutputFile);
converter.DecryptProgressUpdate += (s, progress) => UpdateProgress?.Invoke(this, progress);
@@ -117,46 +118,58 @@ namespace FileLiberator
}
private static void moveFilesToBooksDir(Book product, string outputAudioFilename)
{
// files are: temp path\author\[asin].ext
var m4bDir = new FileInfo(outputAudioFilename).Directory;
var files = m4bDir
.EnumerateFiles()
.Where(f => f.Name.ContainsInsensitive(product.AudibleProductId))
.ToList();
{
// create final directory. move each file into it. MOVE AUDIO FILE LAST
// new dir: safetitle_limit50char + " [" + productId + "]"
// create final directory. move each file into it. MOVE AUDIO FILE LAST
// new dir: safetitle_limit50char + " [" + productId + "]"
var destinationDir = getDestDir(product);
Directory.CreateDirectory(destinationDir);
// to prevent the paths from getting too long, we don't need after the 1st ":" for the folder
var underscoreIndex = product.Title.IndexOf(':');
var titleDir = (underscoreIndex < 4) ? product.Title : product.Title.Substring(0, underscoreIndex);
var finalDir = FileUtility.GetValidFilename(AudibleFileStorage.BooksDirectory, titleDir, null, product.AudibleProductId);
Directory.CreateDirectory(finalDir);
var sortedFiles = getProductFilesSorted(product, outputAudioFilename);
// move audio files to the end of the collection so these files are moved last
var musicFiles = files.Where(f => AudibleFileStorage.Audio.IsFileTypeMatch(f));
files = files
.Except(musicFiles)
.Concat(musicFiles)
.ToList();
var musicFileExt = Path.GetExtension(outputAudioFilename).Trim('.');
var musicFileExt = musicFiles
.Select(f => f.Extension)
.Distinct()
.Single()
.Trim('.');
foreach (var f in sortedFiles)
{
var dest = AudibleFileStorage.Audio.IsFileTypeMatch(f)
// audio filename: safetitle_limit50char + " [" + productId + "]." + audio_ext
? FileUtility.GetValidFilename(destinationDir, product.Title, musicFileExt, product.AudibleProductId)
// non-audio filename: safetitle_limit50char + " [" + productId + "][" + audio_ext +"]." + non_audio_ext
: FileUtility.GetValidFilename(destinationDir, product.Title, f.Extension, product.AudibleProductId, musicFileExt);
foreach (var f in files)
{
var dest = AudibleFileStorage.Audio.IsFileTypeMatch(f)
// audio filename: safetitle_limit50char + " [" + productId + "]." + audio_ext
? FileUtility.GetValidFilename(finalDir, product.Title, musicFileExt, product.AudibleProductId)
// non-audio filename: safetitle_limit50char + " [" + productId + "][" + audio_ext +"]." + non_audio_ext
: FileUtility.GetValidFilename(finalDir, product.Title, f.Extension, product.AudibleProductId, musicFileExt);
File.Move(f.FullName, dest);
}
}
File.Move(f.FullName, dest);
}
}
}
private static string getDestDir(Book product)
{
// to prevent the paths from getting too long, we don't need after the 1st ":" for the folder
var underscoreIndex = product.Title.IndexOf(':');
var titleDir
= underscoreIndex < 4
? product.Title
: product.Title.Substring(0, underscoreIndex);
var finalDir = FileUtility.GetValidFilename(AudibleFileStorage.BooksDirectory, titleDir, null, product.AudibleProductId);
return finalDir;
}
private static List<FileInfo> getProductFilesSorted(Book product, string outputAudioFilename)
{
// files are: temp path\author\[asin].ext
var m4bDir = new FileInfo(outputAudioFilename).Directory;
var files = m4bDir
.EnumerateFiles()
.Where(f => f.Name.ContainsInsensitive(product.AudibleProductId))
.ToList();
// move audio files to the end of the collection so these files are moved last
var musicFiles = files.Where(f => AudibleFileStorage.Audio.IsFileTypeMatch(f));
var sortedFiles = files
.Except(musicFiles)
.Concat(musicFiles)
.ToList();
return sortedFiles;
}
}
}

View File

@@ -44,6 +44,14 @@ namespace FileLiberator
tempAaxFilename,
(p) => api.DownloadAaxWorkaroundAsync(libraryBook.Book.AudibleProductId, tempAaxFilename, p));
// if bad file download, a 0-33 byte file will be created
System.Threading.Thread.Sleep(100);
if (new FileInfo(actualFilePath).Length < 100)
{
File.Delete(actualFilePath);
throw new Exception("Error downloading file");
}
return actualFilePath;
}

View File

@@ -2,11 +2,12 @@
using System.Collections.Generic;
using System.IO;
using System.Linq;
using Dinah.Core.Collections.Immutable;
using Newtonsoft.Json;
namespace FileManager
{
public static class FilePathCache
public static class FilePathCache
{
internal class CacheEntry
{
@@ -15,22 +16,25 @@ namespace FileManager
public string Path { get; set; }
}
static List<CacheEntry> inMemoryCache { get; } = new List<CacheEntry>();
static Cache<CacheEntry> cache { get; } = new Cache<CacheEntry>();
public static string JsonFile => Path.Combine(Configuration.Instance.LibationFiles, "FilePaths.json");
static FilePathCache()
{
// load json into memory. if file doesn't exist, nothing to do. save() will create if needed
if (FileUtility.FileExists(JsonFile))
inMemoryCache = JsonConvert.DeserializeObject<List<CacheEntry>>(File.ReadAllText(JsonFile));
// load json into memory. if file doesn't exist, nothing to do. save() will create if needed
if (FileUtility.FileExists(JsonFile))
{
var list = JsonConvert.DeserializeObject<List<CacheEntry>>(File.ReadAllText(JsonFile));
cache = new Cache<CacheEntry>(list);
}
}
public static bool Exists(string id, FileType type) => GetPath(id, type) != null;
public static string GetPath(string id, FileType type)
{
var entry = inMemoryCache.SingleOrDefault(i => i.Id == id && i.FileType == type);
var entry = cache.SingleOrDefault(i => i.Id == id && i.FileType == type);
if (entry == null)
return null;
@@ -44,51 +48,47 @@ namespace FileManager
return entry.Path;
}
private static object locker { get; } = new object();
private static void remove(CacheEntry entry)
{
lock (locker)
{
inMemoryCache.Remove(entry);
save();
}
}
{
cache.Remove(entry);
save();
}
public static void Upsert(string id, FileType type, string path)
{
if (!FileUtility.FileExists(path))
throw new FileNotFoundException("Cannot add path to cache. File not found");
lock (locker)
{
var entry = inMemoryCache.SingleOrDefault(i => i.Id == id && i.FileType == type);
if (entry != null)
entry.Path = path;
else
{
entry = new CacheEntry { Id = id, FileType = type, Path = path };
inMemoryCache.Add(entry);
}
save();
}
}
var entry = cache.SingleOrDefault(i => i.Id == id && i.FileType == type);
// ONLY call this within lock()
private static void save()
{
// create json if not exists
void resave() => File.WriteAllText(JsonFile, JsonConvert.SerializeObject(inMemoryCache, Formatting.Indented));
try { resave(); }
catch (IOException)
{
try { resave(); }
catch (IOException)
{
Console.WriteLine("...that's not good");
throw;
}
}
if (entry is null)
cache.Add(new CacheEntry { Id = id, FileType = type, Path = path });
else
entry.Path = path;
save();
}
// cache is thread-safe and lock free. but file saving is not
private static object locker { get; } = new object();
private static void save()
{
// create json if not exists
static void resave() => File.WriteAllText(JsonFile, JsonConvert.SerializeObject(cache.ToList(), Formatting.Indented));
lock (locker)
{
try { resave(); }
catch (IOException)
{
try { resave(); }
catch (IOException ex)
{
Serilog.Log.Logger.Error(ex, "Error saving FilePaths.json");
throw;
}
}
}
}
}
}

View File

@@ -105,12 +105,12 @@ namespace FileManager
catch (IOException)
{
try { resave(); }
catch (IOException)
{
Console.WriteLine("...that's not good");
throw;
}
}
catch (IOException ex)
{
Serilog.Log.Logger.Error(ex, "Error saving QuickFilters.json");
throw;
}
}
}
}
}

View File

@@ -27,11 +27,13 @@ namespace FileManager
= Policy.Handle<Exception>()
.WaitAndRetry(new[] { TimeSpan.FromMilliseconds(100) });
public static void Save(string productId, string tags)
public static void Save(IEnumerable<(string productId, string tags)> tagsCollection)
{
ensureCache();
cache[productId] = tags;
// on initial reload, there's a huge benefit to adding to cache individually then updating the file only once
foreach ((string productId, string tags) in tagsCollection)
cache[productId] = tags;
lock (locker)
policy.Execute(() => File.WriteAllText(TagsFile, JsonConvert.SerializeObject(cache, Formatting.Indented)));
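
TagsPersistence guards its file write with a Polly policy: a transient exception (for example, the file briefly held by another writer) gets one short retry before the error propagates. A sketch of that wiring with a placeholder path and payload:

using System;
using System.IO;
using Polly;

public static class RetrySaveSketch
{
    // one retry after 100ms, mirroring the policy shown above
    private static readonly Policy policy
        = Policy.Handle<Exception>()
                .WaitAndRetry(new[] { TimeSpan.FromMilliseconds(100) });

    public static void Save(string path, string json)
        => policy.Execute(() => File.WriteAllText(path, json));
}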

View File

@@ -27,11 +27,24 @@ namespace InternalUtilities
ResponseGroups = LibraryOptions.ResponseGroupOptions.ALL_OPTIONS
});
// important! use this convert method
var libResult = LibraryDtoV10.FromJson(page.ToString());
var pageStr = page.ToString();
LibraryDtoV10 libResult;
try
{
// important! use this convert method
libResult = LibraryDtoV10.FromJson(pageStr);
}
catch (Exception ex)
{
Serilog.Log.Logger.Error(ex, "Error converting library for importing use. Full library:\r\n" + pageStr);
throw;
}
if (!libResult.Items.Any())
break;
else
Serilog.Log.Logger.Debug($"Page {i}: {libResult.Items.Length} results");
allItems.AddRange(libResult.Items);
}

View File

@@ -29,12 +29,12 @@ namespace InternalUtilities
{
var exceptions = new List<Exception>();
// a book having no authors is rare but allowed
if (items.Any(i => string.IsNullOrWhiteSpace(i.ProductId)))
exceptions.Add(new ArgumentException($"Collection contains item(s) with blank {nameof(Item.ProductId)}", nameof(items)));
if (items.Any(i => string.IsNullOrWhiteSpace(i.Title)))
exceptions.Add(new ArgumentException($"Collection contains item(s) with blank {nameof(Item.Title)}", nameof(items)));
if (items.Any(i => i.Authors is null))
exceptions.Add(new ArgumentException($"Collection contains item(s) with null {nameof(Item.Authors)}", nameof(items)));
return exceptions;
}

View File

@@ -13,10 +13,20 @@ namespace LibationWinForm.BookLiberation
InitializeComponent();
}
public void AppendError(Exception ex) => AppendText("ERROR: " + ex.Message);
public void AppendText(string text) => logTb.UIThread(() => logTb.AppendText($"{DateTime.Now} {text}{Environment.NewLine}"));
public void AppendError(Exception ex)
{
Serilog.Log.Logger.Error(ex, "Automated backup: error");
appendText("ERROR: " + ex.Message);
}
public void AppendText(string text)
{
Serilog.Log.Logger.Debug($"Automated backup: {text}");
appendText(text);
}
private void appendText(string text)
=> logTb.UIThread(() => logTb.AppendText($"{DateTime.Now} {text}{Environment.NewLine}"));
public void FinalizeUI()
public void FinalizeUI()
{
keepGoingCb.Enabled = false;
logTb.AppendText("");

View File

@@ -8,18 +8,23 @@ namespace LibationWinForm.BookLiberation
{
public partial class DecryptForm : Form
{
public DecryptForm()
class SerilogTextWriter : System.IO.TextWriter
{
public override System.Text.Encoding Encoding => System.Text.Encoding.ASCII;
public override void WriteLine(string value) => Serilog.Log.Logger.Debug(value);
}
public DecryptForm()
{
InitializeComponent();
}
System.IO.TextWriter origOut = Console.Out;
System.IO.TextWriter origOut { get; } = Console.Out;
private void DecryptForm_Load(object sender, EventArgs e)
{
// redirect Console.WriteLine to console, textbox
System.IO.TextWriter origOut = Console.Out;
var controlWriter = new RichTextBoxTextWriter(this.rtbLog);
var multiLogger = new MultiTextWriter(origOut, controlWriter);
var multiLogger = new MultiTextWriter(origOut, controlWriter, new SerilogTextWriter());
Console.SetOut(multiLogger);
}
@@ -58,13 +63,6 @@ namespace LibationWinForm.BookLiberation
public void SetCoverImage(byte[] coverBytes)
=> pictureBox1.UIThread(() => pictureBox1.Image = ImageReader.ToImage(coverBytes));
public static void AppendError(Exception ex) => AppendText("ERROR: " + ex.Message);
public static void AppendText(string text) =>
// redirected to log textbox
Console.WriteLine($"{DateTime.Now} {text}")
//logTb.UIThread(() => logTb.AppendText($"{DateTime.Now} {text}{Environment.NewLine}"))
;
public void UpdateProgress(int percentage) => progressBar1.UIThread(() => progressBar1.Value = percentage);
public void UpdateProgress(int percentage) => progressBar1.UIThread(() => progressBar1.Value = percentage);
}
}
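
DecryptForm_Load redirects Console.Out to a MultiTextWriter that fans output out to the original console, the RichTextBox writer, and the new SerilogTextWriter. MultiTextWriter comes from a library, so the FanOutWriter below is only an assumed stand-in illustrating the same idea:

using System;
using System.IO;
using System.Text;

class FanOutWriter : TextWriter
{
    private readonly TextWriter[] writers;
    public FanOutWriter(params TextWriter[] writers) => this.writers = writers;

    public override Encoding Encoding => Encoding.UTF8;

    // all the string/WriteLine overloads funnel through Write(char),
    // so overriding it is enough to copy every character to each target
    public override void Write(char value)
    {
        foreach (var w in writers)
            w.Write(value);
    }

    public override void Flush()
    {
        foreach (var w in writers)
            w.Flush();
    }
}

// usage (assumed): Console.SetOut(new FanOutWriter(Console.Out, controlWriter, new SerilogTextWriter()));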

View File

@@ -1,4 +1,5 @@
using System.Windows.Forms;
using System;
using System.Windows.Forms;
using ApplicationServices;
namespace LibationWinForm
@@ -18,11 +19,12 @@ namespace LibationWinForm
{
try
{
(TotalBooksProcessed, NewBooksAdded) = await LibraryCommands.IndexLibraryAsync(new Login.WinformResponder());
(TotalBooksProcessed, NewBooksAdded) = await LibraryCommands.ImportLibraryAsync(new Login.WinformResponder());
}
catch
catch (Exception ex)
{
MessageBox.Show("Error importing library. Please try again. If this happens after 2 or 3 tries, contact administrator", "Error importing library", MessageBoxButtons.OK, MessageBoxIcon.Error);
var msg = "Error importing library. Please try again. If this still happens after 2 or 3 tries, stop and contact administrator";
MessageBox.Show(msg, "Error importing library", MessageBoxButtons.OK, MessageBoxIcon.Error);
}
this.Close();

View File

@@ -111,7 +111,7 @@ namespace LibationWinForm
setBookBackupCounts(books);
setPdfBackupCounts(books);
}
}
enum AudioFileState { full, aax, none }
private void setBookBackupCounts(IEnumerable<Book> books)
{

View File

@@ -1,5 +1,7 @@
using System;
using System.Windows.Forms;
using Dinah.Core.Logging;
using Serilog;
namespace LibationWinForm
{
@@ -14,6 +16,8 @@ namespace LibationWinForm
if (!createSettings())
return;
init();
Application.Run(new Form1());
}
@@ -29,7 +33,7 @@ Please fill in a few settings on the following page. You can also change these s
After you make your selections, get started by importing your library.
Go to Import > Scan Library
".Trim();
MessageBox.Show(welcomeText, "Welcom to Libation", MessageBoxButtons.OK);
MessageBox.Show(welcomeText, "Welcome to Libation", MessageBoxButtons.OK);
var dialogResult = new SettingsDialog().ShowDialog();
if (dialogResult != DialogResult.OK)
{
@@ -39,5 +43,32 @@ Go to Import > Scan Library
return true;
}
private static void init()
{
// default. for reference. output example:
// 2019-11-26 08:48:40.224 -05:00 [DBG] Begin Libation
var default_outputTemplate = "{Timestamp:yyyy-MM-dd HH:mm:ss.fff zzz} [{Level:u3}] {Message:lj}{NewLine}{Exception}";
// with class and method info. output example:
// 2019-11-26 08:48:40.224 -05:00 [DBG] (at LibationWinForm.Program.init()) Begin Libation
var code_outputTemplate = "{Timestamp:yyyy-MM-dd HH:mm:ss.fff zzz} [{Level:u3}] (at {Caller}) {Message:lj}{NewLine}{Exception}";
var logPath = System.IO.Path.Combine(FileManager.Configuration.Instance.LibationFiles, "Log.log");
Log.Logger = new LoggerConfiguration()
.Enrich.WithCaller()
.MinimumLevel.Debug()
.WriteTo.File(logPath,
rollingInterval: RollingInterval.Month,
outputTemplate: code_outputTemplate)
.CreateLogger();
Log.Logger.Debug("Begin Libation");
// .Here() captures debug info via System.Runtime.CompilerServices attributes. Warning: expensive
//var withLineNumbers_outputTemplate = "[{Timestamp:HH:mm:ss} {Level}] {SourceContext}{NewLine}{Message}{NewLine}in method {MemberName} at {FilePath}:{LineNumber}{NewLine}{Exception}{NewLine}";
//Log.Logger.Here().Debug("Begin Libation. Debug with line numbers");
}
}
}

View File

@@ -1,6 +1,12 @@
-- begin VERSIONING ---------------------------------------------------------------------------------------------------------------------
https://github.com/rmcrackan/Libation/releases
v3.1-beta.8 : Bugfix: decrypt file conflict
v3.1-beta.7 : Bugfix: decrypt book with no author
v3.1-beta.6 : Improved logging
v3.1-beta.5 : Improved importing
v3.1-beta.4 : Added beta-specific logging
v3.1-beta.3 : fixed known performance issue: Full-screen grid is slow to respond loading when books aren't liberated
v3.1-beta.2 : fixed known performance issue: Tag add/edit
v3.1-beta.1 : RELEASE TO BETA
v3.0.3 : Switch to SQLite. No longer relies on LocalDB, which must be installed separately