diff --git a/Source/AaxDecrypter/AaxDecrypter.csproj b/Source/AaxDecrypter/AaxDecrypter.csproj
index fab0d390..bd043b8a 100644
--- a/Source/AaxDecrypter/AaxDecrypter.csproj
+++ b/Source/AaxDecrypter/AaxDecrypter.csproj
@@ -2,6 +2,7 @@
net10.0
+ enable
diff --git a/Source/AaxDecrypter/AaxcDownloadConvertBase.cs b/Source/AaxDecrypter/AaxcDownloadConvertBase.cs
index 5d5806f3..7915d8cf 100644
--- a/Source/AaxDecrypter/AaxcDownloadConvertBase.cs
+++ b/Source/AaxDecrypter/AaxcDownloadConvertBase.cs
@@ -5,148 +5,146 @@ using System.IO;
using System.Linq;
using System.Threading.Tasks;
-#nullable enable
-namespace AaxDecrypter
-{
- public abstract class AaxcDownloadConvertBase : AudiobookDownloadBase
+namespace AaxDecrypter;
+
+public abstract class AaxcDownloadConvertBase : AudiobookDownloadBase
+{
+ public event EventHandler? RetrievedMetadata;
+
+ public Mp4File? AaxFile { get; private set; }
+ protected Mp4Operation? AaxConversion { get; set; }
+
+ protected AaxcDownloadConvertBase(string outDirectory, string cacheDirectory, IDownloadOptions dlOptions)
+ : base(outDirectory, cacheDirectory, dlOptions) { }
+
+ /// Setting cover art by this method will insert the art into the audiobook metadata
+ public override void SetCoverArt(byte[] coverArt)
{
- public event EventHandler? RetrievedMetadata;
-
- public Mp4File? AaxFile { get; private set; }
- protected Mp4Operation? AaxConversion { get; set; }
-
- protected AaxcDownloadConvertBase(string outDirectory, string cacheDirectory, IDownloadOptions dlOptions)
- : base(outDirectory, cacheDirectory, dlOptions) { }
-
- /// Setting cover art by this method will insert the art into the audiobook metadata
- public override void SetCoverArt(byte[] coverArt)
- {
- base.SetCoverArt(coverArt);
- if (coverArt is not null && AaxFile?.MetadataItems is not null)
- AaxFile.MetadataItems.Cover = coverArt;
- }
-
- public override async Task CancelAsync()
- {
- await base.CancelAsync();
- await (AaxConversion?.CancelAsync() ?? Task.CompletedTask);
- }
-
- private Mp4File Open()
- {
- if (DownloadOptions.DecryptionKeys is not KeyData[] keys || keys.Length == 0)
- throw new InvalidOperationException($"{nameof(DownloadOptions.DecryptionKeys)} cannot be null or empty for a '{DownloadOptions.InputType}' file.");
- else if (DownloadOptions.InputType is FileType.Dash)
- {
- //We may have multiple keys , so use the key whose key ID matches
- //the dash files default Key ID.
- var keyIds = keys.Select(k => new Guid(k.KeyPart1, bigEndian: true)).ToArray();
-
- var dash = new DashFile(InputFileStream);
- if (dash.Tenc is null)
- throw new InvalidOperationException("The DASH file does not contain 'tenc' box, indicating that it is unencrypted.");
-
- var kidIndex = Array.IndexOf(keyIds, dash.Tenc.DefaultKID);
- if (kidIndex == -1)
- throw new InvalidOperationException($"None of the {keyIds.Length} key IDs match the dash file's default KeyID of {dash.Tenc.DefaultKID}");
-
- keys[0] = keys[kidIndex];
- var keyId = keys[kidIndex].KeyPart1;
- var key = keys[kidIndex].KeyPart2 ?? throw new InvalidOperationException($"{nameof(DownloadOptions.DecryptionKeys)} for '{DownloadOptions.InputType}' must have a non-null decryption key (KeyPart2).");
- dash.SetDecryptionKey(keyId, key);
- WriteKeyFile($"KeyId={Convert.ToHexString(keyId)}{Environment.NewLine}Key={Convert.ToHexString(key)}");
-
- //Remove meta box containing DRM info
- if (DownloadOptions.FixupFile && dash.Moov.GetChild() is { } meta)
- dash.Moov.Children.Remove(meta);
-
- return dash;
- }
- else if (DownloadOptions.InputType is FileType.Aax)
- {
- var aax = new AaxFile(InputFileStream);
- var key = keys[0].KeyPart1;
- aax.SetDecryptionKey(keys[0].KeyPart1);
- WriteKeyFile($"ActivationBytes={Convert.ToHexString(key)}");
- return aax;
- }
- else if (DownloadOptions.InputType is FileType.Aaxc)
- {
- var aax = new AaxFile(InputFileStream);
- var key = keys[0].KeyPart1;
- var iv = keys[0].KeyPart2 ?? throw new InvalidOperationException($"{nameof(DownloadOptions.DecryptionKeys)} for '{DownloadOptions.InputType}' must have a non-null initialization vector (KeyPart2).");
- aax.SetDecryptionKey(keys[0].KeyPart1, iv);
- WriteKeyFile($"Key={Convert.ToHexString(key)}{Environment.NewLine}IV={Convert.ToHexString(iv)}");
- return aax;
- }
- else throw new InvalidOperationException($"{nameof(DownloadOptions.InputType)} of '{DownloadOptions.InputType}' is unknown.");
-
- void WriteKeyFile(string contents)
- {
- var keyFile = Path.Combine(Path.ChangeExtension(InputFileStream.SaveFilePath, ".key"));
- File.WriteAllText(keyFile, contents + Environment.NewLine);
- OnTempFileCreated(new(keyFile));
- }
- }
-
- protected bool Step_GetMetadata()
- {
- AaxFile = Open();
-
- RetrievedMetadata?.Invoke(this, AaxFile.MetadataItems);
-
- if (DownloadOptions.StripUnabridged)
- {
- AaxFile.MetadataItems.Title = AaxFile.MetadataItems.TitleSansUnabridged;
- AaxFile.MetadataItems.Album = AaxFile.MetadataItems.Album?.Replace(" (Unabridged)", "");
- }
-
- if (DownloadOptions.FixupFile)
- {
- if (!string.IsNullOrWhiteSpace(AaxFile.MetadataItems.Narrator))
- AaxFile.MetadataItems.AppleListBox.EditOrAddTag("©wrt", AaxFile.MetadataItems.Narrator);
-
- if (!string.IsNullOrWhiteSpace(AaxFile.MetadataItems.Copyright))
- AaxFile.MetadataItems.Copyright = AaxFile.MetadataItems.Copyright.Replace("(P)", "℗").Replace("©", "©");
-
- //Add audiobook shelf tags
- //https://github.com/advplyr/audiobookshelf/issues/1794#issuecomment-1565050213
- const string tagDomain = "com.pilabor.tone";
-
- AaxFile.MetadataItems.Title = DownloadOptions.Title;
-
- if (DownloadOptions.Subtitle is string subtitle)
- AaxFile.MetadataItems.AppleListBox.EditOrAddFreeformTag(tagDomain, "SUBTITLE", subtitle);
-
- if (DownloadOptions.Publisher is string publisher)
- AaxFile.MetadataItems.AppleListBox.EditOrAddFreeformTag(tagDomain, "PUBLISHER", publisher);
-
- if (DownloadOptions.Language is string language)
- AaxFile.MetadataItems.AppleListBox.EditOrAddFreeformTag(tagDomain, "LANGUAGE", language);
-
- if (DownloadOptions.AudibleProductId is string asin)
- {
- AaxFile.MetadataItems.Asin = asin;
- AaxFile.MetadataItems.AppleListBox.EditOrAddTag("asin", asin);
- AaxFile.MetadataItems.AppleListBox.EditOrAddFreeformTag(tagDomain, "AUDIBLE_ASIN", asin);
- }
-
- if (DownloadOptions.SeriesName is string series)
- AaxFile.MetadataItems.AppleListBox.EditOrAddFreeformTag(tagDomain, "SERIES", series);
-
- if (DownloadOptions.SeriesNumber is string part)
- AaxFile.MetadataItems.AppleListBox.EditOrAddFreeformTag(tagDomain, "PART", part);
- }
-
- OnRetrievedTitle(AaxFile.MetadataItems.TitleSansUnabridged);
- OnRetrievedAuthors(AaxFile.MetadataItems.FirstAuthor);
- OnRetrievedNarrators(AaxFile.MetadataItems.Narrator);
- OnRetrievedCoverArt(AaxFile.MetadataItems.Cover);
- OnInitialized();
-
- return !IsCanceled;
- }
-
- protected virtual void OnInitialized() { }
+ base.SetCoverArt(coverArt);
+ if (coverArt is not null && AaxFile?.MetadataItems is not null)
+ AaxFile.MetadataItems.Cover = coverArt;
}
+
+ public override async Task CancelAsync()
+ {
+ await base.CancelAsync();
+ await (AaxConversion?.CancelAsync() ?? Task.CompletedTask);
+ }
+
+ private Mp4File Open()
+ {
+ if (DownloadOptions.DecryptionKeys is not KeyData[] keys || keys.Length == 0)
+ throw new InvalidOperationException($"{nameof(DownloadOptions.DecryptionKeys)} cannot be null or empty for a '{DownloadOptions.InputType}' file.");
+ else if (DownloadOptions.InputType is FileType.Dash)
+ {
+ //We may have multiple keys, so use the key whose key ID matches
+ //the dash file's default Key ID.
+ var keyIds = keys.Select(k => new Guid(k.KeyPart1, bigEndian: true)).ToArray();
+
+ var dash = new DashFile(InputFileStream);
+ if (dash.Tenc is null)
+ throw new InvalidOperationException("The DASH file does not contain 'tenc' box, indicating that it is unencrypted.");
+
+ var kidIndex = Array.IndexOf(keyIds, dash.Tenc.DefaultKID);
+ if (kidIndex == -1)
+ throw new InvalidOperationException($"None of the {keyIds.Length} key IDs match the dash file's default KeyID of {dash.Tenc.DefaultKID}");
+
+ keys[0] = keys[kidIndex];
+ var keyId = keys[kidIndex].KeyPart1;
+ var key = keys[kidIndex].KeyPart2 ?? throw new InvalidOperationException($"{nameof(DownloadOptions.DecryptionKeys)} for '{DownloadOptions.InputType}' must have a non-null decryption key (KeyPart2).");
+ dash.SetDecryptionKey(keyId, key);
+ WriteKeyFile($"KeyId={Convert.ToHexString(keyId)}{Environment.NewLine}Key={Convert.ToHexString(key)}");
+
+ //Remove meta box containing DRM info
+ if (DownloadOptions.FixupFile && dash.Moov.GetChild() is { } meta)
+ dash.Moov.Children.Remove(meta);
+
+ return dash;
+ }
+ else if (DownloadOptions.InputType is FileType.Aax)
+ {
+ var aax = new AaxFile(InputFileStream);
+ var key = keys[0].KeyPart1;
+ aax.SetDecryptionKey(keys[0].KeyPart1);
+ WriteKeyFile($"ActivationBytes={Convert.ToHexString(key)}");
+ return aax;
+ }
+ else if (DownloadOptions.InputType is FileType.Aaxc)
+ {
+ var aax = new AaxFile(InputFileStream);
+ var key = keys[0].KeyPart1;
+ var iv = keys[0].KeyPart2 ?? throw new InvalidOperationException($"{nameof(DownloadOptions.DecryptionKeys)} for '{DownloadOptions.InputType}' must have a non-null initialization vector (KeyPart2).");
+ aax.SetDecryptionKey(keys[0].KeyPart1, iv);
+ WriteKeyFile($"Key={Convert.ToHexString(key)}{Environment.NewLine}IV={Convert.ToHexString(iv)}");
+ return aax;
+ }
+ else throw new InvalidOperationException($"{nameof(DownloadOptions.InputType)} of '{DownloadOptions.InputType}' is unknown.");
+
+ void WriteKeyFile(string contents)
+ {
+ var keyFile = Path.Combine(Path.ChangeExtension(InputFileStream.SaveFilePath, ".key"));
+ File.WriteAllText(keyFile, contents + Environment.NewLine);
+ OnTempFileCreated(new(keyFile));
+ }
+ }
+
+ protected bool Step_GetMetadata()
+ {
+ AaxFile = Open();
+
+ RetrievedMetadata?.Invoke(this, AaxFile.MetadataItems);
+
+ if (DownloadOptions.StripUnabridged)
+ {
+ AaxFile.MetadataItems.Title = AaxFile.MetadataItems.TitleSansUnabridged;
+ AaxFile.MetadataItems.Album = AaxFile.MetadataItems.Album?.Replace(" (Unabridged)", "");
+ }
+
+ if (DownloadOptions.FixupFile)
+ {
+ if (!string.IsNullOrWhiteSpace(AaxFile.MetadataItems.Narrator))
+ AaxFile.MetadataItems.AppleListBox.EditOrAddTag("©wrt", AaxFile.MetadataItems.Narrator);
+
+ if (!string.IsNullOrWhiteSpace(AaxFile.MetadataItems.Copyright))
+ AaxFile.MetadataItems.Copyright = AaxFile.MetadataItems.Copyright.Replace("(P)", "℗").Replace("©", "©");
+
+ //Add audiobook shelf tags
+ //https://github.com/advplyr/audiobookshelf/issues/1794#issuecomment-1565050213
+ const string tagDomain = "com.pilabor.tone";
+
+ AaxFile.MetadataItems.Title = DownloadOptions.Title;
+
+ if (DownloadOptions.Subtitle is string subtitle)
+ AaxFile.MetadataItems.AppleListBox.EditOrAddFreeformTag(tagDomain, "SUBTITLE", subtitle);
+
+ if (DownloadOptions.Publisher is string publisher)
+ AaxFile.MetadataItems.AppleListBox.EditOrAddFreeformTag(tagDomain, "PUBLISHER", publisher);
+
+ if (DownloadOptions.Language is string language)
+ AaxFile.MetadataItems.AppleListBox.EditOrAddFreeformTag(tagDomain, "LANGUAGE", language);
+
+ if (DownloadOptions.AudibleProductId is string asin)
+ {
+ AaxFile.MetadataItems.Asin = asin;
+ AaxFile.MetadataItems.AppleListBox.EditOrAddTag("asin", asin);
+ AaxFile.MetadataItems.AppleListBox.EditOrAddFreeformTag(tagDomain, "AUDIBLE_ASIN", asin);
+ }
+
+ if (DownloadOptions.SeriesName is string series)
+ AaxFile.MetadataItems.AppleListBox.EditOrAddFreeformTag(tagDomain, "SERIES", series);
+
+ if (DownloadOptions.SeriesNumber is string part)
+ AaxFile.MetadataItems.AppleListBox.EditOrAddFreeformTag(tagDomain, "PART", part);
+ }
+
+ OnRetrievedTitle(AaxFile.MetadataItems.TitleSansUnabridged);
+ OnRetrievedAuthors(AaxFile.MetadataItems.FirstAuthor);
+ OnRetrievedNarrators(AaxFile.MetadataItems.Narrator);
+ OnRetrievedCoverArt(AaxFile.MetadataItems.Cover);
+ OnInitialized();
+
+ return !IsCanceled;
+ }
+
+ protected virtual void OnInitialized() { }
}
diff --git a/Source/AaxDecrypter/AaxcDownloadMultiConverter.cs b/Source/AaxDecrypter/AaxcDownloadMultiConverter.cs
index c6984031..8c9f317e 100644
--- a/Source/AaxDecrypter/AaxcDownloadMultiConverter.cs
+++ b/Source/AaxDecrypter/AaxcDownloadMultiConverter.cs
@@ -6,116 +6,114 @@ using System;
using System.IO;
using System.Threading.Tasks;
-#nullable enable
-namespace AaxDecrypter
+namespace AaxDecrypter;
+
+public class AaxcDownloadMultiConverter : AaxcDownloadConvertBase
{
- public class AaxcDownloadMultiConverter : AaxcDownloadConvertBase
+ private static readonly TimeSpan minChapterLength = TimeSpan.FromSeconds(3);
+ private FileStream? workingFileStream;
+
+ public AaxcDownloadMultiConverter(string outDirectory, string cacheDirectory, IDownloadOptions dlOptions)
+ : base(outDirectory, cacheDirectory, dlOptions)
{
- private static readonly TimeSpan minChapterLength = TimeSpan.FromSeconds(3);
- private FileStream? workingFileStream;
+ AsyncSteps.Name = $"Download, Convert Aaxc To {DownloadOptions.OutputFormat}, and Split";
+ AsyncSteps["Step 1: Get Aaxc Metadata"] = () => Task.Run(Step_GetMetadata);
+ AsyncSteps["Step 2: Download Decrypted Audiobook"] = Step_DownloadAndDecryptAudiobookAsync;
+ }
- public AaxcDownloadMultiConverter(string outDirectory, string cacheDirectory, IDownloadOptions dlOptions)
- : base(outDirectory, cacheDirectory, dlOptions)
+ protected override void OnInitialized()
+ {
+ //Finishing configuring lame encoder.
+ if (DownloadOptions.OutputFormat == OutputFormat.Mp3)
{
- AsyncSteps.Name = $"Download, Convert Aaxc To {DownloadOptions.OutputFormat}, and Split";
- AsyncSteps["Step 1: Get Aaxc Metadata"] = () => Task.Run(Step_GetMetadata);
- AsyncSteps["Step 2: Download Decrypted Audiobook"] = Step_DownloadAndDecryptAudiobookAsync;
- }
+ if (AaxFile is null)
+ throw new InvalidOperationException($"AaxFile is null during {nameof(OnInitialized)} in {nameof(AaxcDownloadConvertBase)}.");
+ if (DownloadOptions.LameConfig is null)
+ throw new InvalidOperationException($"LameConfig is null during {nameof(OnInitialized)} in {nameof(DownloadOptions)}.");
- protected override void OnInitialized()
- {
- //Finishing configuring lame encoder.
- if (DownloadOptions.OutputFormat == OutputFormat.Mp3)
- {
- if (AaxFile is null)
- throw new InvalidOperationException($"AaxFile is null during {nameof(OnInitialized)} in {nameof(AaxcDownloadConvertBase)}.");
- if (DownloadOptions.LameConfig is null)
- throw new InvalidOperationException($"LameConfig is null during {nameof(OnInitialized)} in {nameof(DownloadOptions)}.");
-
- MpegUtil.ConfigureLameOptions(
- AaxFile,
- DownloadOptions.LameConfig,
- DownloadOptions.Downsample,
- DownloadOptions.MatchSourceBitrate,
- chapters: null);
- }
- }
-
-
- protected async override Task Step_DownloadAndDecryptAudiobookAsync()
- {
- if (AaxFile is null) return false;
-
- try
- {
- await (AaxConversion = decryptMultiAsync(AaxFile, DownloadOptions.ChapterInfo));
-
- if (AaxConversion.IsCompletedSuccessfully)
- await moveMoovToBeginning(AaxFile, workingFileStream?.Name);
-
- return AaxConversion.IsCompletedSuccessfully;
- }
- finally
- {
- workingFileStream?.Dispose();
- FinalizeDownload();
- }
- }
-
- private Mp4Operation decryptMultiAsync(Mp4File aaxFile, ChapterInfo splitChapters)
- {
- var chapterCount = 0;
- return
- DownloadOptions.OutputFormat == OutputFormat.M4b
- ? aaxFile.ConvertToMultiMp4aAsync
- (
- splitChapters,
- newSplitCallback => newSplit(++chapterCount, splitChapters, newSplitCallback)
- )
- : aaxFile.ConvertToMultiMp3Async
- (
- splitChapters,
- newSplitCallback => newSplit(++chapterCount, splitChapters, newSplitCallback),
- DownloadOptions.LameConfig
- );
-
- void newSplit(int currentChapter, ChapterInfo splitChapters, INewSplitCallback newSplitCallback)
- {
- moveMoovToBeginning(aaxFile, workingFileStream?.Name).GetAwaiter().GetResult();
- var newTempFile = GetNewTempFilePath(DownloadOptions.OutputFormat.ToString());
- MultiConvertFileProperties props = new()
- {
- OutputFileName = newTempFile.FilePath,
- PartsPosition = currentChapter,
- PartsTotal = splitChapters.Count,
- Title = newSplitCallback.Chapter?.Title,
- };
-
- newSplitCallback.OutputFile = workingFileStream = createOutputFileStream(props);
- newSplitCallback.TrackTitle = DownloadOptions.GetMultipartTitle(props);
- newSplitCallback.TrackNumber = currentChapter;
- newSplitCallback.TrackCount = splitChapters.Count;
-
- OnTempFileCreated(newTempFile with { PartProperties = props });
- }
-
- FileStream createOutputFileStream(MultiConvertFileProperties multiConvertFileProperties)
- {
- FileUtility.SaferDelete(multiConvertFileProperties.OutputFileName);
- return File.Open(multiConvertFileProperties.OutputFileName, FileMode.OpenOrCreate, FileAccess.ReadWrite);
- }
- }
-
- private Mp4Operation moveMoovToBeginning(Mp4File aaxFile, string? filename)
- {
- if (DownloadOptions.OutputFormat is OutputFormat.M4b
- && DownloadOptions.MoveMoovToBeginning
- && filename is not null
- && File.Exists(filename))
- {
- return Mp4File.RelocateMoovAsync(filename);
- }
- else return Mp4Operation.FromCompleted(aaxFile);
+ MpegUtil.ConfigureLameOptions(
+ AaxFile,
+ DownloadOptions.LameConfig,
+ DownloadOptions.Downsample,
+ DownloadOptions.MatchSourceBitrate,
+ chapters: null);
}
}
+
+
+ protected async override Task Step_DownloadAndDecryptAudiobookAsync()
+ {
+ if (AaxFile is null) return false;
+
+ try
+ {
+ await (AaxConversion = decryptMultiAsync(AaxFile, DownloadOptions.ChapterInfo));
+
+ if (AaxConversion.IsCompletedSuccessfully)
+ await moveMoovToBeginning(AaxFile, workingFileStream?.Name);
+
+ return AaxConversion.IsCompletedSuccessfully;
+ }
+ finally
+ {
+ workingFileStream?.Dispose();
+ FinalizeDownload();
+ }
+ }
+
+ private Mp4Operation decryptMultiAsync(Mp4File aaxFile, ChapterInfo splitChapters)
+ {
+ var chapterCount = 0;
+ return
+ DownloadOptions.OutputFormat == OutputFormat.M4b
+ ? aaxFile.ConvertToMultiMp4aAsync
+ (
+ splitChapters,
+ newSplitCallback => newSplit(++chapterCount, splitChapters, newSplitCallback)
+ )
+ : aaxFile.ConvertToMultiMp3Async
+ (
+ splitChapters,
+ newSplitCallback => newSplit(++chapterCount, splitChapters, newSplitCallback),
+ DownloadOptions.LameConfig
+ );
+
+ void newSplit(int currentChapter, ChapterInfo splitChapters, INewSplitCallback newSplitCallback)
+ {
+ moveMoovToBeginning(aaxFile, workingFileStream?.Name).GetAwaiter().GetResult();
+ var newTempFile = GetNewTempFilePath(DownloadOptions.OutputFormat.ToString());
+ MultiConvertFileProperties props = new()
+ {
+ OutputFileName = newTempFile.FilePath,
+ PartsPosition = currentChapter,
+ PartsTotal = splitChapters.Count,
+ Title = newSplitCallback.Chapter?.Title,
+ };
+
+ newSplitCallback.OutputFile = workingFileStream = createOutputFileStream(props);
+ newSplitCallback.TrackTitle = DownloadOptions.GetMultipartTitle(props);
+ newSplitCallback.TrackNumber = currentChapter;
+ newSplitCallback.TrackCount = splitChapters.Count;
+
+ OnTempFileCreated(newTempFile with { PartProperties = props });
+ }
+
+ FileStream createOutputFileStream(MultiConvertFileProperties multiConvertFileProperties)
+ {
+ FileUtility.SaferDelete(multiConvertFileProperties.OutputFileName);
+ return File.Open(multiConvertFileProperties.OutputFileName, FileMode.OpenOrCreate, FileAccess.ReadWrite);
+ }
+ }
+
+ private Mp4Operation moveMoovToBeginning(Mp4File aaxFile, string? filename)
+ {
+ if (DownloadOptions.OutputFormat is OutputFormat.M4b
+ && DownloadOptions.MoveMoovToBeginning
+ && filename is not null
+ && File.Exists(filename))
+ {
+ return Mp4File.RelocateMoovAsync(filename);
+ }
+ else return Mp4Operation.FromCompleted(aaxFile);
+ }
}
diff --git a/Source/AaxDecrypter/AaxcDownloadSingleConverter.cs b/Source/AaxDecrypter/AaxcDownloadSingleConverter.cs
index 73c089f1..f626dfc1 100644
--- a/Source/AaxDecrypter/AaxcDownloadSingleConverter.cs
+++ b/Source/AaxDecrypter/AaxcDownloadSingleConverter.cs
@@ -6,110 +6,108 @@ using System;
using System.IO;
using System.Threading.Tasks;
-#nullable enable
-namespace AaxDecrypter
+namespace AaxDecrypter;
+
+public class AaxcDownloadSingleConverter : AaxcDownloadConvertBase
{
- public class AaxcDownloadSingleConverter : AaxcDownloadConvertBase
+ private readonly AverageSpeed averageSpeed = new();
+ private TempFile? outputTempFile;
+
+ public AaxcDownloadSingleConverter(string outDirectory, string cacheDirectory, IDownloadOptions dlOptions)
+ : base(outDirectory, cacheDirectory, dlOptions)
{
- private readonly AverageSpeed averageSpeed = new();
- private TempFile? outputTempFile;
+ var step = 1;
- public AaxcDownloadSingleConverter(string outDirectory, string cacheDirectory, IDownloadOptions dlOptions)
- : base(outDirectory, cacheDirectory, dlOptions)
+ AsyncSteps.Name = $"Download and Convert Aaxc To {DownloadOptions.OutputFormat}";
+ AsyncSteps[$"Step {step++}: Get Aaxc Metadata"] = () => Task.Run(Step_GetMetadata);
+ AsyncSteps[$"Step {step++}: Download Decrypted Audiobook"] = Step_DownloadAndDecryptAudiobookAsync;
+ if (DownloadOptions.MoveMoovToBeginning && DownloadOptions.OutputFormat is OutputFormat.M4b)
+ AsyncSteps[$"Step {step++}: Move moov atom to beginning"] = Step_MoveMoov;
+ AsyncSteps[$"Step {step++}: Create Cue"] = Step_CreateCueAsync;
+ }
+
+ protected override void OnInitialized()
+ {
+ //Finishing configuring lame encoder.
+ if (DownloadOptions.OutputFormat == OutputFormat.Mp3)
{
- var step = 1;
+ if (AaxFile is null)
+ throw new InvalidOperationException($"AaxFile is null during {nameof(OnInitialized)} in {nameof(AaxcDownloadConvertBase)}.");
+ if (DownloadOptions.LameConfig is null)
+ throw new InvalidOperationException($"LameConfig is null during {nameof(OnInitialized)} in {nameof(DownloadOptions)}.");
- AsyncSteps.Name = $"Download and Convert Aaxc To {DownloadOptions.OutputFormat}";
- AsyncSteps[$"Step {step++}: Get Aaxc Metadata"] = () => Task.Run(Step_GetMetadata);
- AsyncSteps[$"Step {step++}: Download Decrypted Audiobook"] = Step_DownloadAndDecryptAudiobookAsync;
- if (DownloadOptions.MoveMoovToBeginning && DownloadOptions.OutputFormat is OutputFormat.M4b)
- AsyncSteps[$"Step {step++}: Move moov atom to beginning"] = Step_MoveMoov;
- AsyncSteps[$"Step {step++}: Create Cue"] = Step_CreateCueAsync;
+ MpegUtil.ConfigureLameOptions(
+ AaxFile,
+ DownloadOptions.LameConfig,
+ DownloadOptions.Downsample,
+ DownloadOptions.MatchSourceBitrate,
+ DownloadOptions.ChapterInfo);
}
+ }
- protected override void OnInitialized()
+ protected async override Task Step_DownloadAndDecryptAudiobookAsync()
+ {
+ if (AaxFile is null) return false;
+ outputTempFile = GetNewTempFilePath(DownloadOptions.OutputFormat.ToString());
+ FileUtility.SaferDelete(outputTempFile.FilePath);
+
+ using var outputFile = File.Open(outputTempFile.FilePath, FileMode.OpenOrCreate, FileAccess.ReadWrite);
+ OnTempFileCreated(outputTempFile);
+
+ try
{
- //Finishing configuring lame encoder.
- if (DownloadOptions.OutputFormat == OutputFormat.Mp3)
- {
- if (AaxFile is null)
- throw new InvalidOperationException($"AaxFile is null during {nameof(OnInitialized)} in {nameof(AaxcDownloadConvertBase)}.");
- if (DownloadOptions.LameConfig is null)
- throw new InvalidOperationException($"LameConfig is null during {nameof(OnInitialized)} in {nameof(DownloadOptions)}.");
+ await (AaxConversion = decryptAsync(AaxFile, outputFile));
- MpegUtil.ConfigureLameOptions(
- AaxFile,
- DownloadOptions.LameConfig,
- DownloadOptions.Downsample,
- DownloadOptions.MatchSourceBitrate,
- DownloadOptions.ChapterInfo);
- }
- }
-
- protected async override Task Step_DownloadAndDecryptAudiobookAsync()
- {
- if (AaxFile is null) return false;
- outputTempFile = GetNewTempFilePath(DownloadOptions.OutputFormat.ToString());
- FileUtility.SaferDelete(outputTempFile.FilePath);
-
- using var outputFile = File.Open(outputTempFile.FilePath, FileMode.OpenOrCreate, FileAccess.ReadWrite);
- OnTempFileCreated(outputTempFile);
-
- try
- {
- await (AaxConversion = decryptAsync(AaxFile, outputFile));
-
- return AaxConversion.IsCompletedSuccessfully;
- }
- finally
- {
- FinalizeDownload();
- }
- }
-
- private async Task Step_MoveMoov()
- {
- if (outputTempFile is null) return false;
- AaxConversion = Mp4File.RelocateMoovAsync(outputTempFile.FilePath);
- AaxConversion.ConversionProgressUpdate += AaxConversion_MoovProgressUpdate;
- await AaxConversion;
- AaxConversion.ConversionProgressUpdate -= AaxConversion_MoovProgressUpdate;
return AaxConversion.IsCompletedSuccessfully;
}
-
- private void AaxConversion_MoovProgressUpdate(object? sender, ConversionProgressEventArgs e)
+ finally
{
- averageSpeed.AddPosition(e.ProcessPosition.TotalSeconds);
-
- var remainingTimeToProcess = (e.EndTime - e.ProcessPosition).TotalSeconds;
- var estTimeRemaining = remainingTimeToProcess / averageSpeed.Average;
-
- if (double.IsNormal(estTimeRemaining))
- OnDecryptTimeRemaining(TimeSpan.FromSeconds(estTimeRemaining));
-
- OnDecryptProgressUpdate(
- new DownloadProgress
- {
- ProgressPercentage = 100 * e.FractionCompleted,
- BytesReceived = (long)(InputFileStream.Length * e.FractionCompleted),
- TotalBytesToReceive = InputFileStream.Length
- });
+ FinalizeDownload();
}
-
- private Mp4Operation decryptAsync(Mp4File aaxFile, Stream outputFile)
- => DownloadOptions.OutputFormat == OutputFormat.Mp3
- ? aaxFile.ConvertToMp3Async
- (
- outputFile,
- DownloadOptions.LameConfig,
- DownloadOptions.ChapterInfo
- )
- : DownloadOptions.FixupFile
- ? aaxFile.ConvertToMp4aAsync
- (
- outputFile,
- DownloadOptions.ChapterInfo
- )
- : aaxFile.ConvertToMp4aAsync(outputFile);
}
+
+ private async Task Step_MoveMoov()
+ {
+ if (outputTempFile is null) return false;
+ AaxConversion = Mp4File.RelocateMoovAsync(outputTempFile.FilePath);
+ AaxConversion.ConversionProgressUpdate += AaxConversion_MoovProgressUpdate;
+ await AaxConversion;
+ AaxConversion.ConversionProgressUpdate -= AaxConversion_MoovProgressUpdate;
+ return AaxConversion.IsCompletedSuccessfully;
+ }
+
+ private void AaxConversion_MoovProgressUpdate(object? sender, ConversionProgressEventArgs e)
+ {
+ averageSpeed.AddPosition(e.ProcessPosition.TotalSeconds);
+
+ var remainingTimeToProcess = (e.EndTime - e.ProcessPosition).TotalSeconds;
+ var estTimeRemaining = remainingTimeToProcess / averageSpeed.Average;
+
+ if (double.IsNormal(estTimeRemaining))
+ OnDecryptTimeRemaining(TimeSpan.FromSeconds(estTimeRemaining));
+
+ OnDecryptProgressUpdate(
+ new DownloadProgress
+ {
+ ProgressPercentage = 100 * e.FractionCompleted,
+ BytesReceived = (long)(InputFileStream.Length * e.FractionCompleted),
+ TotalBytesToReceive = InputFileStream.Length
+ });
+ }
+
+ private Mp4Operation decryptAsync(Mp4File aaxFile, Stream outputFile)
+ => DownloadOptions.OutputFormat == OutputFormat.Mp3
+ ? aaxFile.ConvertToMp3Async
+ (
+ outputFile,
+ DownloadOptions.LameConfig,
+ DownloadOptions.ChapterInfo
+ )
+ : DownloadOptions.FixupFile
+ ? aaxFile.ConvertToMp4aAsync
+ (
+ outputFile,
+ DownloadOptions.ChapterInfo
+ )
+ : aaxFile.ConvertToMp4aAsync(outputFile);
}
diff --git a/Source/AaxDecrypter/AudiobookDownloadBase.cs b/Source/AaxDecrypter/AudiobookDownloadBase.cs
index d807c901..feccf926 100644
--- a/Source/AaxDecrypter/AudiobookDownloadBase.cs
+++ b/Source/AaxDecrypter/AudiobookDownloadBase.cs
@@ -6,223 +6,221 @@ using System;
using System.IO;
using System.Threading.Tasks;
-#nullable enable
-namespace AaxDecrypter
+namespace AaxDecrypter;
+
+public enum OutputFormat { M4b, Mp3 }
+
+public abstract class AudiobookDownloadBase
{
- public enum OutputFormat { M4b, Mp3 }
+ public event EventHandler? RetrievedTitle;
+ public event EventHandler? RetrievedAuthors;
+ public event EventHandler? RetrievedNarrators;
+ public event EventHandler? RetrievedCoverArt;
+ public event EventHandler? DecryptProgressUpdate;
+ public event EventHandler? DecryptTimeRemaining;
+ public event EventHandler? TempFileCreated;
- public abstract class AudiobookDownloadBase
+ public bool IsCanceled { get; protected set; }
+ protected AsyncStepSequence AsyncSteps { get; } = new();
+ protected string OutputDirectory { get; }
+ public IDownloadOptions DownloadOptions { get; }
+ protected NetworkFileStream InputFileStream => NfsPersister.NetworkFileStream;
+ protected virtual long InputFilePosition
{
- public event EventHandler? RetrievedTitle;
- public event EventHandler? RetrievedAuthors;
- public event EventHandler? RetrievedNarrators;
- public event EventHandler? RetrievedCoverArt;
- public event EventHandler? DecryptProgressUpdate;
- public event EventHandler? DecryptTimeRemaining;
- public event EventHandler? TempFileCreated;
-
- public bool IsCanceled { get; protected set; }
- protected AsyncStepSequence AsyncSteps { get; } = new();
- protected string OutputDirectory { get; }
- public IDownloadOptions DownloadOptions { get; }
- protected NetworkFileStream InputFileStream => NfsPersister.NetworkFileStream;
- protected virtual long InputFilePosition
+ get
{
- get
- {
- //Use try/catch instread of checking CanRead to avoid
- //a race with the background download completing
- //between the check and the Position call.
- try { return InputFileStream.Position; }
- catch { return InputFileStream.Length; }
- }
+ //Use try/catch instead of checking CanRead to avoid
+ //a race with the background download completing
+ //between the check and the Position call.
+ try { return InputFileStream.Position; }
+ catch { return InputFileStream.Length; }
}
- private bool downloadFinished;
+ }
+ private bool downloadFinished;
- private NetworkFileStreamPersister? m_nfsPersister;
- private NetworkFileStreamPersister NfsPersister => m_nfsPersister ??= OpenNetworkFileStream();
- private readonly DownloadProgress zeroProgress;
- private readonly string jsonDownloadState;
- private readonly string tempFilePath;
+ private NetworkFileStreamPersister? m_nfsPersister;
+ private NetworkFileStreamPersister NfsPersister => m_nfsPersister ??= OpenNetworkFileStream();
+ private readonly DownloadProgress zeroProgress;
+ private readonly string jsonDownloadState;
+ private readonly string tempFilePath;
- protected AudiobookDownloadBase(string outDirectory, string cacheDirectory, IDownloadOptions dlOptions)
- {
- OutputDirectory = ArgumentValidator.EnsureNotNullOrWhiteSpace(outDirectory, nameof(outDirectory));
- DownloadOptions = ArgumentValidator.EnsureNotNull(dlOptions, nameof(dlOptions));
- DownloadOptions.DownloadSpeedChanged += (_, speed) => InputFileStream.SpeedLimit = speed;
+ protected AudiobookDownloadBase(string outDirectory, string cacheDirectory, IDownloadOptions dlOptions)
+ {
+ OutputDirectory = ArgumentValidator.EnsureNotNullOrWhiteSpace(outDirectory, nameof(outDirectory));
+ DownloadOptions = ArgumentValidator.EnsureNotNull(dlOptions, nameof(dlOptions));
+ DownloadOptions.DownloadSpeedChanged += (_, speed) => InputFileStream.SpeedLimit = speed;
- if (!Directory.Exists(OutputDirectory))
- Directory.CreateDirectory(OutputDirectory);
+ if (!Directory.Exists(OutputDirectory))
+ Directory.CreateDirectory(OutputDirectory);
- if (!Directory.Exists(cacheDirectory))
- Directory.CreateDirectory(cacheDirectory);
+ if (!Directory.Exists(cacheDirectory))
+ Directory.CreateDirectory(cacheDirectory);
- jsonDownloadState = Path.Combine(cacheDirectory, $"{DownloadOptions.AudibleProductId}.json");
- tempFilePath = Path.ChangeExtension(jsonDownloadState, ".aaxc");
+ jsonDownloadState = Path.Combine(cacheDirectory, $"{DownloadOptions.AudibleProductId}.json");
+ tempFilePath = Path.ChangeExtension(jsonDownloadState, ".aaxc");
- zeroProgress = new DownloadProgress
+ zeroProgress = new DownloadProgress
+ {
+ BytesReceived = 0,
+ ProgressPercentage = 0,
+ TotalBytesToReceive = 0
+ };
+
+ OnDecryptProgressUpdate(zeroProgress);
+ }
+
+ protected TempFile GetNewTempFilePath(string extension)
+ {
+ extension = FileUtility.GetStandardizedExtension(extension);
+ var path = Path.Combine(OutputDirectory, Guid.NewGuid().ToString("N") + extension);
+ return new(path, extension);
+ }
+
+ public async Task RunAsync()
+ {
+ await InputFileStream.BeginDownloadingAsync();
+ var progressTask = Task.Run(reportProgress);
+
+ (bool success, var elapsed) = await AsyncSteps.RunAsync();
+
+ //Stop the downloader so it doesn't keep running in the background.
+ if (!success)
+ NfsPersister.Dispose();
+
+ await progressTask;
+
+ var speedup = DownloadOptions.RuntimeLength / elapsed;
+ Serilog.Log.Information($"Speedup is {speedup:F0}x realtime.");
+
+ NfsPersister.Dispose();
+ return success;
+
+ async Task reportProgress()
+ {
+ AverageSpeed averageSpeed = new();
+
+ while (
+ InputFileStream.CanRead
+ && InputFileStream.Length > InputFilePosition
+ && !InputFileStream.IsCancelled
+ && !downloadFinished)
{
- BytesReceived = 0,
- ProgressPercentage = 0,
- TotalBytesToReceive = 0
- };
+ averageSpeed.AddPosition(InputFilePosition);
+ var estSecsRemaining = (InputFileStream.Length - InputFilePosition) / averageSpeed.Average;
+
+ if (double.IsNormal(estSecsRemaining))
+ OnDecryptTimeRemaining(TimeSpan.FromSeconds(estSecsRemaining));
+
+ var progressPercent = 100d * InputFilePosition / InputFileStream.Length;
+
+ OnDecryptProgressUpdate(
+ new DownloadProgress
+ {
+ ProgressPercentage = progressPercent,
+ BytesReceived = InputFilePosition,
+ TotalBytesToReceive = InputFileStream.Length
+ });
+
+ await Task.Delay(200);
+ }
+
+ OnDecryptTimeRemaining(TimeSpan.Zero);
OnDecryptProgressUpdate(zeroProgress);
}
+ }
- protected TempFile GetNewTempFilePath(string extension)
+ public virtual Task CancelAsync()
+ {
+ IsCanceled = true;
+ FinalizeDownload();
+ return Task.CompletedTask;
+ }
+ protected abstract Task Step_DownloadAndDecryptAudiobookAsync();
+
+ public virtual void SetCoverArt(byte[] coverArt) { }
+ protected void OnRetrievedTitle(string? title)
+ => RetrievedTitle?.Invoke(this, title);
+ protected void OnRetrievedAuthors(string? authors)
+ => RetrievedAuthors?.Invoke(this, authors);
+ protected void OnRetrievedNarrators(string? narrators)
+ => RetrievedNarrators?.Invoke(this, narrators);
+ protected void OnRetrievedCoverArt(byte[]? coverArt)
+ => RetrievedCoverArt?.Invoke(this, coverArt);
+ protected void OnDecryptProgressUpdate(DownloadProgress downloadProgress)
+ => DecryptProgressUpdate?.Invoke(this, downloadProgress);
+ protected void OnDecryptTimeRemaining(TimeSpan timeRemaining)
+ => DecryptTimeRemaining?.Invoke(this, timeRemaining);
+ public void OnTempFileCreated(TempFile path)
+ => TempFileCreated?.Invoke(this, path);
+
+ protected virtual void FinalizeDownload()
+ {
+ NfsPersister.Dispose();
+ downloadFinished = true;
+ }
+
+ protected async Task Step_CreateCueAsync()
+ {
+ if (!DownloadOptions.CreateCueSheet) return !IsCanceled;
+
+ if (DownloadOptions.ChapterInfo.Count <= 1)
{
- extension = FileUtility.GetStandardizedExtension(extension);
- var path = Path.Combine(OutputDirectory, Guid.NewGuid().ToString("N") + extension);
- return new(path, extension);
- }
-
- public async Task RunAsync()
- {
- await InputFileStream.BeginDownloadingAsync();
- var progressTask = Task.Run(reportProgress);
-
- (bool success, var elapsed) = await AsyncSteps.RunAsync();
-
- //Stop the downloader so it doesn't keep running in the background.
- if (!success)
- NfsPersister.Dispose();
-
- await progressTask;
-
- var speedup = DownloadOptions.RuntimeLength / elapsed;
- Serilog.Log.Information($"Speedup is {speedup:F0}x realtime.");
-
- NfsPersister.Dispose();
- return success;
-
- async Task reportProgress()
- {
- AverageSpeed averageSpeed = new();
-
- while (
- InputFileStream.CanRead
- && InputFileStream.Length > InputFilePosition
- && !InputFileStream.IsCancelled
- && !downloadFinished)
- {
- averageSpeed.AddPosition(InputFilePosition);
-
- var estSecsRemaining = (InputFileStream.Length - InputFilePosition) / averageSpeed.Average;
-
- if (double.IsNormal(estSecsRemaining))
- OnDecryptTimeRemaining(TimeSpan.FromSeconds(estSecsRemaining));
-
- var progressPercent = 100d * InputFilePosition / InputFileStream.Length;
-
- OnDecryptProgressUpdate(
- new DownloadProgress
- {
- ProgressPercentage = progressPercent,
- BytesReceived = InputFilePosition,
- TotalBytesToReceive = InputFileStream.Length
- });
-
- await Task.Delay(200);
- }
-
- OnDecryptTimeRemaining(TimeSpan.Zero);
- OnDecryptProgressUpdate(zeroProgress);
- }
- }
-
- public virtual Task CancelAsync()
- {
- IsCanceled = true;
- FinalizeDownload();
- return Task.CompletedTask;
- }
- protected abstract Task Step_DownloadAndDecryptAudiobookAsync();
-
- public virtual void SetCoverArt(byte[] coverArt) { }
- protected void OnRetrievedTitle(string? title)
- => RetrievedTitle?.Invoke(this, title);
- protected void OnRetrievedAuthors(string? authors)
- => RetrievedAuthors?.Invoke(this, authors);
- protected void OnRetrievedNarrators(string? narrators)
- => RetrievedNarrators?.Invoke(this, narrators);
- protected void OnRetrievedCoverArt(byte[]? coverArt)
- => RetrievedCoverArt?.Invoke(this, coverArt);
- protected void OnDecryptProgressUpdate(DownloadProgress downloadProgress)
- => DecryptProgressUpdate?.Invoke(this, downloadProgress);
- protected void OnDecryptTimeRemaining(TimeSpan timeRemaining)
- => DecryptTimeRemaining?.Invoke(this, timeRemaining);
- public void OnTempFileCreated(TempFile path)
- => TempFileCreated?.Invoke(this, path);
-
- protected virtual void FinalizeDownload()
- {
- NfsPersister.Dispose();
- downloadFinished = true;
- }
-
- protected async Task Step_CreateCueAsync()
- {
- if (!DownloadOptions.CreateCueSheet) return !IsCanceled;
-
- if (DownloadOptions.ChapterInfo.Count <= 1)
- {
- Serilog.Log.Logger.Information($"Skipped creating .cue because book has no chapters.");
- return !IsCanceled;
- }
-
- // not a critical step. its failure should not prevent future steps from running
- try
- {
- var tempFile = GetNewTempFilePath(".cue");
- await File.WriteAllTextAsync(tempFile.FilePath, Cue.CreateContents(Path.GetFileName(tempFile.FilePath), DownloadOptions.ChapterInfo));
- OnTempFileCreated(tempFile);
- }
- catch (Exception ex)
- {
- Serilog.Log.Logger.Error(ex, $"{nameof(Step_CreateCueAsync)} Failed");
- }
+ Serilog.Log.Logger.Information($"Skipped creating .cue because book has no chapters.");
return !IsCanceled;
}
- private NetworkFileStreamPersister OpenNetworkFileStream()
+ // not a critical step. its failure should not prevent future steps from running
+ try
{
- NetworkFileStreamPersister? nfsp = default;
- try
- {
- if (!File.Exists(jsonDownloadState))
- return nfsp = newNetworkFilePersister();
+ var tempFile = GetNewTempFilePath(".cue");
+ await File.WriteAllTextAsync(tempFile.FilePath, Cue.CreateContents(Path.GetFileName(tempFile.FilePath), DownloadOptions.ChapterInfo));
+ OnTempFileCreated(tempFile);
+ }
+ catch (Exception ex)
+ {
+ Serilog.Log.Logger.Error(ex, $"{nameof(Step_CreateCueAsync)} Failed");
+ }
+ return !IsCanceled;
+ }
- nfsp = new NetworkFileStreamPersister(jsonDownloadState);
- // The download url expires after 1 hour.
- // The new url points to the same file.
- nfsp.NetworkFileStream.SetUriForSameFile(new Uri(DownloadOptions.DownloadUrl));
- return nfsp;
- }
- catch
- {
- nfsp?.Target?.Dispose();
- FileUtility.SaferDelete(jsonDownloadState);
- FileUtility.SaferDelete(tempFilePath);
+ private NetworkFileStreamPersister OpenNetworkFileStream()
+ {
+ NetworkFileStreamPersister? nfsp = default;
+ try
+ {
+ if (!File.Exists(jsonDownloadState))
return nfsp = newNetworkFilePersister();
- }
- finally
- {
- //nfsp will only be null when an unhandled exception occurs. Let the caller handle it.
- if (nfsp is not null)
- {
- nfsp.NetworkFileStream.RequestHeaders["User-Agent"] = DownloadOptions.UserAgent;
- nfsp.NetworkFileStream.SpeedLimit = DownloadOptions.DownloadSpeedBps;
- OnTempFileCreated(new(tempFilePath, DownloadOptions.InputType.ToString()));
- OnTempFileCreated(new(jsonDownloadState));
- }
- }
- NetworkFileStreamPersister newNetworkFilePersister()
+ nfsp = new NetworkFileStreamPersister(jsonDownloadState);
+ // The download url expires after 1 hour.
+ // The new url points to the same file.
+ nfsp.NetworkFileStream.SetUriForSameFile(new Uri(DownloadOptions.DownloadUrl));
+ return nfsp;
+ }
+ catch
+ {
+ nfsp?.Target?.Dispose();
+ FileUtility.SaferDelete(jsonDownloadState);
+ FileUtility.SaferDelete(tempFilePath);
+ return nfsp = newNetworkFilePersister();
+ }
+ finally
+ {
+ //nfsp will only be null when an unhandled exception occurs. Let the caller handle it.
+ if (nfsp is not null)
{
- var networkFileStream = new NetworkFileStream(tempFilePath, new Uri(DownloadOptions.DownloadUrl), 0, new() { { "User-Agent", DownloadOptions.UserAgent } });
- return new NetworkFileStreamPersister(networkFileStream, jsonDownloadState);
+ nfsp.NetworkFileStream.RequestHeaders["User-Agent"] = DownloadOptions.UserAgent;
+ nfsp.NetworkFileStream.SpeedLimit = DownloadOptions.DownloadSpeedBps;
+ OnTempFileCreated(new(tempFilePath, DownloadOptions.InputType.ToString()));
+ OnTempFileCreated(new(jsonDownloadState));
}
}
+
+ NetworkFileStreamPersister newNetworkFilePersister()
+ {
+ var networkFileStream = new NetworkFileStream(tempFilePath, new Uri(DownloadOptions.DownloadUrl), 0, new() { { "User-Agent", DownloadOptions.UserAgent } });
+ return new NetworkFileStreamPersister(networkFileStream, jsonDownloadState);
+ }
}
}
diff --git a/Source/AaxDecrypter/AverageSpeed.cs b/Source/AaxDecrypter/AverageSpeed.cs
index df12cb69..b0016e7b 100644
--- a/Source/AaxDecrypter/AverageSpeed.cs
+++ b/Source/AaxDecrypter/AverageSpeed.cs
@@ -125,7 +125,7 @@ public class AverageSpeed
var time = now - start;
- while (speeds.Count > MAX_SPEEDS || (speeds.Count > 2 && time - speeds.First.Value.Time > SlowWindow))
+ while (speeds.Count > MAX_SPEEDS || (speeds.Count > 2 && time - speeds.First!.Value.Time > SlowWindow))
speeds.RemoveFirst();
if (!double.IsNaN(lastPosition))
@@ -145,7 +145,7 @@ public class AverageSpeed
if (speeds.Count == 0)
return 0;
else if (speeds.Count == 1)
- return speeds.Last.Value.Velocity;
+ return speeds.Last!.Value.Velocity;
else
{
var n_newest = speeds.Count(s => s.Time > lastTime.Subtract(FastWindow));
diff --git a/Source/AaxDecrypter/Cue.cs b/Source/AaxDecrypter/Cue.cs
index 9d55ae1e..7cac3e02 100644
--- a/Source/AaxDecrypter/Cue.cs
+++ b/Source/AaxDecrypter/Cue.cs
@@ -3,58 +3,57 @@ using Mpeg4Lib;
using System.IO;
using System.Text;
-namespace AaxDecrypter
+namespace AaxDecrypter;
+
+public static class Cue
{
- public static class Cue
- {
- public static string CreateContents(string filePath, ChapterInfo chapters)
- {
- var stringBuilder = new StringBuilder();
+ public static string CreateContents(string filePath, ChapterInfo chapters)
+ {
+ var stringBuilder = new StringBuilder();
- stringBuilder.AppendLine(GetFileLine(filePath, "MP3"));
+ stringBuilder.AppendLine(GetFileLine(filePath, "MP3"));
- var startOffset = chapters.StartOffset;
+ var startOffset = chapters.StartOffset;
- var trackCount = 1;
- foreach (var c in chapters.Chapters)
- {
- var startTime = c.StartOffset - startOffset;
+ var trackCount = 1;
+ foreach (var c in chapters.Chapters)
+ {
+ var startTime = c.StartOffset - startOffset;
- stringBuilder.AppendLine($"TRACK {trackCount++} AUDIO");
- stringBuilder.AppendLine($" TITLE \"{c.Title}\"");
- stringBuilder.AppendLine($" INDEX 01 {(int)startTime.TotalMinutes}:{startTime:ss}:{(int)(startTime.Milliseconds * 75d / 1000):D2}");
- }
+ stringBuilder.AppendLine($"TRACK {trackCount++} AUDIO");
+ stringBuilder.AppendLine($" TITLE \"{c.Title}\"");
+ stringBuilder.AppendLine($" INDEX 01 {(int)startTime.TotalMinutes}:{startTime:ss}:{(int)(startTime.Milliseconds * 75d / 1000):D2}");
+ }
- return stringBuilder.ToString();
- }
+ return stringBuilder.ToString();
+ }
- public static void UpdateFileName(FileInfo cueFileInfo, string audioFilePath)
- => UpdateFileName(cueFileInfo.FullName, audioFilePath);
+ public static void UpdateFileName(FileInfo cueFileInfo, string audioFilePath)
+ => UpdateFileName(cueFileInfo.FullName, audioFilePath);
- public static void UpdateFileName(string cueFilePath, FileInfo audioFileInfo)
- => UpdateFileName(cueFilePath, audioFileInfo.FullName);
+ public static void UpdateFileName(string cueFilePath, FileInfo audioFileInfo)
+ => UpdateFileName(cueFilePath, audioFileInfo.FullName);
- public static void UpdateFileName(FileInfo cueFileInfo, FileInfo audioFileInfo)
- => UpdateFileName(cueFileInfo.FullName, audioFileInfo.FullName);
+ public static void UpdateFileName(FileInfo cueFileInfo, FileInfo audioFileInfo)
+ => UpdateFileName(cueFileInfo.FullName, audioFileInfo.FullName);
- public static void UpdateFileName(string cueFilePath, string audioFilePath)
- {
- var cueContents = File.ReadAllLines(cueFilePath);
+ public static void UpdateFileName(string cueFilePath, string audioFilePath)
+ {
+ var cueContents = File.ReadAllLines(cueFilePath);
- for (var i = 0; i < cueContents.Length; i++)
- {
- var line = cueContents[i];
- if (!line.Trim().StartsWith("FILE") || !line.Contains(' '))
- continue;
+ for (var i = 0; i < cueContents.Length; i++)
+ {
+ var line = cueContents[i];
+ if (!line.Trim().StartsWith("FILE") || !line.Contains(' '))
+ continue;
- var fileTypeBegins = line.LastIndexOf(" ") + 1;
- cueContents[i] = GetFileLine(audioFilePath, line[fileTypeBegins..]);
- break;
- }
+ var fileTypeBegins = line.LastIndexOf(" ") + 1;
+ cueContents[i] = GetFileLine(audioFilePath, line[fileTypeBegins..]);
+ break;
+ }
- File.WriteAllLines(cueFilePath, cueContents);
- }
+ File.WriteAllLines(cueFilePath, cueContents);
+ }
- private static string GetFileLine(string filePath, string audioType) => $"FILE {Path.GetFileName(filePath).SurroundWithQuotes()} {audioType}";
- }
+ private static string GetFileLine(string filePath, string audioType) => $"FILE {Path.GetFileName(filePath).SurroundWithQuotes()} {audioType}";
}
diff --git a/Source/AaxDecrypter/IDownloadOptions.cs b/Source/AaxDecrypter/IDownloadOptions.cs
index 7146c9b6..50790f32 100644
--- a/Source/AaxDecrypter/IDownloadOptions.cs
+++ b/Source/AaxDecrypter/IDownloadOptions.cs
@@ -2,55 +2,53 @@
using Mpeg4Lib;
using System;
-#nullable enable
-namespace AaxDecrypter
+namespace AaxDecrypter;
+
+public class KeyData
{
- public class KeyData
- {
- public byte[] KeyPart1 { get; }
- public byte[]? KeyPart2 { get; }
+ public byte[] KeyPart1 { get; }
+ public byte[]? KeyPart2 { get; }
- public KeyData(byte[] keyPart1, byte[]? keyPart2 = null)
- {
- KeyPart1 = keyPart1;
- KeyPart2 = keyPart2;
- }
+ public KeyData(byte[] keyPart1, byte[]? keyPart2 = null)
+ {
+ KeyPart1 = keyPart1;
+ KeyPart2 = keyPart2;
+ }
- [Newtonsoft.Json.JsonConstructor]
- public KeyData(string keyPart1, string? keyPart2 = null)
- {
- ArgumentNullException.ThrowIfNull(keyPart1, nameof(keyPart1));
- KeyPart1 = Convert.FromHexString(keyPart1);
- if (keyPart2 != null)
- KeyPart2 = Convert.FromHexString(keyPart2);
- }
- }
-
- public interface IDownloadOptions
- {
- event EventHandler DownloadSpeedChanged;
- string DownloadUrl { get; }
- string UserAgent { get; }
- KeyData[]? DecryptionKeys { get; }
- TimeSpan RuntimeLength { get; }
- OutputFormat OutputFormat { get; }
- bool StripUnabridged { get; }
- bool CreateCueSheet { get; }
- long DownloadSpeedBps { get; }
- ChapterInfo ChapterInfo { get; }
- bool FixupFile { get; }
- string? AudibleProductId { get; }
- string? Title { get; }
- string? Subtitle { get; }
- string? Publisher { get; }
- string? Language { get; }
- string? SeriesName { get; }
- string? SeriesNumber { get; }
- NAudio.Lame.LameConfig? LameConfig { get; }
- bool Downsample { get; }
- bool MatchSourceBitrate { get; }
- bool MoveMoovToBeginning { get; }
- string GetMultipartTitle(MultiConvertFileProperties props);
- public FileType? InputType { get; }
- }
+ [Newtonsoft.Json.JsonConstructor]
+ public KeyData(string keyPart1, string? keyPart2 = null)
+ {
+ ArgumentNullException.ThrowIfNull(keyPart1, nameof(keyPart1));
+ KeyPart1 = Convert.FromHexString(keyPart1);
+ if (keyPart2 != null)
+ KeyPart2 = Convert.FromHexString(keyPart2);
+ }
+}
+
+public interface IDownloadOptions
+{
+ event EventHandler DownloadSpeedChanged;
+ string DownloadUrl { get; }
+ string UserAgent { get; }
+ KeyData[]? DecryptionKeys { get; }
+ TimeSpan RuntimeLength { get; }
+ OutputFormat OutputFormat { get; }
+ bool StripUnabridged { get; }
+ bool CreateCueSheet { get; }
+ long DownloadSpeedBps { get; }
+ ChapterInfo ChapterInfo { get; }
+ bool FixupFile { get; }
+ string? AudibleProductId { get; }
+ string? Title { get; }
+ string? Subtitle { get; }
+ string? Publisher { get; }
+ string? Language { get; }
+ string? SeriesName { get; }
+ string? SeriesNumber { get; }
+ NAudio.Lame.LameConfig? LameConfig { get; }
+ bool Downsample { get; }
+ bool MatchSourceBitrate { get; }
+ bool MoveMoovToBeginning { get; }
+ string GetMultipartTitle(MultiConvertFileProperties props);
+ public FileType? InputType { get; }
}
diff --git a/Source/AaxDecrypter/MpegUtil.cs b/Source/AaxDecrypter/MpegUtil.cs
index 18af6d33..ae3f3a7f 100644
--- a/Source/AaxDecrypter/MpegUtil.cs
+++ b/Source/AaxDecrypter/MpegUtil.cs
@@ -4,70 +4,68 @@ using NAudio.Lame;
using System;
using System.Linq;
-#nullable enable
-namespace AaxDecrypter
+namespace AaxDecrypter;
+
+public static class MpegUtil
{
- public static class MpegUtil
+ private const string TagDomain = "com.pilabor.tone";
+ public static void ConfigureLameOptions(
+ Mpeg4File mp4File,
+ LameConfig lameConfig,
+ bool downsample,
+ bool matchSourceBitrate,
+ ChapterInfo? chapters)
{
- private const string TagDomain = "com.pilabor.tone";
- public static void ConfigureLameOptions(
- Mpeg4File mp4File,
- LameConfig lameConfig,
- bool downsample,
- bool matchSourceBitrate,
- ChapterInfo? chapters)
+ double bitrateMultiple = 1;
+
+ if (mp4File.TimeScale < lameConfig.OutputSampleRate)
{
- double bitrateMultiple = 1;
+ lameConfig.OutputSampleRate = mp4File.TimeScale;
+ }
+ else if (mp4File.TimeScale > lameConfig.OutputSampleRate)
+ {
+ bitrateMultiple *= (double)lameConfig.OutputSampleRate / mp4File.TimeScale;
+ }
- if (mp4File.TimeScale < lameConfig.OutputSampleRate)
- {
- lameConfig.OutputSampleRate = mp4File.TimeScale;
- }
- else if (mp4File.TimeScale > lameConfig.OutputSampleRate)
- {
- bitrateMultiple *= (double)lameConfig.OutputSampleRate / mp4File.TimeScale;
- }
+ if (mp4File.AudioChannels == 2)
+ {
+ if (downsample)
+ bitrateMultiple /= 2;
+ else
+ lameConfig.Mode = MPEGMode.Stereo;
+ }
- if (mp4File.AudioChannels == 2)
- {
- if (downsample)
- bitrateMultiple /= 2;
- else
- lameConfig.Mode = MPEGMode.Stereo;
- }
+ if (matchSourceBitrate)
+ {
+ int kbps = (int)Math.Round(mp4File.AverageBitrate * bitrateMultiple / 1024);
- if (matchSourceBitrate)
- {
- int kbps = (int)Math.Round(mp4File.AverageBitrate * bitrateMultiple / 1024);
+ if (lameConfig.VBR is null)
+ lameConfig.BitRate = kbps;
+ else if (lameConfig.VBR == VBRMode.ABR)
+ lameConfig.ABRRateKbps = kbps;
+ }
- if (lameConfig.VBR is null)
- lameConfig.BitRate = kbps;
- else if (lameConfig.VBR == VBRMode.ABR)
- lameConfig.ABRRateKbps = kbps;
- }
+ //Setup metadata tags
+ lameConfig.ID3 = mp4File.MetadataItems.ToIDTags();
- //Setup metadata tags
- lameConfig.ID3 = mp4File.MetadataItems.ToIDTags();
+ if (mp4File.MetadataItems.AppleListBox.GetFreeformTagString(TagDomain, "SUBTITLE") is string subtitle)
+ lameConfig.ID3.Subtitle = subtitle;
- if (mp4File.MetadataItems.AppleListBox.GetFreeformTagString(TagDomain, "SUBTITLE") is string subtitle)
- lameConfig.ID3.Subtitle = subtitle;
+ if (chapters?.Count > 0)
+ {
+ var cue = Cue.CreateContents(lameConfig.ID3.Title + ".mp3", chapters);
+ lameConfig.ID3.UserDefinedText.Add("CUESHEET", cue);
+ }
- if (chapters?.Count > 0)
- {
- var cue = Cue.CreateContents(lameConfig.ID3.Title + ".mp3", chapters);
- lameConfig.ID3.UserDefinedText.Add("CUESHEET", cue);
- }
-
- //Copy over all other freeform tags
- foreach (var t in mp4File.MetadataItems.AppleListBox.Tags.OfType())
- {
- if (t.Name?.Name is string name &&
- t.Mean?.ReverseDnsDomain is string domain &&
- !lameConfig.ID3.UserDefinedText.ContainsKey(name) &&
- mp4File.MetadataItems.AppleListBox.GetFreeformTagString(domain, name) is string tagStr &&
- !string.IsNullOrWhiteSpace(tagStr))
- lameConfig.ID3.UserDefinedText.Add(name, tagStr);
- }
+ //Copy over all other freeform tags
+ foreach (var t in mp4File.MetadataItems.AppleListBox.Tags.OfType())
+ {
+ if (t.Name?.Name is string name &&
+ t.Mean?.ReverseDnsDomain is string domain &&
+ !lameConfig.ID3.UserDefinedText.ContainsKey(name) &&
+ mp4File.MetadataItems.AppleListBox.GetFreeformTagString(domain, name) is string tagStr &&
+ !string.IsNullOrWhiteSpace(tagStr))
+ lameConfig.ID3.UserDefinedText.Add(name, tagStr);
}
}
}
diff --git a/Source/AaxDecrypter/MultiConvertFileProperties.cs b/Source/AaxDecrypter/MultiConvertFileProperties.cs
index bfb9d9f7..e4bab59b 100644
--- a/Source/AaxDecrypter/MultiConvertFileProperties.cs
+++ b/Source/AaxDecrypter/MultiConvertFileProperties.cs
@@ -4,10 +4,10 @@ namespace AaxDecrypter
{
public class MultiConvertFileProperties
{
- public string OutputFileName { get; set; }
+ public required string OutputFileName { get; set; }
public int PartsPosition { get; set; }
public int PartsTotal { get; set; }
- public string Title { get; set; }
+ public string? Title { get; set; }
public DateTime FileDate { get; } = DateTime.Now;
}
}
diff --git a/Source/AaxDecrypter/NetworkFileStream.cs b/Source/AaxDecrypter/NetworkFileStream.cs
index 725018ca..a63e8b42 100644
--- a/Source/AaxDecrypter/NetworkFileStream.cs
+++ b/Source/AaxDecrypter/NetworkFileStream.cs
@@ -8,420 +8,419 @@ using System.Net.Http;
using System.Threading;
using System.Threading.Tasks;
-namespace AaxDecrypter
+namespace AaxDecrypter;
+
+/// <summary>A resumable, simultaneous file downloader and reader.</summary>
+public class NetworkFileStream : Stream, IUpdatable
{
- /// A resumable, simultaneous file downloader and reader.
- public class NetworkFileStream : Stream, IUpdatable
+ public event EventHandler? Updated;
+
+ #region Public Properties
+
+ /// <summary>Location to save the downloaded data.</summary>
+ [JsonProperty(Required = Required.Always)]
+ public string SaveFilePath { get; }
+
+ /// <summary>Http(s) address of the file to download.</summary>
+ [JsonProperty(Required = Required.Always)]
+ public Uri Uri { get; private set; }
+
+ /// <summary>Http headers to be sent to the server with the request.</summary>
+ [JsonProperty(Required = Required.Always)]
+ public Dictionary<string, string> RequestHeaders { get; private set; }
+
+ /// <summary>The position in <see cref="SaveFilePath"/> that has been written and flushed to disk.</summary>
+ [JsonProperty(Required = Required.Always)]
+ public long WritePosition { get; private set; }
+
+ /// <summary>The total length of the file to download.</summary>
+ [JsonProperty(Required = Required.Always)]
+ public long ContentLength { get; private set; }
+
+ [JsonIgnore]
+ public bool IsCancelled => _cancellationSource.IsCancellationRequested;
+
+ [JsonIgnore]
+ public Task? DownloadTask { get; private set; }
+
+ private long _speedLimit = 0;
+ /// bytes per second
+ public long SpeedLimit { get => _speedLimit; set => _speedLimit = value <= 0 ? 0 : Math.Max(value, MIN_BYTES_PER_SECOND); }
+
+ #endregion
+
+ #region Private Properties
+ private FileStream _writeFile { get; }
+ private FileStream _readFile { get; }
+ private CancellationTokenSource _cancellationSource { get; } = new();
+ private EventWaitHandle? _downloadedPiece { get; set; }
+
+ private DateTime NextUpdateTime { get; set; }
+
+ #endregion
+
+ #region Constants
+
+ //Download memory buffer size
+ private const int DOWNLOAD_BUFF_SZ = 8 * 1024;
+
+ //NetworkFileStream will flush all data in _writeFile to disk after every
+ //DATA_FLUSH_SZ bytes are written to the file stream.
+ private const int DATA_FLUSH_SZ = 1024 * 1024;
+
+ //Number of times per second the download rate is checked and throttled
+ private const int THROTTLE_FREQUENCY = 8;
+
+ //Minimum throttle rate. The minimum amount of data that can be throttled
+ //on each iteration of the download loop is DOWNLOAD_BUFF_SZ.
+ public const int MIN_BYTES_PER_SECOND = DOWNLOAD_BUFF_SZ * THROTTLE_FREQUENCY;
+
+ #endregion
+
+ #region Constructor
+
+ /// <summary>A resumable, simultaneous file downloader and reader.</summary>
+ /// <param name="saveFilePath">Path to a location on disk to save the downloaded data from <paramref name="uri"/></param>
+ /// <param name="uri">Http(s) address of the file to download.</param>
+ /// <param name="writePosition">The position in <paramref name="saveFilePath"/> to begin downloading.</param>
+ /// <param name="requestHeaders">Http headers to be sent to the server with the <see cref="HttpRequestMessage"/>.</param>
+ public NetworkFileStream(string saveFilePath, Uri uri, long writePosition = 0, Dictionary<string, string>? requestHeaders = null)
{
- public event EventHandler Updated;
+ SaveFilePath = ArgumentValidator.EnsureNotNullOrWhiteSpace(saveFilePath, nameof(saveFilePath));
+ Uri = ArgumentValidator.EnsureNotNull(uri, nameof(uri));
+ WritePosition = ArgumentValidator.EnsureGreaterThan(writePosition, nameof(writePosition), -1);
- #region Public Properties
+ if (!Directory.Exists(Path.GetDirectoryName(saveFilePath)))
+ throw new ArgumentException($"Specified {nameof(saveFilePath)} directory \"{Path.GetDirectoryName(saveFilePath)}\" does not exist.");
- /// Location to save the downloaded data.
- [JsonProperty(Required = Required.Always)]
- public string SaveFilePath { get; }
+ RequestHeaders = requestHeaders ?? new();
- /// Http(s) address of the file to download.
- [JsonProperty(Required = Required.Always)]
- public Uri Uri { get; private set; }
-
- /// Http headers to be sent to the server with the request.
- [JsonProperty(Required = Required.Always)]
- public Dictionary RequestHeaders { get; private set; }
-
- /// The position in that has been written and flushed to disk.
- [JsonProperty(Required = Required.Always)]
- public long WritePosition { get; private set; }
-
- /// The total length of the file to download.
- [JsonProperty(Required = Required.Always)]
- public long ContentLength { get; private set; }
-
- [JsonIgnore]
- public bool IsCancelled => _cancellationSource.IsCancellationRequested;
-
- [JsonIgnore]
- public Task DownloadTask { get; private set; }
-
- private long _speedLimit = 0;
- /// bytes per second
- public long SpeedLimit { get => _speedLimit; set => _speedLimit = value <= 0 ? 0 : Math.Max(value, MIN_BYTES_PER_SECOND); }
-
- #endregion
-
- #region Private Properties
- private FileStream _writeFile { get; }
- private FileStream _readFile { get; }
- private CancellationTokenSource _cancellationSource { get; } = new();
- private EventWaitHandle _downloadedPiece { get; set; }
-
- private DateTime NextUpdateTime { get; set; }
-
- #endregion
-
- #region Constants
-
- //Download memory buffer size
- private const int DOWNLOAD_BUFF_SZ = 8 * 1024;
-
- //NetworkFileStream will flush all data in _writeFile to disk after every
- //DATA_FLUSH_SZ bytes are written to the file stream.
- private const int DATA_FLUSH_SZ = 1024 * 1024;
-
- //Number of times per second the download rate is checked and throttled
- private const int THROTTLE_FREQUENCY = 8;
-
- //Minimum throttle rate. The minimum amount of data that can be throttled
- //on each iteration of the download loop is DOWNLOAD_BUFF_SZ.
- public const int MIN_BYTES_PER_SECOND = DOWNLOAD_BUFF_SZ * THROTTLE_FREQUENCY;
-
- #endregion
-
- #region Constructor
-
- /// A resumable, simultaneous file downloader and reader.
- /// Path to a location on disk to save the downloaded data from
- /// Http(s) address of the file to download.
- /// The position in to begin downloading.
- /// Http headers to be sent to the server with the .
- public NetworkFileStream(string saveFilePath, Uri uri, long writePosition = 0, Dictionary requestHeaders = null)
+ _writeFile = new FileStream(SaveFilePath, FileMode.OpenOrCreate, FileAccess.Write, FileShare.ReadWrite)
{
- SaveFilePath = ArgumentValidator.EnsureNotNullOrWhiteSpace(saveFilePath, nameof(saveFilePath));
- Uri = ArgumentValidator.EnsureNotNull(uri, nameof(uri));
- WritePosition = ArgumentValidator.EnsureGreaterThan(writePosition, nameof(writePosition), -1);
+ Position = WritePosition
+ };
- if (!Directory.Exists(Path.GetDirectoryName(saveFilePath)))
- throw new ArgumentException($"Specified {nameof(saveFilePath)} directory \"{Path.GetDirectoryName(saveFilePath)}\" does not exist.");
-
- RequestHeaders = requestHeaders ?? new();
-
- _writeFile = new FileStream(SaveFilePath, FileMode.OpenOrCreate, FileAccess.Write, FileShare.ReadWrite)
- {
- Position = WritePosition
- };
-
- if (_writeFile.Length < WritePosition)
- {
- _writeFile.Dispose();
- throw new InvalidDataException($"{SaveFilePath} file length is shorter than {WritePosition}");
- }
-
- _readFile = new FileStream(SaveFilePath, FileMode.Open, FileAccess.Read, FileShare.ReadWrite);
-
- SetUriForSameFile(uri);
+ if (_writeFile.Length < WritePosition)
+ {
+ _writeFile.Dispose();
+ throw new InvalidDataException($"{SaveFilePath} file length is shorter than {WritePosition}");
}
- #endregion
+ _readFile = new FileStream(SaveFilePath, FileMode.Open, FileAccess.Read, FileShare.ReadWrite);
- #region Downloader
+ SetUriForSameFile(uri);
+ }
- /// Update the .
- private void OnUpdate(bool waitForWrite = false)
+ #endregion
+
+ #region Downloader
+
+ /// Update the .
+ private void OnUpdate(bool waitForWrite = false)
+ {
+ try
{
- try
+ if (waitForWrite || DateTime.UtcNow > NextUpdateTime)
{
- if (waitForWrite || DateTime.UtcNow > NextUpdateTime)
+ Updated?.Invoke(this, EventArgs.Empty);
+ //JsonFilePersister Will not allow update intervals shorter than 100 milliseconds
+ //If an update is called less than 100 ms since the last update, persister will
+ //sleep the thread until 100 ms has elapsed.
+ NextUpdateTime = DateTime.UtcNow.AddMilliseconds(110);
+ }
+ }
+ catch (Exception ex)
+ {
+ Serilog.Log.Error(ex, "An error was encountered while saving the download progress to JSON");
+ }
+ }
+
+ /// <summary>Set a different <see cref="Uri"/> to the same file targeted by this instance of <see cref="NetworkFileStream"/></summary>
+ /// <param name="uriToSameFile">New host must match existing host.</param>
+ public void SetUriForSameFile(Uri uriToSameFile)
+ {
+ ArgumentValidator.EnsureNotNullOrWhiteSpace(uriToSameFile?.AbsoluteUri, nameof(uriToSameFile));
+
+ if (Path.GetFileName(uriToSameFile.LocalPath) != Path.GetFileName(Uri.LocalPath))
+ throw new ArgumentException($"New uri to the same file must have the same file name.");
+ if (uriToSameFile.Host != Uri.Host)
+ throw new ArgumentException($"New uri to the same file must have the same host.\r\n Old Host :{Uri.Host}\r\nNew Host: {uriToSameFile.Host}");
+ if (DownloadTask is not null)
+ throw new InvalidOperationException("Cannot change Uri after download has started.");
+
+ Uri = uriToSameFile;
+ }
+
+ /// <summary>Begins downloading <see cref="Uri"/> to <see cref="SaveFilePath"/> in a background thread.</summary>
+ /// <returns>The downloader <see cref="Task"/></returns>
+ public async Task BeginDownloadingAsync()
+ {
+ if (ContentLength != 0 && WritePosition == ContentLength)
+ {
+ DownloadTask = Task.CompletedTask;
+ return;
+ }
+
+ if (ContentLength != 0 && WritePosition > ContentLength)
+ throw new WebException($"Specified write position (0x{WritePosition:X10}) is larger than {nameof(ContentLength)} (0x{ContentLength:X10}).");
+
+ //Initiate connection with the first request block and
+ //get the total content length before returning.
+ var client = new HttpClient();
+ var response = await RequestNextByteRangeAsync(client);
+
+ if (ContentLength != 0 && ContentLength != response.FileSize)
+ throw new WebException($"Content length of 0x{response.FileSize:X10} differs from partially downloaded content length of 0x{ContentLength:X10}");
+
+ ContentLength = response.FileSize;
+
+ _downloadedPiece = new EventWaitHandle(false, EventResetMode.AutoReset);
+ //Hand off the client and the open request to the downloader to download and write data to file.
+ DownloadTask = Task.Run(() => DownloadLoopInternal(client , response), _cancellationSource.Token);
+ }
+
+ private async Task DownloadLoopInternal(HttpClient client, BlockResponse blockResponse)
+ {
+ try
+ {
+ long startPosition = WritePosition;
+
+ while (WritePosition < ContentLength && !IsCancelled)
+ {
+ try
{
- Updated?.Invoke(this, EventArgs.Empty);
- //JsonFilePersister Will not allow update intervals shorter than 100 milliseconds
- //If an update is called less than 100 ms since the last update, persister will
- //sleep the thread until 100 ms has elapsed.
- NextUpdateTime = DateTime.UtcNow.AddMilliseconds(110);
+ await DownloadToFile(blockResponse);
+ }
+ catch (HttpIOException e)
+ when (e.HttpRequestError is HttpRequestError.ResponseEnded
+ && WritePosition != startPosition
+ && WritePosition < ContentLength && !IsCancelled)
+ {
+ Serilog.Log.Logger.Debug($"The download connection ended before the file completed downloading all 0x{ContentLength:X10} bytes");
+
+ //the download made *some* progress since the last attempt.
+ //Try again to complete the download from where it left off.
+ //Make sure to rewind file to last flush position.
+ _writeFile.Position = startPosition = WritePosition;
+ blockResponse.Dispose();
+ blockResponse = await RequestNextByteRangeAsync(client);
+
+ Serilog.Log.Logger.Debug($"Resuming the file download starting at position 0x{WritePosition:X10}.");
}
}
- catch (Exception ex)
- {
- Serilog.Log.Error(ex, "An error was encountered while saving the download progress to JSON");
- }
}
-
- /// Set a different to the same file targeted by this instance of
- /// New host must match existing host.
- public void SetUriForSameFile(Uri uriToSameFile)
+ catch (Exception ex)
{
- ArgumentValidator.EnsureNotNullOrWhiteSpace(uriToSameFile?.AbsoluteUri, nameof(uriToSameFile));
-
- if (Path.GetFileName(uriToSameFile.LocalPath) != Path.GetFileName(Uri.LocalPath))
- throw new ArgumentException($"New uri to the same file must have the same file name.");
- if (uriToSameFile.Host != Uri.Host)
- throw new ArgumentException($"New uri to the same file must have the same host.\r\n Old Host :{Uri.Host}\r\nNew Host: {uriToSameFile.Host}");
- if (DownloadTask is not null)
- throw new InvalidOperationException("Cannot change Uri after download has started.");
-
- Uri = uriToSameFile;
+ //Don't throw from DownloadTask.
+ //This task gets awaited in Dispose() and we don't want to have an unhandled exception there.
+ Serilog.Log.Error(ex, "An error was encountered during the download process.");
}
-
- /// Begins downloading to in a background thread.
- /// The downloader
- public async Task BeginDownloadingAsync()
+ finally
{
- if (ContentLength != 0 && WritePosition == ContentLength)
- {
- DownloadTask = Task.CompletedTask;
- return;
- }
-
- if (ContentLength != 0 && WritePosition > ContentLength)
- throw new WebException($"Specified write position (0x{WritePosition:X10}) is larger than {nameof(ContentLength)} (0x{ContentLength:X10}).");
-
- //Initiate connection with the first request block and
- //get the total content length before returning.
- var client = new HttpClient();
- var response = await RequestNextByteRangeAsync(client);
-
- if (ContentLength != 0 && ContentLength != response.FileSize)
- throw new WebException($"Content length of 0x{response.FileSize:X10} differs from partially downloaded content length of 0x{ContentLength:X10}");
-
- ContentLength = response.FileSize;
-
- _downloadedPiece = new EventWaitHandle(false, EventResetMode.AutoReset);
- //Hand off the client and the open request to the downloader to download and write data to file.
- DownloadTask = Task.Run(() => DownloadLoopInternal(client , response), _cancellationSource.Token);
+ _writeFile.Dispose();
+ blockResponse.Dispose();
+ client.Dispose();
}
+ }
- private async Task DownloadLoopInternal(HttpClient client, BlockResponse blockResponse)
+ private async Task RequestNextByteRangeAsync(HttpClient client)
+ {
+ using var request = new HttpRequestMessage(HttpMethod.Get, Uri);
+
+ //Just in case it snuck in the saved json (Issue #1232)
+ RequestHeaders.Remove("Range");
+
+ foreach (var header in RequestHeaders)
+ request.Headers.Add(header.Key, header.Value);
+
+ request.Headers.Add("Range", $"bytes={WritePosition}-");
+
+ var response = await client.SendAsync(request, HttpCompletionOption.ResponseHeadersRead, _cancellationSource.Token);
+
+ if (response.StatusCode != HttpStatusCode.PartialContent)
+ throw new WebException($"Server at {Uri.Host} responded with unexpected status code: {response.StatusCode}.");
+
+ var totalSize = response.Content.Headers.ContentRange?.Length ??
+ throw new WebException("The response did not contain a total content length.");
+
+ var rangeSize = response.Content.Headers.ContentLength ??
+ throw new WebException($"The response did not contain a {nameof(response.Content.Headers.ContentLength)};");
+
+ return new BlockResponse(response, rangeSize, totalSize);
+ }
+
+ private readonly record struct BlockResponse(HttpResponseMessage Response, long BlockSize, long FileSize) : IDisposable
+ {
+ public void Dispose() => Response?.Dispose();
+ }
+
+ /// <summary>Download <see cref="Uri"/> to <see cref="SaveFilePath"/>.</summary>
+ private async Task DownloadToFile(BlockResponse block)
+ {
+ var endPosition = WritePosition + block.BlockSize;
+ using var networkStream = await block.Response.Content.ReadAsStreamAsync(_cancellationSource.Token);
+
+ var downloadPosition = WritePosition;
+ var nextFlush = downloadPosition + DATA_FLUSH_SZ;
+ var buff = new byte[DOWNLOAD_BUFF_SZ];
+
+ try
{
- try
+ DateTime startTime = DateTime.UtcNow;
+ long bytesReadSinceThrottle = 0;
+ int bytesRead;
+ do
{
- long startPosition = WritePosition;
+ bytesRead = await networkStream.ReadAsync(buff, _cancellationSource.Token);
+ await _writeFile.WriteAsync(buff, 0, bytesRead, _cancellationSource.Token);
- while (WritePosition < ContentLength && !IsCancelled)
+ downloadPosition += bytesRead;
+
+ if (downloadPosition > nextFlush)
{
- try
- {
- await DownloadToFile(blockResponse);
- }
- catch (HttpIOException e)
- when (e.HttpRequestError is HttpRequestError.ResponseEnded
- && WritePosition != startPosition
- && WritePosition < ContentLength && !IsCancelled)
- {
- Serilog.Log.Logger.Debug($"The download connection ended before the file completed downloading all 0x{ContentLength:X10} bytes");
-
- //the download made *some* progress since the last attempt.
- //Try again to complete the download from where it left off.
- //Make sure to rewind file to last flush position.
- _writeFile.Position = startPosition = WritePosition;
- blockResponse.Dispose();
- blockResponse = await RequestNextByteRangeAsync(client);
-
- Serilog.Log.Logger.Debug($"Resuming the file download starting at position 0x{WritePosition:X10}.");
- }
+ await _writeFile.FlushAsync(_cancellationSource.Token);
+ WritePosition = downloadPosition;
+ OnUpdate();
+ nextFlush = downloadPosition + DATA_FLUSH_SZ;
+ _downloadedPiece?.Set();
}
- }
- catch (Exception ex)
- {
- //Don't throw from DownloadTask.
- //This task gets awaited in Dispose() and we don't want to have an unhandled exception there.
- Serilog.Log.Error(ex, "An error was encountered during the download process.");
- }
- finally
- {
- _writeFile.Dispose();
- blockResponse.Dispose();
- client.Dispose();
- }
- }
- private async Task RequestNextByteRangeAsync(HttpClient client)
- {
- using var request = new HttpRequestMessage(HttpMethod.Get, Uri);
+ #region throttle
- //Just in case it snuck in the saved json (Issue #1232)
- RequestHeaders.Remove("Range");
+ bytesReadSinceThrottle += bytesRead;
- foreach (var header in RequestHeaders)
- request.Headers.Add(header.Key, header.Value);
-
- request.Headers.Add("Range", $"bytes={WritePosition}-");
-
- var response = await client.SendAsync(request, HttpCompletionOption.ResponseHeadersRead, _cancellationSource.Token);
-
- if (response.StatusCode != HttpStatusCode.PartialContent)
- throw new WebException($"Server at {Uri.Host} responded with unexpected status code: {response.StatusCode}.");
-
- var totalSize = response.Content.Headers.ContentRange?.Length ??
- throw new WebException("The response did not contain a total content length.");
-
- var rangeSize = response.Content.Headers.ContentLength ??
- throw new WebException($"The response did not contain a {nameof(response.Content.Headers.ContentLength)};");
-
- return new BlockResponse(response, rangeSize, totalSize);
- }
-
- private readonly record struct BlockResponse(HttpResponseMessage Response, long BlockSize, long FileSize) : IDisposable
- {
- public void Dispose() => Response?.Dispose();
- }
-
- /// Download to .
- private async Task DownloadToFile(BlockResponse block)
- {
- var endPosition = WritePosition + block.BlockSize;
- using var networkStream = await block.Response.Content.ReadAsStreamAsync(_cancellationSource.Token);
-
- var downloadPosition = WritePosition;
- var nextFlush = downloadPosition + DATA_FLUSH_SZ;
- var buff = new byte[DOWNLOAD_BUFF_SZ];
-
- try
- {
- DateTime startTime = DateTime.UtcNow;
- long bytesReadSinceThrottle = 0;
- int bytesRead;
- do
+ if (SpeedLimit >= MIN_BYTES_PER_SECOND && bytesReadSinceThrottle > SpeedLimit / THROTTLE_FREQUENCY)
{
- bytesRead = await networkStream.ReadAsync(buff, _cancellationSource.Token);
- await _writeFile.WriteAsync(buff, 0, bytesRead, _cancellationSource.Token);
+ var delayMS = (int)(startTime.AddSeconds(1d / THROTTLE_FREQUENCY) - DateTime.UtcNow).TotalMilliseconds;
+ if (delayMS > 0)
+ await Task.Delay(delayMS, _cancellationSource.Token);
- downloadPosition += bytesRead;
+ startTime = DateTime.UtcNow;
+ bytesReadSinceThrottle = 0;
+ }
- if (downloadPosition > nextFlush)
- {
- await _writeFile.FlushAsync(_cancellationSource.Token);
- WritePosition = downloadPosition;
- OnUpdate();
- nextFlush = downloadPosition + DATA_FLUSH_SZ;
- _downloadedPiece.Set();
- }
+ #endregion
- #region throttle
+ } while (downloadPosition < endPosition && !IsCancelled && bytesRead > 0);
- bytesReadSinceThrottle += bytesRead;
+ await _writeFile.FlushAsync(_cancellationSource.Token);
+ WritePosition = downloadPosition;
- if (SpeedLimit >= MIN_BYTES_PER_SECOND && bytesReadSinceThrottle > SpeedLimit / THROTTLE_FREQUENCY)
- {
- var delayMS = (int)(startTime.AddSeconds(1d / THROTTLE_FREQUENCY) - DateTime.UtcNow).TotalMilliseconds;
- if (delayMS > 0)
- await Task.Delay(delayMS, _cancellationSource.Token);
+ if (!IsCancelled && WritePosition < endPosition)
+ throw new WebException($"Downloaded size (0x{WritePosition:X10}) is less than {nameof(ContentLength)} (0x{ContentLength:X10}).");
- startTime = DateTime.UtcNow;
- bytesReadSinceThrottle = 0;
- }
-
- #endregion
-
- } while (downloadPosition < endPosition && !IsCancelled && bytesRead > 0);
-
- await _writeFile.FlushAsync(_cancellationSource.Token);
- WritePosition = downloadPosition;
-
- if (!IsCancelled && WritePosition < endPosition)
- throw new WebException($"Downloaded size (0x{WritePosition:X10}) is less than {nameof(ContentLength)} (0x{ContentLength:X10}).");
-
- if (WritePosition > endPosition)
- throw new WebException($"Downloaded size (0x{WritePosition:X10}) is greater than {nameof(ContentLength)} (0x{ContentLength:X10}).");
- }
- catch (OperationCanceledException)
- {
- Serilog.Log.Information("Download was cancelled");
- }
- finally
- {
- _downloadedPiece.Set();
- OnUpdate(waitForWrite: true);
- }
+ if (WritePosition > endPosition)
+ throw new WebException($"Downloaded size (0x{WritePosition:X10}) is greater than {nameof(ContentLength)} (0x{ContentLength:X10}).");
}
-
- #endregion
-
- #region Download Stream Reader
-
- [JsonIgnore]
- public override bool CanRead => _readFile.CanRead;
-
- [JsonIgnore]
- public override bool CanSeek => _readFile.CanSeek;
-
- [JsonIgnore]
- public override bool CanWrite => false;
-
- [JsonIgnore]
- public override long Length
+ catch (OperationCanceledException)
{
- get
- {
- if (DownloadTask is null)
- throw new InvalidOperationException($"Background downloader must first be started by calling {nameof(BeginDownloadingAsync)}");
- return ContentLength;
- }
+ Serilog.Log.Information("Download was cancelled");
}
+ finally
+ {
+ _downloadedPiece?.Set();
+ OnUpdate(waitForWrite: true);
+ }
+ }
- [JsonIgnore]
- public override long Position { get => _readFile.Position; set => Seek(value, SeekOrigin.Begin); }
+ #endregion
- [JsonIgnore]
- public override bool CanTimeout => false;
+ #region Download Stream Reader
- [JsonIgnore]
- public override int ReadTimeout { get => base.ReadTimeout; set => base.ReadTimeout = value; }
+ [JsonIgnore]
+ public override bool CanRead => _readFile.CanRead;
- [JsonIgnore]
- public override int WriteTimeout { get => base.WriteTimeout; set => base.WriteTimeout = value; }
+ [JsonIgnore]
+ public override bool CanSeek => _readFile.CanSeek;
- public override void Flush() => throw new InvalidOperationException();
- public override void SetLength(long value) => throw new InvalidOperationException();
- public override void Write(byte[] buffer, int offset, int count) => throw new InvalidOperationException();
+ [JsonIgnore]
+ public override bool CanWrite => false;
- public override int Read(byte[] buffer, int offset, int count)
+ [JsonIgnore]
+ public override long Length
+ {
+ get
{
if (DownloadTask is null)
throw new InvalidOperationException($"Background downloader must first be started by calling {nameof(BeginDownloadingAsync)}");
-
- var toRead = Math.Min(count, Length - Position);
- WaitToPosition(Position + toRead);
- return IsCancelled ? 0 : _readFile.Read(buffer, offset, count);
+ return ContentLength;
}
-
- public override long Seek(long offset, SeekOrigin origin)
- {
- var newPosition = origin switch
- {
- SeekOrigin.Current => Position + offset,
- SeekOrigin.End => ContentLength + offset,
- _ => offset,
- };
-
- WaitToPosition(newPosition);
- return _readFile.Position = newPosition;
- }
-
- /// Blocks until the file has downloaded to at least , then returns.
- /// The minimum required flushed data length in .
- private void WaitToPosition(long requiredPosition)
- {
- while (WritePosition < requiredPosition
- && DownloadTask?.IsCompleted is false
- && !IsCancelled)
- {
- _downloadedPiece.WaitOne(50);
- }
- }
-
- private bool disposed = false;
-
- /*
- * https://learn.microsoft.com/en-us/dotnet/api/system.io.stream.dispose?view=net-7.0
- *
- * In derived classes, do not override the Close() method, instead, put all of the
- * Stream cleanup logic in the Dispose(Boolean) method.
- */
- protected override void Dispose(bool disposing)
- {
- if (disposing && !Interlocked.CompareExchange(ref disposed, true, false))
- {
- _cancellationSource.Cancel();
- DownloadTask?.GetAwaiter().GetResult();
- _downloadedPiece?.Dispose();
- _cancellationSource?.Dispose();
- _readFile.Dispose();
- _writeFile.Dispose();
- OnUpdate(waitForWrite: true);
- }
-
- base.Dispose(disposing);
- }
-
- #endregion
}
+
+ [JsonIgnore]
+ public override long Position { get => _readFile.Position; set => Seek(value, SeekOrigin.Begin); }
+
+ [JsonIgnore]
+ public override bool CanTimeout => false;
+
+ [JsonIgnore]
+ public override int ReadTimeout { get => base.ReadTimeout; set => base.ReadTimeout = value; }
+
+ [JsonIgnore]
+ public override int WriteTimeout { get => base.WriteTimeout; set => base.WriteTimeout = value; }
+
+ public override void Flush() => throw new InvalidOperationException();
+ public override void SetLength(long value) => throw new InvalidOperationException();
+ public override void Write(byte[] buffer, int offset, int count) => throw new InvalidOperationException();
+
+ public override int Read(byte[] buffer, int offset, int count)
+ {
+ if (DownloadTask is null)
+ throw new InvalidOperationException($"Background downloader must first be started by calling {nameof(BeginDownloadingAsync)}");
+
+ var toRead = Math.Min(count, Length - Position);
+ WaitToPosition(Position + toRead);
+ return IsCancelled ? 0 : _readFile.Read(buffer, offset, count);
+ }
+
+ public override long Seek(long offset, SeekOrigin origin)
+ {
+ var newPosition = origin switch
+ {
+ SeekOrigin.Current => Position + offset,
+ SeekOrigin.End => ContentLength + offset,
+ _ => offset,
+ };
+
+ WaitToPosition(newPosition);
+ return _readFile.Position = newPosition;
+ }
+
+ /// <summary>Blocks until the file has downloaded to at least <paramref name="requiredPosition"/>, then returns.</summary>
+ /// <param name="requiredPosition">The minimum required flushed data length in <see cref="SaveFilePath"/>.</param>
+ private void WaitToPosition(long requiredPosition)
+ {
+ while (WritePosition < requiredPosition
+ && DownloadTask?.IsCompleted is false
+ && !IsCancelled)
+ {
+ _downloadedPiece?.WaitOne(50);
+ }
+ }
+
+ private bool disposed = false;
+
+ /*
+ * https://learn.microsoft.com/en-us/dotnet/api/system.io.stream.dispose?view=net-7.0
+ *
+ * In derived classes, do not override the Close() method, instead, put all of the
+ * Stream cleanup logic in the Dispose(Boolean) method.
+ */
+ protected override void Dispose(bool disposing)
+ {
+ if (disposing && !Interlocked.CompareExchange(ref disposed, true, false))
+ {
+ _cancellationSource.Cancel();
+ DownloadTask?.GetAwaiter().GetResult();
+ _downloadedPiece?.Dispose();
+ _cancellationSource?.Dispose();
+ _readFile.Dispose();
+ _writeFile.Dispose();
+ OnUpdate(waitForWrite: true);
+ }
+
+ base.Dispose(disposing);
+ }
+
+ #endregion
}
diff --git a/Source/AaxDecrypter/NetworkFileStreamPersister.cs b/Source/AaxDecrypter/NetworkFileStreamPersister.cs
index fed3ad7c..58724ba4 100644
--- a/Source/AaxDecrypter/NetworkFileStreamPersister.cs
+++ b/Source/AaxDecrypter/NetworkFileStreamPersister.cs
@@ -1,25 +1,24 @@
using Dinah.Core.IO;
-namespace AaxDecrypter
+namespace AaxDecrypter;
+
+internal class NetworkFileStreamPersister : JsonFilePersister<NetworkFileStream>
{
- internal class NetworkFileStreamPersister : JsonFilePersister
+ /// Alias for Target
+ public NetworkFileStream NetworkFileStream => Target;
+
+ /// uses path. create file if doesn't yet exist
+ public NetworkFileStreamPersister(NetworkFileStream networkFileStream, string path, string? jsonPath = null)
+ : base(networkFileStream, path, jsonPath) { }
+
+ /// load from existing file
+ public NetworkFileStreamPersister(string path, string? jsonPath = null)
+ : base(path, jsonPath) { }
+
+ protected override void Dispose(bool disposing)
{
- /// Alias for Target
- public NetworkFileStream NetworkFileStream => Target;
-
- /// uses path. create file if doesn't yet exist
- public NetworkFileStreamPersister(NetworkFileStream networkFileStream, string path, string jsonPath = null)
- : base(networkFileStream, path, jsonPath) { }
-
- /// load from existing file
- public NetworkFileStreamPersister(string path, string jsonPath = null)
- : base(path, jsonPath) { }
-
- protected override void Dispose(bool disposing)
- {
- if (disposing)
- NetworkFileStream?.Dispose();
- base.Dispose(disposing);
- }
+ if (disposing)
+ NetworkFileStream?.Dispose();
+ base.Dispose(disposing);
}
}
diff --git a/Source/AaxDecrypter/TempFile.cs b/Source/AaxDecrypter/TempFile.cs
index f6b37ee4..34cfe093 100644
--- a/Source/AaxDecrypter/TempFile.cs
+++ b/Source/AaxDecrypter/TempFile.cs
@@ -1,6 +1,5 @@
using FileManager;
-#nullable enable
namespace AaxDecrypter;
public record TempFile
diff --git a/Source/AaxDecrypter/UnencryptedAudiobookDownloader.cs b/Source/AaxDecrypter/UnencryptedAudiobookDownloader.cs
index 9b9a4b4e..cab25152 100644
--- a/Source/AaxDecrypter/UnencryptedAudiobookDownloader.cs
+++ b/Source/AaxDecrypter/UnencryptedAudiobookDownloader.cs
@@ -1,34 +1,33 @@
using FileManager;
using System.Threading.Tasks;
-#nullable enable
-namespace AaxDecrypter
+
+namespace AaxDecrypter;
+
+public class UnencryptedAudiobookDownloader : AudiobookDownloadBase
{
- public class UnencryptedAudiobookDownloader : AudiobookDownloadBase
+ protected override long InputFilePosition => InputFileStream.WritePosition;
+
+ public UnencryptedAudiobookDownloader(string outDirectory, string cacheDirectory, IDownloadOptions dlLic)
+ : base(outDirectory, cacheDirectory, dlLic)
{
- protected override long InputFilePosition => InputFileStream.WritePosition;
+ AsyncSteps.Name = "Download Unencrypted Audiobook";
+ AsyncSteps["Step 1: Download Audiobook"] = Step_DownloadAndDecryptAudiobookAsync;
+ AsyncSteps["Step 2: Create Cue"] = Step_CreateCueAsync;
+ }
- public UnencryptedAudiobookDownloader(string outDirectory, string cacheDirectory, IDownloadOptions dlLic)
- : base(outDirectory, cacheDirectory, dlLic)
+ protected override async Task Step_DownloadAndDecryptAudiobookAsync()
+ {
+ await (InputFileStream.DownloadTask ?? Task.CompletedTask);
+
+ if (IsCanceled)
+ return false;
+ else
{
- AsyncSteps.Name = "Download Unencrypted Audiobook";
- AsyncSteps["Step 1: Download Audiobook"] = Step_DownloadAndDecryptAudiobookAsync;
- AsyncSteps["Step 2: Create Cue"] = Step_CreateCueAsync;
- }
-
- protected override async Task Step_DownloadAndDecryptAudiobookAsync()
- {
- await InputFileStream.DownloadTask;
-
- if (IsCanceled)
- return false;
- else
- {
- FinalizeDownload();
- var tempFile = GetNewTempFilePath(DownloadOptions.OutputFormat.ToString());
- FileUtility.SaferMove(InputFileStream.SaveFilePath, tempFile.FilePath);
- OnTempFileCreated(tempFile);
- return true;
- }
+ FinalizeDownload();
+ var tempFile = GetNewTempFilePath(DownloadOptions.OutputFormat.ToString());
+ FileUtility.SaferMove(InputFileStream.SaveFilePath, tempFile.FilePath);
+ OnTempFileCreated(tempFile);
+ return true;
}
}
}
diff --git a/Source/AppScaffolding/AppScaffolding.csproj b/Source/AppScaffolding/AppScaffolding.csproj
index 315dc9ed..e335f63b 100644
--- a/Source/AppScaffolding/AppScaffolding.csproj
+++ b/Source/AppScaffolding/AppScaffolding.csproj
@@ -3,6 +3,7 @@
 <TargetFramework>net10.0</TargetFramework>
 <Version>13.1.8.1</Version>
+ <Nullable>enable</Nullable>
diff --git a/Source/AppScaffolding/LibationScaffolding.cs b/Source/AppScaffolding/LibationScaffolding.cs
index 72f3de07..fbbb94ec 100644
--- a/Source/AppScaffolding/LibationScaffolding.cs
+++ b/Source/AppScaffolding/LibationScaffolding.cs
@@ -46,21 +46,19 @@ namespace AppScaffolding
public static Variety Variety { get; private set; }
// AppScaffolding
- private static Assembly _executingAssembly;
+ private static Assembly? _executingAssembly;
private static Assembly ExecutingAssembly
=> _executingAssembly ??= Assembly.GetExecutingAssembly();
// LibationWinForms or LibationCli
- private static Assembly _entryAssembly;
- private static Assembly EntryAssembly
+ private static Assembly? _entryAssembly;
+ private static Assembly? EntryAssembly
=> _entryAssembly ??= Assembly.GetEntryAssembly();
// previously: System.Reflection.Assembly.GetExecutingAssembly().GetName().Version;
- private static Version _buildVersion;
- public static Version BuildVersion
- => _buildVersion
- ??= new[] { ExecutingAssembly.GetName(), EntryAssembly.GetName() }
- .Max(a => a.Version);
+ private static Version? _buildVersion;
+ public static Version? BuildVersion
+ => _buildVersion ??= new[] { ExecutingAssembly.GetName(), EntryAssembly?.GetName() }.Max(a => a?.Version);
/// Run migrations before loading Configuration for the first time. Then load and return Configuration
public static Configuration RunPreConfigMigrations()
@@ -305,8 +303,8 @@ namespace AppScaffolding
Log.Logger.Information("Begin. {@DebugInfo}", new
{
- AppName = EntryAssembly.GetName().Name,
- Version = BuildVersion.ToString(),
+ AppName = EntryAssembly?.GetName().Name,
+ Version = BuildVersion?.ToString(),
ReleaseIdentifier,
Configuration.OS,
Environment.OSVersion,
@@ -342,14 +340,14 @@ namespace AppScaffolding
#nullable restore
private static void wireUpSystemEvents(Configuration configuration)
{
- LibraryCommands.LibrarySizeChanged += (object _, List libraryBooks)
+ LibraryCommands.LibrarySizeChanged += (object? _, List libraryBooks)
=> SearchEngineCommands.FullReIndex(libraryBooks);
LibraryCommands.BookUserDefinedItemCommitted += (_, books)
=> SearchEngineCommands.UpdateBooks(books);
}
- public static UpgradeProperties GetLatestRelease()
+ public static UpgradeProperties? GetLatestRelease()
{
// timed out
(var version, var latest, var zip) = getLatestRelease(TimeSpan.FromSeconds(10));
@@ -358,7 +356,7 @@ namespace AppScaffolding
return null;
// we have an update
- var zipUrl = zip?.BrowserDownloadUrl;
+ var zipUrl = zip.BrowserDownloadUrl;
Log.Logger.Information("Update available: {@DebugInfo}", new
{
@@ -369,7 +367,7 @@ namespace AppScaffolding
return new(zipUrl, latest.HtmlUrl, zip.Name, version, latest.Body);
}
- private static (Version releaseVersion, Octokit.Release, Octokit.ReleaseAsset) getLatestRelease(TimeSpan timeout)
+ private static (Version? releaseVersion, Octokit.Release?, Octokit.ReleaseAsset?) getLatestRelease(TimeSpan timeout)
{
try
{
@@ -385,7 +383,7 @@ namespace AppScaffolding
}
return (null, null, null);
}
- private static async System.Threading.Tasks.Task<(Version releaseVersion, Octokit.Release, Octokit.ReleaseAsset)> getLatestRelease()
+ private static async System.Threading.Tasks.Task<(Version? releaseVersion, Octokit.Release?, Octokit.ReleaseAsset?)> getLatestRelease()
{
const string ownerAccount = "rmcrackan";
const string repoName = "Libation";
@@ -404,7 +402,7 @@ namespace AppScaffolding
var bts = await gitHubClient.Repository.Content.GetRawContent(ownerAccount, repoName, ".releaseindex.json");
var releaseIndex = JObject.Parse(System.Text.Encoding.ASCII.GetString(bts));
- string regexPattern;
+ string? regexPattern;
try
{
@@ -414,6 +412,8 @@ namespace AppScaffolding
{
regexPattern = releaseIndex.Value(ReleaseIdentifier.ToString());
}
+ if (regexPattern is null)
+ return (null, null, null);
var regex = new System.Text.RegularExpressions.Regex(regexPattern, System.Text.RegularExpressions.RegexOptions.IgnoreCase);
@@ -516,7 +516,7 @@ namespace AppScaffolding
.Select(i => new JObject
{
{ "Id", i.Id },
- { "FileType", (int)FileTypes.GetFileTypeFromPath(i.Path) },
+ { "FileType", (int)FileTypes.GetFileTypeFromPath(i.Path!) },
{ "Path", new JObject{ { "Path", i.Path } } }
})
.ToArray();
diff --git a/Source/ApplicationServices/ApplicationServices.csproj b/Source/ApplicationServices/ApplicationServices.csproj
index 91127f17..7c5dcd6e 100644
--- a/Source/ApplicationServices/ApplicationServices.csproj
+++ b/Source/ApplicationServices/ApplicationServices.csproj
@@ -2,6 +2,7 @@
net10.0
+ enable
diff --git a/Source/ApplicationServices/DbContexts.cs b/Source/ApplicationServices/DbContexts.cs
index 8ae52402..9063914b 100644
--- a/Source/ApplicationServices/DbContexts.cs
+++ b/Source/ApplicationServices/DbContexts.cs
@@ -3,7 +3,6 @@ using LibationFileManager;
using Microsoft.EntityFrameworkCore;
using System.Collections.Generic;
-#nullable enable
namespace ApplicationServices
{
public static class DbContexts
diff --git a/Source/ApplicationServices/ExportDto.cs b/Source/ApplicationServices/ExportDto.cs
index 7221deaf..f170f7cf 100644
--- a/Source/ApplicationServices/ExportDto.cs
+++ b/Source/ApplicationServices/ExportDto.cs
@@ -4,7 +4,6 @@ using Newtonsoft.Json;
using System;
using System.Linq;
-#nullable enable
namespace ApplicationServices;
internal class ExportDto(LibraryBook libBook)
@@ -46,7 +45,7 @@ internal class ExportDto(LibraryBook libBook)
public string Description { get; } = libBook.Book.Description;
[Name("Publisher")]
- public string Publisher { get; } = libBook.Book.Publisher;
+ public string? Publisher { get; } = libBook.Book.Publisher;
[Name("Has PDF")]
public bool HasPdf { get; } = libBook.Book.HasPdf;
@@ -67,10 +66,10 @@ internal class ExportDto(LibraryBook libBook)
public float? CommunityRatingStory { get; } = ZeroIsNull(libBook.Book.Rating?.StoryRating);
[Name("Cover Id")]
- public string PictureId { get; } = libBook.Book.PictureId;
+ public string? PictureId { get; } = libBook.Book.PictureId;
[Name("Cover Id Large")]
- public string PictureLarge { get; } = libBook.Book.PictureLarge;
+ public string? PictureLarge { get; } = libBook.Book.PictureLarge;
[Name("Is Abridged?")]
public bool IsAbridged { get; } = libBook.Book.IsAbridged;
@@ -103,7 +102,7 @@ internal class ExportDto(LibraryBook libBook)
public string ContentType { get; } = libBook.Book.ContentType.ToString();
[Name("Language")]
- public string Language { get; } = libBook.Book.Language;
+ public string? Language { get; } = libBook.Book.Language;
[Name("Last Downloaded")]
public DateTime? LastDownloaded { get; } = libBook.Book.UserDefinedItem.LastDownloaded;
diff --git a/Source/ApplicationServices/ISearchEngine.cs b/Source/ApplicationServices/ISearchEngine.cs
index d7e171e9..89e2b065 100644
--- a/Source/ApplicationServices/ISearchEngine.cs
+++ b/Source/ApplicationServices/ISearchEngine.cs
@@ -1,6 +1,5 @@
using LibationSearchEngine;
-#nullable enable
namespace ApplicationServices;
public interface ISearchEngine
diff --git a/Source/ApplicationServices/LibraryCommands.cs b/Source/ApplicationServices/LibraryCommands.cs
index ae165e7e..8e8a491c 100644
--- a/Source/ApplicationServices/LibraryCommands.cs
+++ b/Source/ApplicationServices/LibraryCommands.cs
@@ -6,7 +6,6 @@ using Dinah.Core.Logging;
using DtoImporterService;
using FileManager;
using LibationFileManager;
-using Microsoft.Extensions.DependencyModel;
using Newtonsoft.Json.Linq;
using Serilog;
using System;
@@ -16,7 +15,6 @@ using System.Text;
using System.Threading.Tasks;
using static DtoImporterService.PerfLogger;
-#nullable enable
namespace ApplicationServices
{
public static class LibraryCommands
@@ -184,16 +182,11 @@ namespace ApplicationServices
public static Task ImportSingleToDbAsync(AudibleApi.Common.Item item, string accountId, string localeName) => Task.Run(() => importSingleToDb(item, accountId, localeName));
private static int importSingleToDb(AudibleApi.Common.Item item, string accountId, string localeName)
{
- ArgumentValidator.EnsureNotNull(item, "item");
- ArgumentValidator.EnsureNotNull(accountId, "accountId");
- ArgumentValidator.EnsureNotNull(localeName, "localeName");
+ ArgumentValidator.EnsureNotNull(item, nameof(item));
+ ArgumentValidator.EnsureNotNull(accountId, nameof(accountId));
+ ArgumentValidator.EnsureNotNull(localeName, nameof(localeName));
- var importItem = new ImportItem
- {
- DtoItem = item,
- AccountId = accountId,
- LocaleName = localeName
- };
+ var importItem = new ImportItem(item, accountId, localeName);
var importItems = new List { importItem };
var validator = new LibraryValidator();
@@ -207,6 +200,9 @@ namespace ApplicationServices
return DoDbSizeChangeOperation(ctx =>
{
+ if (importItem.DtoItem.ProductId is null)
+ return;
+
var bookImporter = new BookImporter(ctx);
bookImporter.Import(importItems);
var book = ctx.LibraryBooks.FirstOrDefault(lb => lb.Book.AudibleProductId == importItem.DtoItem.ProductId);
@@ -291,6 +287,7 @@ namespace ApplicationServices
private static async Task> scanAccountAsync(ApiExtended apiExtended, Account account, LibraryOptions libraryOptions, LogArchiver? archiver)
{
ArgumentValidator.EnsureNotNull(account, nameof(account));
+ var locale = ArgumentValidator.EnsureNotNull(account.Locale, nameof(account.Locale));
Log.Logger.Information("ImportLibraryAsync. {@DebugInfo}", new
{
@@ -307,7 +304,7 @@ namespace ApplicationServices
await logDtoItemsAsync(dtoItems);
- return dtoItems.Select(d => new ImportItem { DtoItem = d, AccountId = account.AccountId, LocaleName = account.Locale?.Name }).ToList();
+ return dtoItems.Select(d => new ImportItem(d, account.AccountId, locale.Name)).ToList();
}
catch(ImportValidationException ex)
{
diff --git a/Source/ApplicationServices/LibraryExporter.cs b/Source/ApplicationServices/LibraryExporter.cs
index afa73620..497a1990 100644
--- a/Source/ApplicationServices/LibraryExporter.cs
+++ b/Source/ApplicationServices/LibraryExporter.cs
@@ -8,7 +8,6 @@ using System.Globalization;
using System.Linq;
using System.Reflection;
-#nullable enable
namespace ApplicationServices;
public static class LibraryExporter
diff --git a/Source/ApplicationServices/MainSearchEngine.cs b/Source/ApplicationServices/MainSearchEngine.cs
index 52783801..e94e0fd6 100644
--- a/Source/ApplicationServices/MainSearchEngine.cs
+++ b/Source/ApplicationServices/MainSearchEngine.cs
@@ -1,6 +1,5 @@
using LibationSearchEngine;
-#nullable enable
namespace ApplicationServices;
///
diff --git a/Source/ApplicationServices/SearchEngineCommands.cs b/Source/ApplicationServices/SearchEngineCommands.cs
index 7b292e44..c3867cfe 100644
--- a/Source/ApplicationServices/SearchEngineCommands.cs
+++ b/Source/ApplicationServices/SearchEngineCommands.cs
@@ -29,7 +29,7 @@ namespace ApplicationServices
}
#endregion
- public static event EventHandler SearchEngineUpdated;
+ public static event EventHandler? SearchEngineUpdated;
#region Update
private static bool isUpdating;
@@ -85,7 +85,7 @@ namespace ApplicationServices
action(new SearchEngine());
if (!prevIsUpdating)
- SearchEngineUpdated?.Invoke(null, null);
+ SearchEngineUpdated?.Invoke(null, EventArgs.Empty);
}
finally
{
diff --git a/Source/ApplicationServices/TempSearchEngine.cs b/Source/ApplicationServices/TempSearchEngine.cs
index 527f67fe..c345042b 100644
--- a/Source/ApplicationServices/TempSearchEngine.cs
+++ b/Source/ApplicationServices/TempSearchEngine.cs
@@ -3,7 +3,6 @@ using LibationFileManager;
using LibationSearchEngine;
using System.Collections.Generic;
-#nullable enable
namespace ApplicationServices;
///
diff --git a/Source/AudibleUtilities/Account.cs b/Source/AudibleUtilities/Account.cs
index 8fe9e3f6..b8895836 100644
--- a/Source/AudibleUtilities/Account.cs
+++ b/Source/AudibleUtilities/Account.cs
@@ -1,99 +1,98 @@
using System;
-using System.Collections.Generic;
-using System.Linq;
+using System.Diagnostics.CodeAnalysis;
using AudibleApi;
using AudibleApi.Authorization;
using Dinah.Core;
using Newtonsoft.Json;
-namespace AudibleUtilities
+namespace AudibleUtilities;
+
+public class Account : IUpdatable
{
- public class Account : IUpdatable
+ public event EventHandler? Updated;
+ private void update(object? sender = null, EventArgs? e = null)
+ => Updated?.Invoke(this, EventArgs.Empty);
+
+ // canonical. immutable. email or phone number
+ public string AccountId { get; }
+
+ // user-friendly, non-canonical name. mutable
+ public string? AccountName
{
- public event EventHandler Updated;
- private void update(object sender = null, EventArgs e = null)
- => Updated?.Invoke(this, new EventArgs());
-
- // canonical. immutable. email or phone number
- public string AccountId { get; }
-
- // user-friendly, non-canonical name. mutable
- public string AccountName
+ get => field;
+ set
{
- get => field;
- set
- {
- if (string.IsNullOrWhiteSpace(value))
- return;
- var v = value.Trim();
- if (v == field)
- return;
- field = v;
- update();
- }
+ if (string.IsNullOrWhiteSpace(value))
+ return;
+ var v = value.Trim();
+ if (v == field)
+ return;
+ field = v;
+ update();
}
-
- // whether to include this account when scanning libraries.
- // technically this is an app setting; not an attribute of account. but since it's managed with accounts, it makes sense to put this exception-to-the-rule here
- public bool LibraryScan
- {
- get => field;
- set
- {
- if (value == field)
- return;
- field = value;
- update();
- }
- }
-
- /// aka: activation bytes
- public string DecryptKey
- {
- get => field ?? "";
- set
- {
- var v = (value ?? "").Trim();
- if (v == field)
- return;
- field = v;
- update();
- }
- }
-
- public Identity IdentityTokens
- {
- get => field;
- set
- {
- if (field is null && value is null)
- return;
-
- if (field is not null)
- field.Updated -= update;
-
- if (value is not null)
- value.Updated += update;
-
- field = value;
- update();
- }
- }
-
- [JsonIgnore]
- public Locale Locale => IdentityTokens?.Locale;
-
- public Account(string accountId)
- {
- AccountId = ArgumentValidator.EnsureNotNullOrWhiteSpace(accountId, nameof(accountId)).Trim();
- }
-
- public override string ToString() => $"{AccountId} - {Locale?.Name ?? "[empty]"}";
-
- public string MaskedLogEntry => @$"AccountId={mask(AccountId)}|AccountName={mask(AccountName)}|Locale={Locale?.Name ?? "[empty]"}";
- private static string mask(string str)
- => str is null ? "[null]"
- : str == string.Empty ? "[empty]"
- : str.ToMask();
}
+
+ // whether to include this account when scanning libraries.
+ // technically this is an app setting; not an attribute of account. but since it's managed with accounts, it makes sense to put this exception-to-the-rule here
+ public bool LibraryScan
+ {
+ get => field;
+ set
+ {
+ if (value == field)
+ return;
+ field = value;
+ update();
+ }
+ }
+
+ /// aka: activation bytes
+ [AllowNull]
+ public string? DecryptKey
+ {
+ get => field;
+ set
+ {
+ var v = (value ?? "").Trim();
+ if (v == field)
+ return;
+ field = v;
+ update();
+ }
+ }
+
+ public Identity? IdentityTokens
+ {
+ get => field;
+ set
+ {
+ if (field is null && value is null)
+ return;
+
+ if (field is not null)
+ field.Updated -= update;
+
+ if (value is not null)
+ value.Updated += update;
+
+ field = value;
+ update();
+ }
+ }
+
+ [JsonIgnore]
+ public Locale? Locale => IdentityTokens?.Locale;
+
+ public Account(string accountId)
+ {
+ AccountId = ArgumentValidator.EnsureNotNullOrWhiteSpace(accountId, nameof(accountId)).Trim();
+ }
+
+ public override string ToString() => $"{AccountId} - {Locale?.Name ?? "[empty]"}";
+
+ public string MaskedLogEntry => @$"AccountId={mask(AccountId)}|AccountName={mask(AccountName)}|Locale={Locale?.Name ?? "[empty]"}";
+ private static string mask(string? str)
+ => str is null ? "[null]"
+ : str == string.Empty ? "[empty]"
+ : str.ToMask();
}
diff --git a/Source/AudibleUtilities/AccountsSettings.cs b/Source/AudibleUtilities/AccountsSettings.cs
index 8d7c4ae7..c9ed0677 100644
--- a/Source/AudibleUtilities/AccountsSettings.cs
+++ b/Source/AudibleUtilities/AccountsSettings.cs
@@ -6,154 +6,152 @@ using AudibleApi.Authorization;
using Dinah.Core;
using Newtonsoft.Json;
-#nullable enable
-namespace AudibleUtilities
+namespace AudibleUtilities;
+
+// 'AccountsSettings' is intentionally NOT IEnumerable<> so that properties can be added/extended
+// from newtonsoft (https://www.newtonsoft.com/json/help/html/SerializationGuide.htm):
+// .NET : IList, IEnumerable, IList, Array
+// JSON : Array (properties on the collection will not be serialized)
+public class AccountsSettings : IUpdatable
{
- // 'AccountsSettings' is intentionally NOT IEnumerable<> so that properties can be added/extended
- // from newtonsoft (https://www.newtonsoft.com/json/help/html/SerializationGuide.htm):
- // .NET : IList, IEnumerable, IList, Array
- // JSON : Array (properties on the collection will not be serialized)
- public class AccountsSettings : IUpdatable
+ public event EventHandler? Updated;
+ private void update(object? sender = null, EventArgs? e = null)
{
- public event EventHandler? Updated;
- private void update(object? sender = null, EventArgs? e = null)
- {
- foreach (var account in Accounts)
- validate(account);
- update_no_validate();
- }
- private void update_no_validate() => Updated?.Invoke(this, new EventArgs());
-
- public AccountsSettings() { }
-
- // for some reason this will make the json instantiator use _accounts_json.set()
- [JsonConstructor]
- protected AccountsSettings(List accountsSettings) { }
-
- #region Accounts
- private List _accounts_backing = new List();
- [JsonProperty(PropertyName = nameof(Accounts))]
- private List _accounts_json
- {
- get => _accounts_backing;
- // 'set' is only used by json deser
- set
- {
- if (value is null)
- return;
-
- foreach (var account in value)
- _add(account);
-
- update_no_validate();
- }
- }
-
- private string? _cdm;
- [JsonProperty]
- public string? Cdm
- {
- get => _cdm;
- set
- {
- if (value is null)
- return;
-
- _cdm = value;
- update_no_validate();
- }
- }
-
- [JsonIgnore]
- public IReadOnlyList Accounts => _accounts_json.AsReadOnly();
- #endregion
-
- #region de/serialize
- public static AccountsSettings? FromJson(string json)
- => JsonConvert.DeserializeObject(json, Identity.GetJsonSerializerSettings());
-
- public string ToJson(Formatting formatting = Formatting.Indented)
- => JsonConvert.SerializeObject(this, formatting, Identity.GetJsonSerializerSettings());
- #endregion
-
- // more common naming convention alias for internal collection
- public IReadOnlyList GetAll() => Accounts;
-
- public Account Upsert(string accountId, string? locale)
- {
- var acct = GetAccount(accountId, locale);
-
- if (acct is not null)
- return acct;
-
- var l = Localization.Get(locale);
- var id = new Identity(l);
-
- var account = new Account(accountId) { IdentityTokens = id };
- Add(account);
- return account;
- }
-
- public void Add(Account account)
- {
- _add(account);
- update_no_validate();
- }
-
- public void _add(Account account)
- {
+ foreach (var account in Accounts)
validate(account);
+ update_no_validate();
+ }
+ private void update_no_validate() => Updated?.Invoke(this, new EventArgs());
- _accounts_backing.Add(account);
- account.Updated += update;
- }
+ public AccountsSettings() { }
- public Account? GetAccount(string accountId, string? locale)
+ // for some reason this will make the json instantiator use _accounts_json.set()
+ [JsonConstructor]
+ protected AccountsSettings(List accountsSettings) { }
+
+ #region Accounts
+ private List _accounts_backing = new List();
+ [JsonProperty(PropertyName = nameof(Accounts))]
+ private List _accounts_json
+ {
+ get => _accounts_backing;
+ // 'set' is only used by json deser
+ set
{
- if (locale is null)
- return null;
+ if (value is null)
+ return;
- return Accounts.SingleOrDefault(a => a.AccountId == accountId && a.IdentityTokens.Locale.Name == locale);
- }
+ foreach (var account in value)
+ _add(account);
- public bool Delete(string accountId, string locale)
- {
- var acct = GetAccount(accountId, locale);
- if (acct is null)
- return false;
- return Delete(acct);
- }
-
- public bool Delete(Account account)
- {
- if (!_accounts_backing.Contains(account))
- return false;
-
- account.Updated -= update;
- var result = _accounts_backing.Remove(account);
update_no_validate();
- return result;
- }
-
- private void validate(Account account)
- {
- ArgumentValidator.EnsureNotNull(account, nameof(account));
-
- var accountId = account.AccountId;
- var locale = account?.IdentityTokens?.Locale?.Name;
-
- var acct = GetAccount(accountId, locale);
-
- // new: ok
- if (acct is null)
- return;
-
- // same account instance: ok
- if (acct == account)
- return;
-
- // same account id + locale, different instance: bad
- throw new InvalidOperationException("Cannot add an account with the same account Id and Locale");
}
}
+
+ private string? _cdm;
+ [JsonProperty]
+ public string? Cdm
+ {
+ get => _cdm;
+ set
+ {
+ if (value is null)
+ return;
+
+ _cdm = value;
+ update_no_validate();
+ }
+ }
+
+ [JsonIgnore]
+ public IReadOnlyList Accounts => _accounts_json.AsReadOnly();
+ #endregion
+
+ #region de/serialize
+ public static AccountsSettings? FromJson(string json)
+ => JsonConvert.DeserializeObject(json, Identity.GetJsonSerializerSettings());
+
+ public string ToJson(Formatting formatting = Formatting.Indented)
+ => JsonConvert.SerializeObject(this, formatting, Identity.GetJsonSerializerSettings());
+ #endregion
+
+ // more common naming convention alias for internal collection
+ public IReadOnlyList GetAll() => Accounts;
+
+ public Account Upsert(string accountId, string? locale)
+ {
+ var acct = GetAccount(accountId, locale);
+
+ if (acct is not null)
+ return acct;
+
+ var l = Localization.Get(locale);
+ var id = new Identity(l);
+
+ var account = new Account(accountId) { IdentityTokens = id };
+ Add(account);
+ return account;
+ }
+
+ public void Add(Account account)
+ {
+ _add(account);
+ update_no_validate();
+ }
+
+ public void _add(Account account)
+ {
+ validate(account);
+
+ _accounts_backing.Add(account);
+ account.Updated += update;
+ }
+
+ public Account? GetAccount(string accountId, string? locale)
+ {
+ if (locale is null)
+ return null;
+
+ return Accounts.SingleOrDefault(a => a.AccountId == accountId && a.Locale?.Name == locale);
+ }
+
+ public bool Delete(string accountId, string locale)
+ {
+ var acct = GetAccount(accountId, locale);
+ if (acct is null)
+ return false;
+ return Delete(acct);
+ }
+
+ public bool Delete(Account account)
+ {
+ if (!_accounts_backing.Contains(account))
+ return false;
+
+ account.Updated -= update;
+ var result = _accounts_backing.Remove(account);
+ update_no_validate();
+ return result;
+ }
+
+ private void validate(Account account)
+ {
+ ArgumentValidator.EnsureNotNull(account, nameof(account));
+
+ var accountId = account.AccountId;
+ var locale = account?.IdentityTokens?.Locale?.Name;
+
+ var acct = GetAccount(accountId, locale);
+
+ // new: ok
+ if (acct is null)
+ return;
+
+ // same account instance: ok
+ if (acct == account)
+ return;
+
+ // same account id + locale, different instance: bad
+ throw new InvalidOperationException("Cannot add an account with the same account Id and Locale");
+ }
}
diff --git a/Source/AudibleUtilities/AccountsSettingsPersister.cs b/Source/AudibleUtilities/AccountsSettingsPersister.cs
index 914d2d8a..b1d09e53 100644
--- a/Source/AudibleUtilities/AccountsSettingsPersister.cs
+++ b/Source/AudibleUtilities/AccountsSettingsPersister.cs
@@ -3,28 +3,27 @@ using AudibleApi.Authorization;
using Dinah.Core.IO;
using Newtonsoft.Json;
-namespace AudibleUtilities
+namespace AudibleUtilities;
+
+public class AccountsSettingsPersister : JsonFilePersister
{
- public class AccountsSettingsPersister : JsonFilePersister
- {
- public static event EventHandler Saving;
- public static event EventHandler Saved;
+ public static event EventHandler? Saving;
+ public static event EventHandler? Saved;
- protected override void OnSaving() => Saving?.Invoke(null, null);
- protected override void OnSaved() => Saved?.Invoke(null, null);
+ protected override void OnSaving() => Saving?.Invoke(null, EventArgs.Empty);
+ protected override void OnSaved() => Saved?.Invoke(null, EventArgs.Empty);
- /// Alias for Target
- public AccountsSettings AccountsSettings => Target;
+ /// Alias for Target
+ public AccountsSettings AccountsSettings => Target;
- /// uses path. create file if doesn't yet exist
- public AccountsSettingsPersister(AccountsSettings target, string path, string jsonPath = null)
- : base(target, path, jsonPath) { }
+ /// uses path. create file if doesn't yet exist
+ public AccountsSettingsPersister(AccountsSettings target, string path, string? jsonPath = null)
+ : base(target, path, jsonPath) { }
- /// load from existing file
- public AccountsSettingsPersister(string path, string jsonPath = null)
- : base(path, jsonPath) { }
+ /// load from existing file
+ public AccountsSettingsPersister(string path, string? jsonPath = null)
+ : base(path, jsonPath) { }
- protected override JsonSerializerSettings GetSerializerSettings()
- => Identity.GetJsonSerializerSettings();
- }
+ protected override JsonSerializerSettings GetSerializerSettings()
+ => Identity.GetJsonSerializerSettings();
}
diff --git a/Source/AudibleUtilities/ApiExtended.cs b/Source/AudibleUtilities/ApiExtended.cs
index f97a087e..ec966831 100644
--- a/Source/AudibleUtilities/ApiExtended.cs
+++ b/Source/AudibleUtilities/ApiExtended.cs
@@ -13,300 +13,300 @@ using System.Threading;
using System.Threading.Channels;
using System.Threading.Tasks;
-#nullable enable
-namespace AudibleUtilities
+namespace AudibleUtilities;
+
+/// USE THIS from within Libation. It wraps the call with correct JSONPath
+public class ApiExtended
{
- /// USE THIS from within Libation. It wraps the call with correct JSONPath
- public class ApiExtended
+ public static Func? LoginChoiceFactory { get; set; }
+ public Api Api { get; private set; }
+
+ private const int MaxConcurrency = 10;
+ private const int BatchSize = 50;
+
+ private ApiExtended(Api api) => Api = api;
+
+ /// Get api from existing tokens else login with 'eager' choice. External browser url is provided. Response can be external browser login or continuing with native api callbacks.
+ public static async Task CreateAsync(Account account)
{
- public static Func? LoginChoiceFactory { get; set; }
- public Api Api { get; private set; }
+ ArgumentValidator.EnsureNotNull(account, nameof(account));
+ ArgumentValidator.EnsureNotNull(account.AccountId, nameof(account.AccountId));
+ var locale = ArgumentValidator.EnsureNotNull(account.Locale, nameof(account.Locale));
- private const int MaxConcurrency = 10;
- private const int BatchSize = 50;
-
- private ApiExtended(Api api) => Api = api;
-
- /// Get api from existing tokens else login with 'eager' choice. External browser url is provided. Response can be external browser login or continuing with native api callbacks.
- public static async Task CreateAsync(Account account)
+ try
{
- ArgumentValidator.EnsureNotNull(account, nameof(account));
- ArgumentValidator.EnsureNotNull(account.AccountId, nameof(account.AccountId));
- ArgumentValidator.EnsureNotNull(account.Locale, nameof(account.Locale));
-
- try
+ Serilog.Log.Logger.Information("{@DebugInfo}", new
{
- Serilog.Log.Logger.Information("{@DebugInfo}", new
- {
- AccountMaskedLogEntry = account.MaskedLogEntry
- });
+ AccountMaskedLogEntry = account.MaskedLogEntry
+ });
- var api = await EzApiCreator.GetApiAsync(
- account.Locale,
- AudibleApiStorage.AccountsSettingsFile,
- account.GetIdentityTokensJsonPath());
- return new ApiExtended(api);
- }
- catch
- {
- if (LoginChoiceFactory is null)
- throw new InvalidOperationException($"The UI module must first set {nameof(LoginChoiceFactory)} before attempting to create the api");
-
- Serilog.Log.Logger.Information("{@DebugInfo}", new
- {
- LoginType = nameof(ILoginChoiceEager),
- Account = account.MaskedLogEntry ?? "[null]",
- LocaleName = account.Locale?.Name
- });
-
- var api = await EzApiCreator.GetApiAsync(
- LoginChoiceFactory(account),
- account.Locale,
+ var api = await EzApiCreator.GetApiAsync(
+ locale,
AudibleApiStorage.AccountsSettingsFile,
account.GetIdentityTokensJsonPath());
+ return new ApiExtended(api);
+ }
+ catch
+ {
+ if (LoginChoiceFactory is null)
+ throw new InvalidOperationException($"The UI module must first set {nameof(LoginChoiceFactory)} before attempting to create the api");
- return new ApiExtended(api);
+ Serilog.Log.Logger.Information("{@DebugInfo}", new
+ {
+ LoginType = nameof(ILoginChoiceEager),
+ Account = account.MaskedLogEntry ?? "[null]",
+ LocaleName = locale.Name
+ });
+
+ var api = await EzApiCreator.GetApiAsync(
+ LoginChoiceFactory(account),
+ locale,
+ AudibleApiStorage.AccountsSettingsFile,
+ account.GetIdentityTokensJsonPath());
+
+ return new ApiExtended(api);
+ }
+ }
+
+ private static AsyncRetryPolicy policy { get; }
+ = Policy.Handle()
+ // 2 retries == 3 total
+ .RetryAsync(2);
+
+ public Task> GetLibraryValidatedAsync(LibraryOptions libraryOptions)
+ {
+ // bug on audible's side. the 1st time after a long absence, a query to get library will return without titles or authors. a subsequent identical query will be successful. this is true whether or not tokens are refreshed
+ // worse, this 1st dummy call doesn't seem to help:
+ // var page = await api.GetLibraryAsync(new AudibleApi.LibraryOptions { NumberOfResultPerPage = 1, PageNumber = 1, PurchasedAfter = DateTime.Now.AddYears(-20), ResponseGroups = AudibleApi.LibraryOptions.ResponseGroupOptions.ALL_OPTIONS });
+ // i don't want to incur the cost of making a full dummy call every time because it fails sometimes
+ return policy.ExecuteAsync(() => getItemsAsync(libraryOptions));
+ }
+
+ ///
+ /// A debugging method used to simulate a library scan from a LibraryScans.zip json file.
+ /// Simply replace the Api call to GetLibraryItemsPagesAsync() with a call to this method.
+ ///
+ private static async IAsyncEnumerable- GetItemsFromJsonFile()
+ {
+ var libraryScanJsonPath = @"Path/to/libraryscan.json";
+ using var jsonFile = System.IO.File.OpenText(libraryScanJsonPath);
+
+ var json = await JToken.ReadFromAsync(new Newtonsoft.Json.JsonTextReader(jsonFile));
+ if (json?["Items"] is not JArray items)
+ yield break;
+
+ foreach (var batch in items.OfType().Select(Item.FromJson).OfType
- ().Chunk(BatchSize))
+ yield return batch;
+ }
+
+ private async Task
> getItemsAsync(LibraryOptions libraryOptions)
+ {
+ Serilog.Log.Logger.Debug("Beginning library scan.");
+
+ List- items = new();
+ var sw = Stopwatch.StartNew();
+ var totalTime = TimeSpan.Zero;
+ using var semaphore = new SemaphoreSlim(MaxConcurrency);
+
+ var episodeChannel = Channel.CreateUnbounded(new UnboundedChannelOptions { SingleReader = true, SingleWriter = true });
+ var batchReaderTask = readAllAsinsAsync(episodeChannel.Reader, semaphore);
+
+ //Scan the library for all added books.
+ //Get relationship asins from episode-type items and write them to episodeChannel where they will be batched and queried.
+ await foreach (var itemsBatch in Api.GetLibraryItemsPagesAsync(libraryOptions, BatchSize, semaphore))
+ {
+ if (Configuration.Instance.ImportEpisodes)
+ {
+ var episodes = itemsBatch.Where(i => i.IsEpisodes).ToList();
+ var series = itemsBatch.Where(i => i.IsSeriesParent).ToList();
+
+ var parentAsins = episodes
+ .SelectMany(i => i.Relationships ?? [])
+ .Where(r => r.RelationshipToProduct == RelationshipToProduct.Parent)
+ .Select(r => r.Asin)
+ .OfType();
+
+ var episodeAsins = series
+ .SelectMany(i => i.Relationships ?? [])
+ .Where(r => r.RelationshipToProduct == RelationshipToProduct.Child && r.RelationshipType == RelationshipType.Episode)
+ .Select(r => r.Asin)
+ .OfType();
+
+ foreach (var asin in parentAsins.Concat(episodeAsins))
+ episodeChannel.Writer.TryWrite(asin);
+
+ items.AddRange(episodes);
+ items.AddRange(series);
}
- }
- private static AsyncRetryPolicy policy { get; }
- = Policy.Handle()
- // 2 retries == 3 total
- .RetryAsync(2);
-
- public Task
> GetLibraryValidatedAsync(LibraryOptions libraryOptions)
- {
- // bug on audible's side. the 1st time after a long absence, a query to get library will return without titles or authors. a subsequent identical query will be successful. this is true whether or not tokens are refreshed
- // worse, this 1st dummy call doesn't seem to help:
- // var page = await api.GetLibraryAsync(new AudibleApi.LibraryOptions { NumberOfResultPerPage = 1, PageNumber = 1, PurchasedAfter = DateTime.Now.AddYears(-20), ResponseGroups = AudibleApi.LibraryOptions.ResponseGroupOptions.ALL_OPTIONS });
- // i don't want to incur the cost of making a full dummy call every time because it fails sometimes
- return policy.ExecuteAsync(() => getItemsAsync(libraryOptions));
+ var booksInBatch
+ = itemsBatch
+ .Where(i => !i.IsSeriesParent && !i.IsEpisodes)
+ .Where(i => i.IsAyce is not true || Configuration.Instance.ImportPlusTitles);
+ items.AddRange(booksInBatch);
}
- ///
- /// A debugging method used to simulate a library scan from a LibraryScans.zip json file.
- /// Simply replace the Api call to GetLibraryItemsPagesAsync() with a call to this method.
- ///
- private static async IAsyncEnumerable- GetItemsFromJsonFile()
+ sw.Stop();
+ totalTime += sw.Elapsed;
+ Serilog.Log.Logger.Debug("Library scan complete after {elappsed_ms} ms. Found {count} books and series. Waiting on series episode scans to complete.", sw.ElapsedMilliseconds, items.Count);
+ sw.Restart();
+
+ //Signal that we're done adding asins
+ episodeChannel.Writer.Complete();
+
+ //Wait for all episodes/parents to be retrieved
+ var allEps = await batchReaderTask;
+
+ sw.Stop();
+ totalTime += sw.Elapsed;
+ Serilog.Log.Logger.Debug("Episode scan complete after {elappsed_ms} ms. Found {count} episodes and series .", sw.ElapsedMilliseconds, allEps.Count);
+ sw.Restart();
+
+ Serilog.Log.Logger.Debug("Begin indexing series episodes");
+ items.AddRange(allEps);
+
+ //Set the Item.Series info for episodes and parents.
+ foreach (var parent in items.Where(i => i.IsSeriesParent))
{
- var libraryScanJsonPath = @"Path/to/libraryscan.json";
- using var jsonFile = System.IO.File.OpenText(libraryScanJsonPath);
-
- var json = await JToken.ReadFromAsync(new Newtonsoft.Json.JsonTextReader(jsonFile));
- if (json?["Items"] is not JArray items)
- yield break;
-
- foreach (var batch in items.Select(i => Item.FromJson(i as JObject)).Chunk(BatchSize))
- yield return batch;
+ var children = items.Where(i => i.IsEpisodes && i.Relationships?.Any(r => r.Asin == parent.Asin) is true);
+ SetSeries(parent, children);
}
- private async Task
> getItemsAsync(LibraryOptions libraryOptions)
- {
- Serilog.Log.Logger.Debug("Beginning library scan.");
+ int orphansRemoved = items.RemoveAll(i => (i.IsEpisodes || i.IsSeriesParent) && i.Series is null);
+ if (orphansRemoved > 0)
+ Serilog.Log.Debug("{orphansRemoved} podcast orphans not imported", orphansRemoved);
- List- items = new();
+ sw.Stop();
+ totalTime += sw.Elapsed;
+ Serilog.Log.Logger.Information("Completed indexing series episodes after {elappsed_ms} ms.", sw.ElapsedMilliseconds);
+ Serilog.Log.Logger.Information($"Completed library scan in {totalTime.TotalMilliseconds:F0} ms.");
+
+ Array.ForEach(ISanitizer.GetAllSanitizers(), s => s.Sanitize(items));
+ var allExceptions = IValidator.GetAllValidators().SelectMany(v => v.Validate(items)).ToList();
+ if (allExceptions?.Count > 0)
+ throw new ImportValidationException(items, allExceptions);
+
+ return items;
+ }
+
+ #region episodes and podcasts
+
+ /// <summary>
+ /// Read asins from the channel and request catalog item info in batches of <see cref="BatchSize"/>. Blocks until <paramref name="channelReader"/> is closed.
+ /// </summary>
+ /// <param name="channelReader">Input asins to batch</param>
+ /// <param name="semaphore">Shared semaphore to limit concurrency</param>
+ /// <returns>All <see cref="Item"/>s of asins written to the channel.</returns>
+ private async Task
> readAllAsinsAsync(ChannelReader channelReader, SemaphoreSlim semaphore)
+ {
+ int batchNum = 1;
+ List>> getTasks = new();
+
+ while (await channelReader.WaitToReadAsync())
+ {
+ List asins = new();
+
+ while (asins.Count < BatchSize && await channelReader.WaitToReadAsync())
+ {
+ var asin = await channelReader.ReadAsync();
+
+ if (!asins.Contains(asin))
+ asins.Add(asin);
+ }
+ await semaphore.WaitAsync();
+ getTasks.Add(getProductsAsync(batchNum++, asins, semaphore));
+ }
+
+ var completed = await Task.WhenAll(getTasks);
+ //We only want Series parents and Series episodes. Exclude other relationship types (e.g. 'season')
+ return completed.SelectMany(l => l).Where(i => i.IsSeriesParent || i.IsEpisodes).ToList();
+ }
+
+ private async Task> getProductsAsync(int batchNum, List asins, SemaphoreSlim semaphore)
+ {
+ Serilog.Log.Logger.Debug($"Batch {batchNum} Begin: Fetching {asins.Count} asins");
+ try
+ {
var sw = Stopwatch.StartNew();
- var totalTime = TimeSpan.Zero;
- using var semaphore = new SemaphoreSlim(MaxConcurrency);
-
- var episodeChannel = Channel.CreateUnbounded(new UnboundedChannelOptions { SingleReader = true, SingleWriter = true });
- var batchReaderTask = readAllAsinsAsync(episodeChannel.Reader, semaphore);
-
- //Scan the library for all added books.
- //Get relationship asins from episode-type items and write them to episodeChannel where they will be batched and queried.
- await foreach (var itemsBatch in Api.GetLibraryItemsPagesAsync(libraryOptions, BatchSize, semaphore))
- {
- if (Configuration.Instance.ImportEpisodes)
- {
- var episodes = itemsBatch.Where(i => i.IsEpisodes).ToList();
- var series = itemsBatch.Where(i => i.IsSeriesParent).ToList();
-
- var parentAsins = episodes
- .SelectMany(i => i.Relationships)
- .Where(r => r.RelationshipToProduct == RelationshipToProduct.Parent)
- .Select(r => r.Asin);
-
- var episodeAsins = series
- .SelectMany(i => i.Relationships)
- .Where(r => r.RelationshipToProduct == RelationshipToProduct.Child && r.RelationshipType == RelationshipType.Episode)
- .Select(r => r.Asin);
-
- foreach (var asin in parentAsins.Concat(episodeAsins))
- episodeChannel.Writer.TryWrite(asin);
-
- items.AddRange(episodes);
- items.AddRange(series);
- }
-
- var booksInBatch
- = itemsBatch
- .Where(i => !i.IsSeriesParent && !i.IsEpisodes)
- .Where(i => i.IsAyce is not true || Configuration.Instance.ImportPlusTitles);
- items.AddRange(booksInBatch);
- }
-
+ var items = await Api.GetCatalogProductsAsync(asins, CatalogOptions.ResponseGroupOptions.Rating | CatalogOptions.ResponseGroupOptions.Media
+ | CatalogOptions.ResponseGroupOptions.Relationships | CatalogOptions.ResponseGroupOptions.ProductDesc
+ | CatalogOptions.ResponseGroupOptions.Contributors | CatalogOptions.ResponseGroupOptions.ProvidedReview
+ | CatalogOptions.ResponseGroupOptions.ProductPlans | CatalogOptions.ResponseGroupOptions.Series
+ | CatalogOptions.ResponseGroupOptions.CategoryLadders | CatalogOptions.ResponseGroupOptions.ProductExtendedAttrs);
sw.Stop();
- totalTime += sw.Elapsed;
- Serilog.Log.Logger.Debug("Library scan complete after {elappsed_ms} ms. Found {count} books and series. Waiting on series episode scans to complete.", sw.ElapsedMilliseconds, items.Count);
- sw.Restart();
- //Signal that we're done adding asins
- episodeChannel.Writer.Complete();
-
- //Wait for all episodes/parents to be retrived
- var allEps = await batchReaderTask;
-
- sw.Stop();
- totalTime += sw.Elapsed;
- Serilog.Log.Logger.Debug("Episode scan complete after {elappsed_ms} ms. Found {count} episodes and series .", sw.ElapsedMilliseconds, allEps.Count);
- sw.Restart();
-
- Serilog.Log.Logger.Debug("Begin indexing series episodes");
- items.AddRange(allEps);
-
- //Set the Item.Series info for episodes and parents.
- foreach (var parent in items.Where(i => i.IsSeriesParent))
- {
- var children = items.Where(i => i.IsEpisodes && i.Relationships.Any(r => r.Asin == parent.Asin));
- SetSeries(parent, children);
- }
-
- int orphansRemoved = items.RemoveAll(i => (i.IsEpisodes || i.IsSeriesParent) && i.Series is null);
- if (orphansRemoved > 0)
- Serilog.Log.Debug("{orphansRemoved} podcast orphans not imported", orphansRemoved);
-
- sw.Stop();
- totalTime += sw.Elapsed;
- Serilog.Log.Logger.Information("Completed indexing series episodes after {elappsed_ms} ms.", sw.ElapsedMilliseconds);
- Serilog.Log.Logger.Information($"Completed library scan in {totalTime.TotalMilliseconds:F0} ms.");
-
- Array.ForEach(ISanitizer.GetAllSanitizers(), s => s.Sanitize(items));
- var allExceptions = IValidator.GetAllValidators().SelectMany(v => v.Validate(items)).ToList();
- if (allExceptions?.Count > 0)
- throw new ImportValidationException(items, allExceptions);
+ Serilog.Log.Logger.Debug($"Batch {batchNum} End: Retrieved {items.Count} items in {sw.ElapsedMilliseconds} ms");
return items;
}
-
- #region episodes and podcasts
-
- ///
- /// Read asins from the channel and request catalog item info in batches of . Blocks until is closed.
- ///
- /// Input asins to batch
- /// Shared semaphore to limit concurrency
- /// All s of asins written to the channel.
- private async Task> readAllAsinsAsync(ChannelReader channelReader, SemaphoreSlim semaphore)
+ catch (Exception ex)
{
- int batchNum = 1;
- List>> getTasks = new();
+ Serilog.Log.Logger.Error(ex, "Error fetching batch of episodes. {@DebugInfo}", new { asins });
+ throw;
+ }
+ finally { semaphore.Release(); }
+ }
- while (await channelReader.WaitToReadAsync())
+ public static void SetSeries(Item parent, IEnumerable- children)
+ {
+ ArgumentValidator.EnsureNotNull(parent, nameof(parent));
+ ArgumentValidator.EnsureNotNull(children, nameof(children));
+
+ //A series parent will always have exactly 1 Series
+ parent.Series = new[]
+ {
+ new Series
{
- List asins = new();
-
- while (asins.Count < BatchSize && await channelReader.WaitToReadAsync())
- {
- var asin = await channelReader.ReadAsync();
-
- if (!asins.Contains(asin))
- asins.Add(asin);
- }
- await semaphore.WaitAsync();
- getTasks.Add(getProductsAsync(batchNum++, asins, semaphore));
+ Asin = parent.Asin,
+ Sequence = "-1",
+ Title = parent.TitleWithSubtitle
}
+ };
- var completed = await Task.WhenAll(getTasks);
- //We only want Series parents and Series episodes. Explude other relationship types (e.g. 'season')
- return completed.SelectMany(l => l).Where(i => i.IsSeriesParent || i.IsEpisodes).ToList();
+ if (parent.PurchaseDate == default)
+ {
+ parent.PurchaseDate = children.Select(c => c.PurchaseDate).Order().FirstOrDefault(d => d != default);
+
+ if (parent.PurchaseDate == default)
+ {
+ Serilog.Log.Logger.Warning("{series} doesn't have a purchase date. Using UtcNow", parent);
+ parent.PurchaseDate = DateTimeOffset.UtcNow;
+ }
}
- private async Task
> getProductsAsync(int batchNum, List asins, SemaphoreSlim semaphore)
+ int lastEpNum = -1, dupeCount = 0;
+ foreach (var child in children.OrderBy(i => i.EpisodeNumber).ThenBy(i => i.PublicationDateTime))
{
- Serilog.Log.Logger.Debug($"Batch {batchNum} Begin: Fetching {asins.Count} asins");
- try
+ string sequence;
+ if (child.EpisodeNumber is null)
{
- var sw = Stopwatch.StartNew();
- var items = await Api.GetCatalogProductsAsync(asins, CatalogOptions.ResponseGroupOptions.Rating | CatalogOptions.ResponseGroupOptions.Media
- | CatalogOptions.ResponseGroupOptions.Relationships | CatalogOptions.ResponseGroupOptions.ProductDesc
- | CatalogOptions.ResponseGroupOptions.Contributors | CatalogOptions.ResponseGroupOptions.ProvidedReview
- | CatalogOptions.ResponseGroupOptions.ProductPlans | CatalogOptions.ResponseGroupOptions.Series
- | CatalogOptions.ResponseGroupOptions.CategoryLadders | CatalogOptions.ResponseGroupOptions.ProductExtendedAttrs);
- sw.Stop();
-
- Serilog.Log.Logger.Debug($"Batch {batchNum} End: Retrieved {items.Count} items in {sw.ElapsedMilliseconds} ms");
-
- return items;
+ // This should properly be Single() not FirstOrDefault(), but FirstOrDefault is defensive for malformed data from audible
+ sequence = parent.Relationships?.FirstOrDefault(r => r.Asin == child.Asin)?.Sort?.ToString() ?? "0";
}
- catch (Exception ex)
+ else
{
- Serilog.Log.Logger.Error(ex, "Error fetching batch of episodes. {@DebugInfo}", new { asins });
- throw;
+ //multipart episodes may have the same episode number
+ if (child.EpisodeNumber == lastEpNum)
+ dupeCount++;
+ else
+ lastEpNum = child.EpisodeNumber.Value;
+
+ sequence = (lastEpNum + dupeCount).ToString();
}
- finally { semaphore.Release(); }
- }
- public static void SetSeries(Item parent, IEnumerable- children)
- {
- ArgumentValidator.EnsureNotNull(parent, nameof(parent));
- ArgumentValidator.EnsureNotNull(children, nameof(children));
-
- //A series parent will always have exactly 1 Series
- parent.Series = new[]
+ // use parent's 'DateAdded'. DateAdded is just a convenience prop for: PurchaseDate.UtcDateTime
+ child.PurchaseDate = parent.PurchaseDate;
+ // parent is essentially a series
+ child.Series = new[]
{
new Series
{
Asin = parent.Asin,
- Sequence = "-1",
+ Sequence = sequence,
Title = parent.TitleWithSubtitle
}
};
-
- if (parent.PurchaseDate == default)
- {
- parent.PurchaseDate = children.Select(c => c.PurchaseDate).Order().FirstOrDefault(d => d != default);
-
- if (parent.PurchaseDate == default)
- {
- Serilog.Log.Logger.Warning("{series} doesn't have a purchase date. Using UtcNow", parent);
- parent.PurchaseDate = DateTimeOffset.UtcNow;
- }
- }
-
- int lastEpNum = -1, dupeCount = 0;
- foreach (var child in children.OrderBy(i => i.EpisodeNumber).ThenBy(i => i.PublicationDateTime))
- {
- string sequence;
- if (child.EpisodeNumber is null)
- {
- // This should properly be Single() not FirstOrDefault(), but FirstOrDefault is defensive for malformed data from audible
- sequence = parent.Relationships.FirstOrDefault(r => r.Asin == child.Asin)?.Sort?.ToString() ?? "0";
- }
- else
- {
- //multipart episodes may have the same episode number
- if (child.EpisodeNumber == lastEpNum)
- dupeCount++;
- else
- lastEpNum = child.EpisodeNumber.Value;
-
- sequence = (lastEpNum + dupeCount).ToString();
- }
-
- // use parent's 'DateAdded'. DateAdded is just a convenience prop for: PurchaseDate.UtcDateTime
- child.PurchaseDate = parent.PurchaseDate;
- // parent is essentially a series
- child.Series = new[]
- {
- new Series
- {
- Asin = parent.Asin,
- Sequence = sequence,
- Title = parent.TitleWithSubtitle
- }
- };
- }
}
- #endregion
}
+ #endregion
}
diff --git a/Source/AudibleUtilities/AudibleApiSanitizers.cs b/Source/AudibleUtilities/AudibleApiSanitizers.cs
index 48e602b0..94248c33 100644
--- a/Source/AudibleUtilities/AudibleApiSanitizers.cs
+++ b/Source/AudibleUtilities/AudibleApiSanitizers.cs
@@ -2,7 +2,6 @@
using System.Collections.Generic;
using System.Linq;
-#nullable enable
namespace AudibleUtilities;
public interface ISanitizer
diff --git a/Source/AudibleUtilities/AudibleApiStorage.cs b/Source/AudibleUtilities/AudibleApiStorage.cs
index f0d07005..7e98486a 100644
--- a/Source/AudibleUtilities/AudibleApiStorage.cs
+++ b/Source/AudibleUtilities/AudibleApiStorage.cs
@@ -3,84 +3,83 @@ using System.IO;
using LibationFileManager;
using Newtonsoft.Json;
-namespace AudibleUtilities
+namespace AudibleUtilities;
+
+public class AccountSettingsLoadErrorEventArgs : ErrorEventArgs
{
- public class AccountSettingsLoadErrorEventArgs : ErrorEventArgs
+ /// <summary>
+ /// Create a new, empty file if true, otherwise throw
+ /// </summary>
+ public bool Handled { get; set; }
+ /// <summary>
+ /// The file path of the AccountsSettings.json file
+ /// </summary>
+ public string SettingsFilePath { get; }
+
+ public AccountSettingsLoadErrorEventArgs(string path, Exception exception)
+ : base(exception)
{
- ///
- /// Create a new, empty file if true, otherwise throw
- ///
- public bool Handled { get; set; }
- ///
- /// The file path of the AccountsSettings.json file
- ///
- public string SettingsFilePath { get; }
-
- public AccountSettingsLoadErrorEventArgs(string path, Exception exception)
- : base(exception)
- {
- SettingsFilePath = path;
- }
- }
-
- public static class AudibleApiStorage
- {
- public static string AccountsSettingsFile => Path.Combine(Configuration.Instance.LibationFiles.Location, "AccountsSettings.json");
-
- public static event EventHandler LoadError;
-
- public static void EnsureAccountsSettingsFileExists()
- {
- // saves. BEWARE: this will overwrite an existing file
- if (!File.Exists(AccountsSettingsFile))
- {
- //Save the JSON file manually so that AccountsSettingsPersister.Saving and AccountsSettingsPersister.Saved
- //are not fired. There's no need to fire those events on an empty AccountsSettings file.
- var accountSerializerSettings = AudibleApi.Authorization.Identity.GetJsonSerializerSettings();
- File.WriteAllText(AccountsSettingsFile, JsonConvert.SerializeObject(new AccountsSettings(), Formatting.Indented, accountSerializerSettings));
- }
- }
-
- /// If you use this, be a good citizen and DISPOSE of it
- public static AccountsSettingsPersister GetAccountsSettingsPersister()
- {
- try
- {
- return new AccountsSettingsPersister(AccountsSettingsFile);
- }
- catch (Exception ex)
- {
- var args = new AccountSettingsLoadErrorEventArgs(AccountsSettingsFile, ex);
- LoadError?.Invoke(null, args);
- if (args.Handled)
- return GetAccountsSettingsPersister();
- throw;
- }
- }
-
- public static string GetIdentityTokensJsonPath(this Account account)
- => GetIdentityTokensJsonPath(account.AccountId, account.Locale?.Name);
- public static string GetIdentityTokensJsonPath(string username, string localeName)
- {
- var usernameSanitized = trimSurroundingQuotes(JsonConvert.ToString(username));
- var localeNameSanitized = trimSurroundingQuotes(JsonConvert.ToString(localeName));
-
- return $"$.Accounts[?(@.AccountId == '{usernameSanitized}' && @.IdentityTokens.LocaleName == '{localeNameSanitized}')].IdentityTokens";
- }
- private static string trimSurroundingQuotes(string str)
- {
- // SubString algo is better than .Trim("\"")
- // orig string "
- // json string "\""
- // Eg:
- // => str.Trim("\"")
- // output \
- // vs
- // => str.Substring(1, str.Length - 2)
- // output \"
- // also works with surrounding single quotes
-
- return str.Substring(1, str.Length - 2);
- }
+ SettingsFilePath = path;
+ }
+}
+
+public static class AudibleApiStorage
+{
+ public static string AccountsSettingsFile => Path.Combine(Configuration.Instance.LibationFiles.Location, "AccountsSettings.json");
+
+ public static event EventHandler? LoadError;
+
+ public static void EnsureAccountsSettingsFileExists()
+ {
+ // saves. BEWARE: this will overwrite an existing file
+ if (!File.Exists(AccountsSettingsFile))
+ {
+ //Save the JSON file manually so that AccountsSettingsPersister.Saving and AccountsSettingsPersister.Saved
+ //are not fired. There's no need to fire those events on an empty AccountsSettings file.
+ var accountSerializerSettings = AudibleApi.Authorization.Identity.GetJsonSerializerSettings();
+ File.WriteAllText(AccountsSettingsFile, JsonConvert.SerializeObject(new AccountsSettings(), Formatting.Indented, accountSerializerSettings));
+ }
+ }
+
+ /// If you use this, be a good citizen and DISPOSE of it
+ public static AccountsSettingsPersister GetAccountsSettingsPersister()
+ {
+ try
+ {
+ return new AccountsSettingsPersister(AccountsSettingsFile);
+ }
+ catch (Exception ex)
+ {
+ var args = new AccountSettingsLoadErrorEventArgs(AccountsSettingsFile, ex);
+ LoadError?.Invoke(null, args);
+ if (args.Handled)
+ return GetAccountsSettingsPersister();
+ throw;
+ }
+ }
+
+ public static string GetIdentityTokensJsonPath(this Account account)
+ => GetIdentityTokensJsonPath(account.AccountId, account.Locale?.Name);
+ public static string GetIdentityTokensJsonPath(string username, string? localeName)
+ {
+ var usernameSanitized = trimSurroundingQuotes(JsonConvert.ToString(username));
+ var localeNameSanitized = trimSurroundingQuotes(JsonConvert.ToString(localeName));
+
+ return $"$.Accounts[?(@.AccountId == '{usernameSanitized}' && @.IdentityTokens.LocaleName == '{localeNameSanitized}')].IdentityTokens";
+ }
+ private static string trimSurroundingQuotes(string str)
+ {
+ // SubString algo is better than .Trim("\"")
+ // orig string "
+ // json string "\""
+ // Eg:
+ // => str.Trim("\"")
+ // output \
+ // vs
+ // => str.Substring(1, str.Length - 2)
+ // output \"
+ // also works with surrounding single quotes
+
+ return str.Substring(1, str.Length - 2);
}
}
diff --git a/Source/AudibleUtilities/AudibleApiValidators.cs b/Source/AudibleUtilities/AudibleApiValidators.cs
index 495083be..9e8d6e30 100644
--- a/Source/AudibleUtilities/AudibleApiValidators.cs
+++ b/Source/AudibleUtilities/AudibleApiValidators.cs
@@ -3,90 +3,89 @@ using System.Collections.Generic;
using System.Linq;
using AudibleApi.Common;
-namespace AudibleUtilities
+namespace AudibleUtilities;
+
+public interface IValidator
{
- public interface IValidator
+ IEnumerable Validate(IEnumerable
- items);
+
+ public static IValidator[] GetAllValidators() => [
+ new LibraryValidator(),
+ new BookValidator(),
+ new CategoryValidator(),
+ new SeriesValidator(),
+ ];
+}
+
+///
+/// To be used when no validation is desired
+///
+public class ClearValidator : IValidator
+{
+ public IEnumerable Validate(IEnumerable
- items) => [];
+}
+public class LibraryValidator : IValidator
+{
+ public IEnumerable Validate(IEnumerable
- items)
{
- IEnumerable Validate(IEnumerable
- items);
+ var exceptions = new List();
- public static IValidator[] GetAllValidators() => [
- new LibraryValidator(),
- new BookValidator(),
- new CategoryValidator(),
- new SeriesValidator(),
- ];
- }
+ if (items.Any(i => string.IsNullOrWhiteSpace(i.ProductId)))
+ exceptions.Add(new ArgumentException($"Collection contains item(s) with null or blank {nameof(Item.ProductId)}", nameof(items)));
+ //// unfortunately, an actual user has a title with a beginning-of-time 'purchase_date'
+ //if (items.Any(i => i.DateAdded < new DateTime(1980, 1, 1)))
+ // exceptions.Add(new ArgumentException($"Collection contains item(s) with invalid {nameof(Item.DateAdded)}", nameof(items)));
- ///
- /// To be used when no validation is desired
- ///
- public class ClearValidator : IValidator
- {
- public IEnumerable Validate(IEnumerable
- items) => [];
- }
- public class LibraryValidator : IValidator
- {
- public IEnumerable Validate(IEnumerable
- items)
- {
- var exceptions = new List();
-
- if (items.Any(i => string.IsNullOrWhiteSpace(i.ProductId)))
- exceptions.Add(new ArgumentException($"Collection contains item(s) with null or blank {nameof(Item.ProductId)}", nameof(items)));
- //// unfortunately, an actual user has a title with a beginning-of-time 'purchase_date'
- //if (items.Any(i => i.DateAdded < new DateTime(1980, 1, 1)))
- // exceptions.Add(new ArgumentException($"Collection contains item(s) with invalid {nameof(Item.DateAdded)}", nameof(items)));
-
- return exceptions;
- }
- }
- public class BookValidator : IValidator
- {
- public IEnumerable Validate(IEnumerable
- items)
- {
- var exceptions = new List();
-
- // a book having no authors is rare but allowed
-
- if (items.Any(i => string.IsNullOrWhiteSpace(i.ProductId)))
- exceptions.Add(new ArgumentException($"Collection contains item(s) with blank {nameof(Item.ProductId)}", nameof(items)));
-
- // this can happen with podcast episodes
- foreach (var i in items.Where(i => string.IsNullOrWhiteSpace(i.Title)))
- i.Title = "[blank title]";
-
- return exceptions;
- }
- }
- public class CategoryValidator : IValidator
- {
- public IEnumerable Validate(IEnumerable
- items)
- {
- var exceptions = new List();
-
- var distinct = items.GetCategoriesDistinct();
- if (distinct.Any(s => s.CategoryId is null))
- exceptions.Add(new ArgumentException($"Collection contains {nameof(Item.Categories)} with null {nameof(Ladder.CategoryId)}", nameof(items)));
- if (distinct.Any(s => s.CategoryName is null))
- exceptions.Add(new ArgumentException($"Collection contains {nameof(Item.Categories)} with null {nameof(Ladder.CategoryName)}", nameof(items)));
-
- return exceptions;
- }
- }
- public class SeriesValidator : IValidator
- {
- public IEnumerable Validate(IEnumerable
- items)
- {
- var exceptions = new List();
-
- var distinct = items.GetSeriesDistinct();
- if (distinct.Any(s => s.SeriesId is null))
- exceptions.Add(new ArgumentException($"Collection contains {nameof(Item.Series)} with null {nameof(Series.SeriesId)}", nameof(items)));
-
- //// unfortunately, an actual user has a series with no name
- //if (distinct.Any(s => s.SeriesName is null))
- // exceptions.Add(new ArgumentException($"Collection contains {nameof(Item.Series)} with null {nameof(Series.SeriesName)}", nameof(items)));
-
- return exceptions;
- }
+ return exceptions;
+ }
+}
+public class BookValidator : IValidator
+{
+ public IEnumerable Validate(IEnumerable
- items)
+ {
+ var exceptions = new List();
+
+ // a book having no authors is rare but allowed
+
+ if (items.Any(i => string.IsNullOrWhiteSpace(i.ProductId)))
+ exceptions.Add(new ArgumentException($"Collection contains item(s) with blank {nameof(Item.ProductId)}", nameof(items)));
+
+ // this can happen with podcast episodes
+ foreach (var i in items.Where(i => string.IsNullOrWhiteSpace(i.Title)))
+ i.Title = "[blank title]";
+
+ return exceptions;
+ }
+}
+public class CategoryValidator : IValidator
+{
+ public IEnumerable Validate(IEnumerable
- items)
+ {
+ var exceptions = new List();
+
+ var distinct = items.GetCategoriesDistinct();
+ if (distinct.Any(s => s.CategoryId is null))
+ exceptions.Add(new ArgumentException($"Collection contains {nameof(Item.Categories)} with null {nameof(Ladder.CategoryId)}", nameof(items)));
+ if (distinct.Any(s => s.CategoryName is null))
+ exceptions.Add(new ArgumentException($"Collection contains {nameof(Item.Categories)} with null {nameof(Ladder.CategoryName)}", nameof(items)));
+
+ return exceptions;
+ }
+}
+public class SeriesValidator : IValidator
+{
+ public IEnumerable Validate(IEnumerable
- items)
+ {
+ var exceptions = new List();
+
+ var distinct = items.GetSeriesDistinct();
+ if (distinct.Any(s => s.SeriesId is null))
+ exceptions.Add(new ArgumentException($"Collection contains {nameof(Item.Series)} with null {nameof(Series.SeriesId)}", nameof(items)));
+
+ //// unfortunately, an actual user has a series with no name
+ //if (distinct.Any(s => s.SeriesName is null))
+ // exceptions.Add(new ArgumentException($"Collection contains {nameof(Item.Series)} with null {nameof(Series.SeriesName)}", nameof(items)));
+
+ return exceptions;
}
}
diff --git a/Source/AudibleUtilities/AudibleUtilities.csproj b/Source/AudibleUtilities/AudibleUtilities.csproj
index 243a0445..62adf3a0 100644
--- a/Source/AudibleUtilities/AudibleUtilities.csproj
+++ b/Source/AudibleUtilities/AudibleUtilities.csproj
@@ -2,11 +2,12 @@
net10.0
+ enable
-
-
+
+
diff --git a/Source/AudibleUtilities/ImportValidationException.cs b/Source/AudibleUtilities/ImportValidationException.cs
index 66db8574..3f70600a 100644
--- a/Source/AudibleUtilities/ImportValidationException.cs
+++ b/Source/AudibleUtilities/ImportValidationException.cs
@@ -2,14 +2,13 @@
using System;
using System.Collections.Generic;
-namespace AudibleUtilities
+namespace AudibleUtilities;
+
+public class ImportValidationException : AggregateException
{
- public class ImportValidationException : AggregateException
+ public List
- Items { get; }
+ public ImportValidationException(List
- items, IEnumerable exceptions) : base(exceptions)
{
- public List
- Items { get; }
- public ImportValidationException(List
- items, IEnumerable exceptions) : base(exceptions)
- {
- Items = items;
- }
+ Items = items;
}
}
diff --git a/Source/AudibleUtilities/Mkb79Auth.cs b/Source/AudibleUtilities/Mkb79Auth.cs
index 99f9aa82..3cbdb696 100644
--- a/Source/AudibleUtilities/Mkb79Auth.cs
+++ b/Source/AudibleUtilities/Mkb79Auth.cs
@@ -9,197 +9,202 @@ using Dinah.Core;
using Newtonsoft.Json;
using Newtonsoft.Json.Linq;
-namespace AudibleUtilities
+namespace AudibleUtilities;
+
+public partial class Mkb79Auth : IIdentityMaintainer
{
- public partial class Mkb79Auth : IIdentityMaintainer
+ [JsonProperty("website_cookies")]
+ private JObject? _websiteCookies { get; set; }
+
+ [JsonProperty("adp_token")]
+ public string? AdpToken { get; private set; }
+
+ [JsonProperty("access_token")]
+ public string? AccessToken { get; private set; }
+
+ [JsonProperty("refresh_token")]
+ public string? RefreshToken { get; private set; }
+
+ [JsonProperty("device_private_key")]
+ public string? DevicePrivateKey { get; private set; }
+
+ [JsonProperty("store_authentication_cookie")]
+ private JObject? _storeAuthenticationCookie { get; set; }
+
+ [JsonProperty("device_info")]
+ public DeviceInfo? DeviceInfo { get; private set; }
+
+ [JsonProperty("customer_info")]
+ public CustomerInfo? CustomerInfo { get; private set; }
+
+ [JsonProperty("expires")]
+ private double _expires { get; set; }
+
+ [JsonProperty("locale_code")]
+ public string? LocaleCode { get; private set; }
+
+ [JsonProperty("with_username")]
+ public bool WithUsername { get; private set; }
+
+ [JsonProperty("activation_bytes")]
+ public string? ActivationBytes { get; private set; }
+
+ [JsonIgnore]
+	public Dictionary<string, string>? WebsiteCookies
{
- [JsonProperty("website_cookies")]
- private JObject _websiteCookies { get; set; }
-
- [JsonProperty("adp_token")]
- public string AdpToken { get; private set; }
-
- [JsonProperty("access_token")]
- public string AccessToken { get; private set; }
-
- [JsonProperty("refresh_token")]
- public string RefreshToken { get; private set; }
-
- [JsonProperty("device_private_key")]
- public string DevicePrivateKey { get; private set; }
-
- [JsonProperty("store_authentication_cookie")]
- private JObject _storeAuthenticationCookie { get; set; }
-
- [JsonProperty("device_info")]
- public DeviceInfo DeviceInfo { get; private set; }
-
- [JsonProperty("customer_info")]
- public CustomerInfo CustomerInfo { get; private set; }
-
- [JsonProperty("expires")]
- private double _expires { get; set; }
-
- [JsonProperty("locale_code")]
- public string LocaleCode { get; private set; }
-
- [JsonProperty("with_username")]
- public bool WithUsername { get; private set; }
-
- [JsonProperty("activation_bytes")]
- public string ActivationBytes { get; private set; }
-
- [JsonIgnore]
-		public Dictionary<string, string> WebsiteCookies
- {
-			get => _websiteCookies.ToObject<Dictionary<string, string>>();
- private set => _websiteCookies = JObject.Parse(JsonConvert.SerializeObject(value, Converter.Settings));
- }
-
- [JsonIgnore]
- public string StoreAuthenticationCookie
- {
-			get => _storeAuthenticationCookie.ToObject<Dictionary<string, string>>()["cookie"];
-			private set => _storeAuthenticationCookie = JObject.Parse(JsonConvert.SerializeObject(new Dictionary<string, string>() { { "cookie", value } }, Converter.Settings));
- }
-
- [JsonIgnore]
- public DateTime AccessTokenExpires
- {
- get => DateTimeOffset.FromUnixTimeMilliseconds((long)(_expires * 1000)).DateTime;
- private set => _expires = new DateTimeOffset(value).ToUnixTimeMilliseconds() / 1000d;
- }
-
- [JsonIgnore] public ISystemDateTime SystemDateTime { get; } = new SystemDateTime();
- [JsonIgnore]
- public Locale Locale => Localization.Locales.Where(l => l.WithUsername == WithUsername).Single(l => l.CountryCode == LocaleCode);
- [JsonIgnore] public string DeviceSerialNumber => DeviceInfo.DeviceSerialNumber;
- [JsonIgnore] public string DeviceType => DeviceInfo.DeviceType;
- [JsonIgnore] public string AmazonAccountId => CustomerInfo.UserId;
-
-		public Task<AccessToken> GetAccessTokenAsync()
- => Task.FromResult(new AccessToken(AccessToken, AccessTokenExpires));
-
-		public Task<AdpToken> GetAdpTokenAsync()
- => Task.FromResult(new AdpToken(AdpToken));
-
-		public Task<PrivateKey> GetPrivateKeyAsync()
- => Task.FromResult(new PrivateKey(DevicePrivateKey));
+		get => _websiteCookies?.ToObject<Dictionary<string, string>>();
+ private set => _websiteCookies = JObject.Parse(JsonConvert.SerializeObject(value, Converter.Settings));
}
- public partial class CustomerInfo
+ [JsonIgnore]
+ public string? StoreAuthenticationCookie
{
- [JsonProperty("account_pool")]
- public string AccountPool { get; set; }
-
- [JsonProperty("user_id")]
- public string UserId { get; set; }
-
- [JsonProperty("home_region")]
- public string HomeRegion { get; set; }
-
- [JsonProperty("name")]
- public string Name { get; set; }
-
- [JsonProperty("given_name")]
- public string GivenName { get; set; }
+		get => _storeAuthenticationCookie?.ToObject<Dictionary<string, string>>()?["cookie"];
+		private set => _storeAuthenticationCookie = JObject.Parse(JsonConvert.SerializeObject(new Dictionary<string, string>() { { "cookie", value ?? "" } }, Converter.Settings));
}
- public partial class DeviceInfo
+ [JsonIgnore]
+ public DateTime AccessTokenExpires
{
- [JsonProperty("device_name")]
- public string DeviceName { get; set; }
-
- [JsonProperty("device_serial_number")]
- public string DeviceSerialNumber { get; set; }
-
- [JsonProperty("device_type")]
- public string DeviceType { get; set; }
+ get => DateTimeOffset.FromUnixTimeMilliseconds((long)(_expires * 1000)).DateTime;
+ private set => _expires = new DateTimeOffset(value).ToUnixTimeMilliseconds() / 1000d;
}
- public partial class Mkb79Auth
- {
- public static Mkb79Auth FromJson(string json)
-			=> JsonConvert.DeserializeObject<Mkb79Auth>(json, Converter.Settings);
+ [JsonIgnore] public ISystemDateTime SystemDateTime { get; } = new SystemDateTime();
+ [JsonIgnore]
+ public Locale Locale => Localization.Locales.Where(l => l.WithUsername == WithUsername).Single(l => l.CountryCode == LocaleCode);
+ [JsonIgnore] public string? DeviceSerialNumber => DeviceInfo?.DeviceSerialNumber;
+ [JsonIgnore] public string? DeviceType => DeviceInfo?.DeviceType;
+ [JsonIgnore] public string? AmazonAccountId => CustomerInfo?.UserId;
- public string ToJson()
- => JObject.Parse(JsonConvert.SerializeObject(this, Converter.Settings)).ToString(Formatting.Indented);
+	public Task<AccessToken?> GetAccessTokenAsync()
+ => AccessToken is null ? Task.FromResult((AccessToken?)null) : Task.FromResult((AccessToken?)new AccessToken(AccessToken, AccessTokenExpires));
-		public async Task<Account> ToAccountAsync()
- {
- var refreshToken = new RefreshToken(RefreshToken);
+	public Task<AdpToken?> GetAdpTokenAsync()
+ => AdpToken is null ? Task.FromResult((AdpToken?)null) : Task.FromResult((AdpToken?)new AdpToken(AdpToken));
- var authorize = new Authorize(Locale);
- var newToken = await authorize.RefreshAccessTokenAsync(refreshToken);
- AccessToken = newToken.TokenValue;
- AccessTokenExpires = newToken.Expires;
-
- var api = new Api(this);
- var email = await api.GetEmailAsync();
- var account = new Account(email)
- {
- DecryptKey = ActivationBytes,
- AccountName = $"{email} - {Locale.Name}",
- IdentityTokens = new Identity(Locale)
- };
-
- account.IdentityTokens.Update(
- await GetPrivateKeyAsync(),
- await GetAdpTokenAsync(),
- await GetAccessTokenAsync(),
- refreshToken,
-				WebsiteCookies.Select(c => new KeyValuePair<string, string>(c.Key, c.Value)),
- DeviceSerialNumber,
- DeviceType,
- AmazonAccountId,
- DeviceInfo.DeviceName,
- StoreAuthenticationCookie);
-
- return account;
- }
-
- public static Mkb79Auth FromAccount(Account account)
- => new()
- {
- AccessToken = account.IdentityTokens.ExistingAccessToken.TokenValue,
- ActivationBytes = string.IsNullOrEmpty(account.DecryptKey) ? null : account.DecryptKey,
- AdpToken = account.IdentityTokens.AdpToken.Value,
- CustomerInfo = new CustomerInfo
- {
- AccountPool = "Amazon",
- GivenName = string.Empty,
- HomeRegion = "NA",
- Name = string.Empty,
- UserId = account.IdentityTokens.AmazonAccountId
- },
- DeviceInfo = new DeviceInfo
- {
- DeviceName = account.IdentityTokens.DeviceName,
- DeviceSerialNumber = account.IdentityTokens.DeviceSerialNumber,
- DeviceType = account.IdentityTokens.DeviceType,
- },
- DevicePrivateKey = account.IdentityTokens.PrivateKey,
- AccessTokenExpires = account.IdentityTokens.ExistingAccessToken.Expires,
- LocaleCode = account.Locale.CountryCode,
- WithUsername = account.Locale.WithUsername,
- RefreshToken = account.IdentityTokens.RefreshToken.Value,
- StoreAuthenticationCookie = account.IdentityTokens.StoreAuthenticationCookie,
- WebsiteCookies = new(account.IdentityTokens.Cookies),
- };
- }
-
- public static class Serialize
- {
- public static string ToJson(this Mkb79Auth self)
- => JObject.Parse(JsonConvert.SerializeObject(self, Converter.Settings)).ToString(Formatting.Indented);
- }
-
- internal static class Converter
- {
- public static readonly JsonSerializerSettings Settings = new JsonSerializerSettings
- {
- MetadataPropertyHandling = MetadataPropertyHandling.Ignore,
- DateParseHandling = DateParseHandling.None,
- };
- }
+	public Task<PrivateKey?> GetPrivateKeyAsync()
+		=> DevicePrivateKey is null ? Task.FromResult((PrivateKey?)null) : Task.FromResult((PrivateKey?)new PrivateKey(DevicePrivateKey));
+}
+
+public partial class CustomerInfo
+{
+ [JsonProperty("account_pool")]
+ public string? AccountPool { get; set; }
+
+ [JsonProperty("user_id")]
+ public string? UserId { get; set; }
+
+ [JsonProperty("home_region")]
+ public string? HomeRegion { get; set; }
+
+ [JsonProperty("name")]
+ public string? Name { get; set; }
+
+ [JsonProperty("given_name")]
+ public string? GivenName { get; set; }
+}
+
+public partial class DeviceInfo
+{
+ [JsonProperty("device_name")]
+ public string? DeviceName { get; set; }
+
+ [JsonProperty("device_serial_number")]
+ public string? DeviceSerialNumber { get; set; }
+
+ [JsonProperty("device_type")]
+ public string? DeviceType { get; set; }
+}
+
+public partial class Mkb79Auth
+{
+ public static Mkb79Auth? FromJson(string json)
+		=> JsonConvert.DeserializeObject<Mkb79Auth>(json, Converter.Settings);
+
+ public string ToJson()
+ => JObject.Parse(JsonConvert.SerializeObject(this, Converter.Settings)).ToString(Formatting.Indented);
+
+	public async Task<Account> ToAccountAsync()
+ {
+ if (RefreshToken is null)
+ throw new InvalidOperationException("Cannot create Account from Mkb79Auth without a Refresh Token.");
+ if (await GetAdpTokenAsync() is not { } adpToken)
+ throw new InvalidOperationException("Cannot create Account from Mkb79Auth without an ADP Token.");
+ if (await GetPrivateKeyAsync() is not { } privateKey)
+ throw new InvalidOperationException("Cannot create Account from Mkb79Auth without a Private Key.");
+ var refreshToken = new RefreshToken(RefreshToken);
+
+ var authorize = new Authorize(Locale);
+ var newToken = await authorize.RefreshAccessTokenAsync(refreshToken);
+ AccessToken = newToken.TokenValue;
+ AccessTokenExpires = newToken.Expires;
+
+ var api = new Api(this);
+ var email = await api.GetEmailAsync();
+ var account = new Account(email)
+ {
+ DecryptKey = ActivationBytes,
+ AccountName = $"{email} - {Locale.Name}",
+ IdentityTokens = new Identity(Locale)
+ };
+
+ account.IdentityTokens.Update(
+ privateKey,
+ adpToken,
+ newToken,
+ refreshToken,
+			WebsiteCookies?.Select(c => new KeyValuePair<string, string>(c.Key, c.Value)),
+ DeviceSerialNumber,
+ DeviceType,
+ AmazonAccountId,
+ DeviceInfo?.DeviceName,
+ StoreAuthenticationCookie);
+
+ return account;
+ }
+
+ public static Mkb79Auth FromAccount(Account account)
+ => new()
+ {
+ AccessToken = account.IdentityTokens?.ExistingAccessToken.TokenValue,
+ ActivationBytes = string.IsNullOrEmpty(account.DecryptKey) ? null : account.DecryptKey,
+ AdpToken = account.IdentityTokens?.AdpToken?.Value,
+ CustomerInfo = new CustomerInfo
+ {
+ AccountPool = "Amazon",
+ GivenName = string.Empty,
+ HomeRegion = "NA",
+ Name = string.Empty,
+ UserId = account.IdentityTokens?.AmazonAccountId
+ },
+ DeviceInfo = new DeviceInfo
+ {
+ DeviceName = account.IdentityTokens?.DeviceName,
+ DeviceSerialNumber = account.IdentityTokens?.DeviceSerialNumber,
+ DeviceType = account.IdentityTokens?.DeviceType,
+ },
+ DevicePrivateKey = account.IdentityTokens?.PrivateKey?.Value,
+ AccessTokenExpires = account.IdentityTokens?.ExistingAccessToken.Expires ?? default,
+ LocaleCode = account.Locale?.CountryCode,
+ WithUsername = account.Locale?.WithUsername ?? false,
+ RefreshToken = account.IdentityTokens?.RefreshToken?.Value,
+ StoreAuthenticationCookie = account.IdentityTokens?.StoreAuthenticationCookie,
+ WebsiteCookies = new(account.IdentityTokens?.Cookies ?? []),
+ };
+}
+
+public static class Serialize
+{
+ public static string ToJson(this Mkb79Auth self)
+ => JObject.Parse(JsonConvert.SerializeObject(self, Converter.Settings)).ToString(Formatting.Indented);
+}
+
+internal static class Converter
+{
+ public static readonly JsonSerializerSettings Settings = new JsonSerializerSettings
+ {
+ MetadataPropertyHandling = MetadataPropertyHandling.Ignore,
+ DateParseHandling = DateParseHandling.None,
+ };
}
diff --git a/Source/AudibleUtilities/Widevine/Cdm.Api.cs b/Source/AudibleUtilities/Widevine/Cdm.Api.cs
index 2ef9769c..0c887a92 100644
--- a/Source/AudibleUtilities/Widevine/Cdm.Api.cs
+++ b/Source/AudibleUtilities/Widevine/Cdm.Api.cs
@@ -23,7 +23,7 @@ public partial class Cdm
using var persister = AudibleApiStorage.GetAccountsSettingsPersister();
//Check if there are any Android accounts. If not, we can't use Widevine.
- if (!persister.Target.Accounts.Any(a => a.IdentityTokens.DeviceType == Resources.DeviceType))
+ if (!persister.Target.Accounts.Any(a => a.IdentityTokens?.DeviceType == Resources.DeviceType))
return null;
if (!string.IsNullOrEmpty(persister.Target.Cdm))
@@ -49,7 +49,7 @@ public partial class Cdm
//try to get a CDM file for any account that's registered as an android device.
//CDMs are not account-specific, so it doesn't matter which account we're successful with.
- foreach (var account in persister.Target.Accounts.Where(a => a.IdentityTokens.DeviceType == Resources.DeviceType))
+ foreach (var account in persister.Target.Accounts.Where(a => a.IdentityTokens?.DeviceType == Resources.DeviceType))
{
try
{
@@ -174,7 +174,13 @@ public partial class Cdm
{
const string ACCOUNT_INFO_PATH = "/1.0/account/information";
+ if (account?.Locale is null)
+ throw new ArgumentException("Account does not have a valid locale.", nameof(account));
+ if (account.IdentityTokens?.AdpToken is null || account.IdentityTokens.PrivateKey is null)
+ throw new ArgumentException("Account does not have valid identity tokens.", nameof(account));
+
var message = new HttpRequestMessage(HttpMethod.Get, ACCOUNT_INFO_PATH);
+
message.SignRequest(
DateTime.UtcNow,
account.IdentityTokens.AdpToken,
diff --git a/Source/AudibleUtilities/Widevine/Cdm.cs b/Source/AudibleUtilities/Widevine/Cdm.cs
index 99eecf3e..631eb4c2 100644
--- a/Source/AudibleUtilities/Widevine/Cdm.cs
+++ b/Source/AudibleUtilities/Widevine/Cdm.cs
@@ -7,7 +7,6 @@ using System.Linq;
using System.Security.Cryptography;
using System.Text;
-#nullable enable
namespace AudibleUtilities.Widevine;
public enum KeyType
@@ -40,7 +39,7 @@ public enum KeyType
public interface ISession : IDisposable
{
- string? GetLicenseChallenge(MpegDash dash);
+ string GetLicenseChallenge(MpegDash dash);
WidevineKey[] ParseLicense(string licenseMessage);
}
@@ -107,10 +106,10 @@ public partial class Cdm
Cdm.Sessions.TryRemove(Id, out var session);
}
- public string? GetLicenseChallenge(MpegDash dash)
+ public string GetLicenseChallenge(MpegDash dash)
{
if (!dash.TryGetPssh(Cdm.WidevineContentProtection, out var pssh))
- return null;
+ throw new InvalidDataException("No Widevine PSSH found in DASH");
var licRequest = new LicenseRequest
{
diff --git a/Source/AudibleUtilities/Widevine/Device.cs b/Source/AudibleUtilities/Widevine/Device.cs
index 9fa911c2..0fed4e8c 100644
--- a/Source/AudibleUtilities/Widevine/Device.cs
+++ b/Source/AudibleUtilities/Widevine/Device.cs
@@ -3,7 +3,6 @@ using System.IO;
using System.Numerics;
using System.Security.Cryptography;
-#nullable enable
namespace AudibleUtilities.Widevine;
internal enum DeviceTypes : byte
diff --git a/Source/AudibleUtilities/Widevine/Extensions.cs b/Source/AudibleUtilities/Widevine/Extensions.cs
index 8b464382..ca40f524 100644
--- a/Source/AudibleUtilities/Widevine/Extensions.cs
+++ b/Source/AudibleUtilities/Widevine/Extensions.cs
@@ -1,6 +1,5 @@
using System;
-#nullable enable
namespace AudibleUtilities.Widevine;
internal static class Extensions
diff --git a/Source/AudibleUtilities/Widevine/MpegDash.cs b/Source/AudibleUtilities/Widevine/MpegDash.cs
index 96bd0c42..4f279d90 100644
--- a/Source/AudibleUtilities/Widevine/MpegDash.cs
+++ b/Source/AudibleUtilities/Widevine/MpegDash.cs
@@ -6,7 +6,6 @@ using System.Xml;
using System.Xml.Linq;
using System.Xml.XPath;
-#nullable enable
namespace AudibleUtilities.Widevine;
public class MpegDash
diff --git a/Source/DataLayer.Postgres/Migrations/20260205171041_MakeDbNullable.Designer.cs b/Source/DataLayer.Postgres/Migrations/20260205171041_MakeDbNullable.Designer.cs
new file mode 100644
index 00000000..5b312299
--- /dev/null
+++ b/Source/DataLayer.Postgres/Migrations/20260205171041_MakeDbNullable.Designer.cs
@@ -0,0 +1,508 @@
+//
+using System;
+using DataLayer;
+using Microsoft.EntityFrameworkCore;
+using Microsoft.EntityFrameworkCore.Infrastructure;
+using Microsoft.EntityFrameworkCore.Migrations;
+using Microsoft.EntityFrameworkCore.Storage.ValueConversion;
+using Npgsql.EntityFrameworkCore.PostgreSQL.Metadata;
+
+#nullable disable
+
+namespace DataLayer.Postgres.Migrations
+{
+ [DbContext(typeof(LibationContext))]
+ [Migration("20260205171041_MakeDbNullable")]
+ partial class MakeDbNullable
+ {
+ ///
+ protected override void BuildTargetModel(ModelBuilder modelBuilder)
+ {
+#pragma warning disable 612, 618
+ modelBuilder
+ .HasAnnotation("ProductVersion", "10.0.2")
+ .HasAnnotation("Relational:MaxIdentifierLength", 63);
+
+ NpgsqlModelBuilderExtensions.UseIdentityByDefaultColumns(modelBuilder);
+
+ modelBuilder.Entity("CategoryCategoryLadder", b =>
+ {
+ b.Property("_categoriesCategoryId")
+ .HasColumnType("integer");
+
+ b.Property("_categoryLaddersCategoryLadderId")
+ .HasColumnType("integer");
+
+ b.HasKey("_categoriesCategoryId", "_categoryLaddersCategoryLadderId");
+
+ b.HasIndex("_categoryLaddersCategoryLadderId");
+
+ b.ToTable("CategoryCategoryLadder");
+ });
+
+ modelBuilder.Entity("DataLayer.Book", b =>
+ {
+ b.Property("BookId")
+ .ValueGeneratedOnAdd()
+ .HasColumnType("integer");
+
+ NpgsqlPropertyBuilderExtensions.UseIdentityByDefaultColumn(b.Property("BookId"));
+
+ b.Property("AudibleProductId")
+ .IsRequired()
+ .HasColumnType("text");
+
+ b.Property("ContentType")
+ .HasColumnType("integer");
+
+ b.Property("DatePublished")
+ .HasColumnType("timestamp without time zone");
+
+ b.Property("Description")
+ .IsRequired()
+ .HasColumnType("text");
+
+ b.Property("IsAbridged")
+ .HasColumnType("boolean");
+
+ b.Property("IsSpatial")
+ .HasColumnType("boolean");
+
+ b.Property("Language")
+ .HasColumnType("text");
+
+ b.Property("LengthInMinutes")
+ .HasColumnType("integer");
+
+ b.Property("Locale")
+ .IsRequired()
+ .HasColumnType("text");
+
+ b.Property("PictureId")
+ .HasColumnType("text");
+
+ b.Property("PictureLarge")
+ .HasColumnType("text");
+
+ b.Property("Subtitle")
+ .IsRequired()
+ .HasColumnType("text");
+
+ b.Property("Title")
+ .IsRequired()
+ .HasColumnType("text");
+
+ b.HasKey("BookId");
+
+ b.HasIndex("AudibleProductId");
+
+ b.ToTable("Books");
+ });
+
+ modelBuilder.Entity("DataLayer.BookCategory", b =>
+ {
+ b.Property("BookId")
+ .HasColumnType("integer");
+
+ b.Property("CategoryLadderId")
+ .HasColumnType("integer");
+
+ b.HasKey("BookId", "CategoryLadderId");
+
+ b.HasIndex("BookId");
+
+ b.HasIndex("CategoryLadderId");
+
+ b.ToTable("BookCategory");
+ });
+
+ modelBuilder.Entity("DataLayer.BookContributor", b =>
+ {
+ b.Property("BookId")
+ .HasColumnType("integer");
+
+ b.Property("ContributorId")
+ .HasColumnType("integer");
+
+ b.Property("Role")
+ .HasColumnType("integer");
+
+ b.Property("Order")
+ .HasColumnType("smallint");
+
+ b.HasKey("BookId", "ContributorId", "Role");
+
+ b.HasIndex("BookId");
+
+ b.HasIndex("ContributorId");
+
+ b.ToTable("BookContributor");
+ });
+
+ modelBuilder.Entity("DataLayer.Category", b =>
+ {
+ b.Property("CategoryId")
+ .ValueGeneratedOnAdd()
+ .HasColumnType("integer");
+
+ NpgsqlPropertyBuilderExtensions.UseIdentityByDefaultColumn(b.Property("CategoryId"));
+
+ b.Property("AudibleCategoryId")
+ .IsRequired()
+ .HasColumnType("text");
+
+ b.HasKey("CategoryId");
+
+ b.HasIndex("AudibleCategoryId");
+
+ b.ToTable("Categories");
+ });
+
+ modelBuilder.Entity("DataLayer.CategoryLadder", b =>
+ {
+ b.Property("CategoryLadderId")
+ .ValueGeneratedOnAdd()
+ .HasColumnType("integer");
+
+ NpgsqlPropertyBuilderExtensions.UseIdentityByDefaultColumn(b.Property("CategoryLadderId"));
+
+ b.HasKey("CategoryLadderId");
+
+ b.ToTable("CategoryLadders");
+ });
+
+ modelBuilder.Entity("DataLayer.Contributor", b =>
+ {
+ b.Property("ContributorId")
+ .ValueGeneratedOnAdd()
+ .HasColumnType("integer");
+
+ NpgsqlPropertyBuilderExtensions.UseIdentityByDefaultColumn(b.Property("ContributorId"));
+
+ b.Property("AudibleContributorId")
+ .HasColumnType("text");
+
+ b.Property("Name")
+ .IsRequired()
+ .HasColumnType("text");
+
+ b.HasKey("ContributorId");
+
+ b.HasIndex("Name");
+
+ b.ToTable("Contributors");
+
+ b.HasData(
+ new
+ {
+ ContributorId = -1,
+ Name = ""
+ });
+ });
+
+ modelBuilder.Entity("DataLayer.LibraryBook", b =>
+ {
+ b.Property("BookId")
+ .HasColumnType("integer");
+
+ b.Property("AbsentFromLastScan")
+ .HasColumnType("boolean");
+
+ b.Property("Account")
+ .IsRequired()
+ .HasColumnType("text");
+
+ b.Property("DateAdded")
+ .HasColumnType("timestamp without time zone");
+
+ b.Property("IncludedUntil")
+ .HasColumnType("timestamp without time zone");
+
+ b.Property("IsAudiblePlus")
+ .HasColumnType("boolean");
+
+ b.Property("IsDeleted")
+ .HasColumnType("boolean");
+
+ b.HasKey("BookId");
+
+ b.ToTable("LibraryBooks");
+ });
+
+ modelBuilder.Entity("DataLayer.Series", b =>
+ {
+ b.Property("SeriesId")
+ .ValueGeneratedOnAdd()
+ .HasColumnType("integer");
+
+ NpgsqlPropertyBuilderExtensions.UseIdentityByDefaultColumn(b.Property("SeriesId"));
+
+ b.Property("AudibleSeriesId")
+ .IsRequired()
+ .HasColumnType("text");
+
+ b.Property("Name")
+ .HasColumnType("text");
+
+ b.HasKey("SeriesId");
+
+ b.HasIndex("AudibleSeriesId");
+
+ b.ToTable("Series");
+ });
+
+ modelBuilder.Entity("DataLayer.SeriesBook", b =>
+ {
+ b.Property("SeriesId")
+ .HasColumnType("integer");
+
+ b.Property("BookId")
+ .HasColumnType("integer");
+
+ b.Property("Order")
+ .HasColumnType("text");
+
+ b.HasKey("SeriesId", "BookId");
+
+ b.HasIndex("BookId");
+
+ b.HasIndex("SeriesId");
+
+ b.ToTable("SeriesBook");
+ });
+
+ modelBuilder.Entity("CategoryCategoryLadder", b =>
+ {
+ b.HasOne("DataLayer.Category", null)
+ .WithMany()
+ .HasForeignKey("_categoriesCategoryId")
+ .OnDelete(DeleteBehavior.Cascade)
+ .IsRequired();
+
+ b.HasOne("DataLayer.CategoryLadder", null)
+ .WithMany()
+ .HasForeignKey("_categoryLaddersCategoryLadderId")
+ .OnDelete(DeleteBehavior.Cascade)
+ .IsRequired();
+ });
+
+ modelBuilder.Entity("DataLayer.Book", b =>
+ {
+ b.OwnsOne("DataLayer.Rating", "Rating", b1 =>
+ {
+ b1.Property("BookId")
+ .HasColumnType("integer");
+
+ b1.Property("OverallRating")
+ .HasColumnType("real");
+
+ b1.Property("PerformanceRating")
+ .HasColumnType("real");
+
+ b1.Property("StoryRating")
+ .HasColumnType("real");
+
+ b1.HasKey("BookId");
+
+ b1.ToTable("Books");
+
+ b1.WithOwner()
+ .HasForeignKey("BookId");
+ });
+
+ b.OwnsMany("DataLayer.Supplement", "Supplements", b1 =>
+ {
+ b1.Property("SupplementId")
+ .ValueGeneratedOnAdd()
+ .HasColumnType("integer");
+
+ NpgsqlPropertyBuilderExtensions.UseIdentityByDefaultColumn(b1.Property("SupplementId"));
+
+ b1.Property("BookId")
+ .HasColumnType("integer");
+
+ b1.Property("Url")
+ .IsRequired()
+ .HasColumnType("text");
+
+ b1.HasKey("SupplementId");
+
+ b1.HasIndex("BookId");
+
+ b1.ToTable("Supplement");
+
+ b1.WithOwner("Book")
+ .HasForeignKey("BookId");
+
+ b1.Navigation("Book");
+ });
+
+ b.OwnsOne("DataLayer.UserDefinedItem", "UserDefinedItem", b1 =>
+ {
+ b1.Property("BookId")
+ .HasColumnType("integer");
+
+ b1.Property("BookStatus")
+ .HasColumnType("integer");
+
+ b1.Property("IsFinished")
+ .HasColumnType("boolean");
+
+ b1.Property("LastDownloaded")
+ .HasColumnType("timestamp without time zone");
+
+ b1.Property("LastDownloadedFileVersion")
+ .HasColumnType("text");
+
+ b1.Property("LastDownloadedFormat")
+ .HasColumnType("bigint");
+
+ b1.Property("LastDownloadedVersion")
+ .HasColumnType("text");
+
+ b1.Property("PdfStatus")
+ .HasColumnType("integer");
+
+ b1.Property("Tags")
+ .IsRequired()
+ .HasColumnType("text");
+
+ b1.HasKey("BookId");
+
+ b1.ToTable("UserDefinedItem", (string)null);
+
+ b1.WithOwner("Book")
+ .HasForeignKey("BookId");
+
+ b1.OwnsOne("DataLayer.Rating", "Rating", b2 =>
+ {
+ b2.Property("UserDefinedItemBookId")
+ .HasColumnType("integer");
+
+ b2.Property("OverallRating")
+ .HasColumnType("real");
+
+ b2.Property("PerformanceRating")
+ .HasColumnType("real");
+
+ b2.Property("StoryRating")
+ .HasColumnType("real");
+
+ b2.HasKey("UserDefinedItemBookId");
+
+ b2.ToTable("UserDefinedItem");
+
+ b2.WithOwner()
+ .HasForeignKey("UserDefinedItemBookId");
+ });
+
+ b1.Navigation("Book");
+
+ b1.Navigation("Rating")
+ .IsRequired();
+ });
+
+ b.Navigation("Rating")
+ .IsRequired();
+
+ b.Navigation("Supplements");
+
+ b.Navigation("UserDefinedItem")
+ .IsRequired();
+ });
+
+ modelBuilder.Entity("DataLayer.BookCategory", b =>
+ {
+ b.HasOne("DataLayer.Book", "Book")
+ .WithMany("CategoriesLink")
+ .HasForeignKey("BookId")
+ .OnDelete(DeleteBehavior.Cascade)
+ .IsRequired();
+
+ b.HasOne("DataLayer.CategoryLadder", "CategoryLadder")
+ .WithMany("BooksLink")
+ .HasForeignKey("CategoryLadderId")
+ .OnDelete(DeleteBehavior.Cascade)
+ .IsRequired();
+
+ b.Navigation("Book");
+
+ b.Navigation("CategoryLadder");
+ });
+
+ modelBuilder.Entity("DataLayer.BookContributor", b =>
+ {
+ b.HasOne("DataLayer.Book", "Book")
+ .WithMany("ContributorsLink")
+ .HasForeignKey("BookId")
+ .OnDelete(DeleteBehavior.Cascade)
+ .IsRequired();
+
+ b.HasOne("DataLayer.Contributor", "Contributor")
+ .WithMany("BooksLink")
+ .HasForeignKey("ContributorId")
+ .OnDelete(DeleteBehavior.Cascade)
+ .IsRequired();
+
+ b.Navigation("Book");
+
+ b.Navigation("Contributor");
+ });
+
+ modelBuilder.Entity("DataLayer.LibraryBook", b =>
+ {
+ b.HasOne("DataLayer.Book", "Book")
+ .WithOne()
+ .HasForeignKey("DataLayer.LibraryBook", "BookId")
+ .OnDelete(DeleteBehavior.Cascade)
+ .IsRequired();
+
+ b.Navigation("Book");
+ });
+
+ modelBuilder.Entity("DataLayer.SeriesBook", b =>
+ {
+ b.HasOne("DataLayer.Book", "Book")
+ .WithMany("SeriesLink")
+ .HasForeignKey("BookId")
+ .OnDelete(DeleteBehavior.Cascade)
+ .IsRequired();
+
+ b.HasOne("DataLayer.Series", "Series")
+ .WithMany("BooksLink")
+ .HasForeignKey("SeriesId")
+ .OnDelete(DeleteBehavior.Cascade)
+ .IsRequired();
+
+ b.Navigation("Book");
+
+ b.Navigation("Series");
+ });
+
+ modelBuilder.Entity("DataLayer.Book", b =>
+ {
+ b.Navigation("CategoriesLink");
+
+ b.Navigation("ContributorsLink");
+
+ b.Navigation("SeriesLink");
+ });
+
+ modelBuilder.Entity("DataLayer.CategoryLadder", b =>
+ {
+ b.Navigation("BooksLink");
+ });
+
+ modelBuilder.Entity("DataLayer.Contributor", b =>
+ {
+ b.Navigation("BooksLink");
+ });
+
+ modelBuilder.Entity("DataLayer.Series", b =>
+ {
+ b.Navigation("BooksLink");
+ });
+#pragma warning restore 612, 618
+ }
+ }
+}
diff --git a/Source/DataLayer.Postgres/Migrations/20260205171041_MakeDbNullable.cs b/Source/DataLayer.Postgres/Migrations/20260205171041_MakeDbNullable.cs
new file mode 100644
index 00000000..d6201385
--- /dev/null
+++ b/Source/DataLayer.Postgres/Migrations/20260205171041_MakeDbNullable.cs
@@ -0,0 +1,262 @@
+using Microsoft.EntityFrameworkCore.Migrations;
+
+#nullable disable
+
+namespace DataLayer.Postgres.Migrations
+{
+ ///
+ public partial class MakeDbNullable : Migration
+ {
+ ///
+ protected override void Up(MigrationBuilder migrationBuilder)
+ {
+ migrationBuilder.DropColumn(
+ name: "Name",
+ table: "Categories");
+
+ migrationBuilder.AlterColumn(
+ name: "Url",
+ table: "Supplement",
+ type: "text",
+ nullable: false,
+ defaultValue: "",
+ oldClrType: typeof(string),
+ oldType: "text",
+ oldNullable: true);
+
+ migrationBuilder.AlterColumn(
+ name: "AudibleSeriesId",
+ table: "Series",
+ type: "text",
+ nullable: false,
+ defaultValue: "",
+ oldClrType: typeof(string),
+ oldType: "text",
+ oldNullable: true);
+
+ migrationBuilder.AlterColumn(
+ name: "Account",
+ table: "LibraryBooks",
+ type: "text",
+ nullable: false,
+ defaultValue: "",
+ oldClrType: typeof(string),
+ oldType: "text",
+ oldNullable: true);
+
+ migrationBuilder.AlterColumn(
+ name: "Name",
+ table: "Contributors",
+ type: "text",
+ nullable: false,
+ defaultValue: "",
+ oldClrType: typeof(string),
+ oldType: "text",
+ oldNullable: true);
+
+ migrationBuilder.AlterColumn(
+ name: "AudibleCategoryId",
+ table: "Categories",
+ type: "text",
+ nullable: false,
+ defaultValue: "",
+ oldClrType: typeof(string),
+ oldType: "text",
+ oldNullable: true);
+
+ migrationBuilder.AlterColumn(
+ name: "Title",
+ table: "Books",
+ type: "text",
+ nullable: false,
+ defaultValue: "",
+ oldClrType: typeof(string),
+ oldType: "text",
+ oldNullable: true);
+
+ migrationBuilder.AlterColumn(
+ name: "Subtitle",
+ table: "Books",
+ type: "text",
+ nullable: false,
+ defaultValue: "",
+ oldClrType: typeof(string),
+ oldType: "text",
+ oldNullable: true);
+
+ migrationBuilder.AlterColumn(
+ name: "Rating_StoryRating",
+ table: "Books",
+ type: "real",
+ nullable: false,
+ defaultValue: 0f,
+ oldClrType: typeof(float),
+ oldType: "real",
+ oldNullable: true);
+
+ migrationBuilder.AlterColumn(
+ name: "Rating_PerformanceRating",
+ table: "Books",
+ type: "real",
+ nullable: false,
+ defaultValue: 0f,
+ oldClrType: typeof(float),
+ oldType: "real",
+ oldNullable: true);
+
+ migrationBuilder.AlterColumn(
+ name: "Rating_OverallRating",
+ table: "Books",
+ type: "real",
+ nullable: false,
+ defaultValue: 0f,
+ oldClrType: typeof(float),
+ oldType: "real",
+ oldNullable: true);
+
+ migrationBuilder.AlterColumn(
+ name: "Locale",
+ table: "Books",
+ type: "text",
+ nullable: false,
+ defaultValue: "",
+ oldClrType: typeof(string),
+ oldType: "text",
+ oldNullable: true);
+
+ migrationBuilder.AlterColumn(
+ name: "Description",
+ table: "Books",
+ type: "text",
+ nullable: false,
+ defaultValue: "",
+ oldClrType: typeof(string),
+ oldType: "text",
+ oldNullable: true);
+
+ migrationBuilder.AlterColumn(
+ name: "AudibleProductId",
+ table: "Books",
+ type: "text",
+ nullable: false,
+ defaultValue: "",
+ oldClrType: typeof(string),
+ oldType: "text",
+ oldNullable: true);
+ }
+
+ ///
+ protected override void Down(MigrationBuilder migrationBuilder)
+ {
+ migrationBuilder.AlterColumn(
+ name: "Url",
+ table: "Supplement",
+ type: "text",
+ nullable: true,
+ oldClrType: typeof(string),
+ oldType: "text");
+
+ migrationBuilder.AlterColumn(
+ name: "AudibleSeriesId",
+ table: "Series",
+ type: "text",
+ nullable: true,
+ oldClrType: typeof(string),
+ oldType: "text");
+
+ migrationBuilder.AlterColumn(
+ name: "Account",
+ table: "LibraryBooks",
+ type: "text",
+ nullable: true,
+ oldClrType: typeof(string),
+ oldType: "text");
+
+ migrationBuilder.AlterColumn(
+ name: "Name",
+ table: "Contributors",
+ type: "text",
+ nullable: true,
+ oldClrType: typeof(string),
+ oldType: "text");
+
+ migrationBuilder.AlterColumn(
+ name: "AudibleCategoryId",
+ table: "Categories",
+ type: "text",
+ nullable: true,
+ oldClrType: typeof(string),
+ oldType: "text");
+
+ migrationBuilder.AddColumn(
+ name: "Name",
+ table: "Categories",
+ type: "text",
+ nullable: true);
+
+ migrationBuilder.AlterColumn