Merge pull request #531 from Mbucari/master

Bug fixes and performance improvements
rmcrackan 2023-03-14 07:53:25 -04:00 committed by GitHub
commit eb61ba3d69
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
17 changed files with 176 additions and 264 deletions

View File

@ -213,12 +213,13 @@ namespace ApplicationServices
if (archiver is not null)
{
var fileName = $"{DateTime.Now:u} {account.MaskedLogEntry}.json";
var items = await Task.Run(() => JArray.FromObject(dtoItems.Select(i => i.SourceJson)));
var scanFile = new JObject
{
{ "Account", account.MaskedLogEntry },
{ "ScannedDateTime", DateTime.Now.ToString("u") },
{ "Items", await Task.Run(() => JArray.FromObject(dtoItems)) }
{ "Items", items}
};
await archiver.AddFileAsync(fileName, scanFile);
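This hunk hoists the Task.Run call out of the JObject initializer and serializes each item's raw SourceJson instead of the full DTO. A minimal, self-contained sketch of the same pattern, assuming a hypothetical Dto record and BuildAsync helper (only the Task.Run hoisting and the SourceJson projection mirror the change):

using System;
using System.Linq;
using System.Threading.Tasks;
using Newtonsoft.Json.Linq;

static class ScanFileSketch
{
    // Hypothetical stand-in for the import DTO; SourceJson is the raw item JSON.
    private record Dto(string Asin, JObject SourceJson);

    public static async Task<JObject> BuildAsync(string account, Dto[] dtoItems)
    {
        // Serialize the (potentially large) item array on a thread-pool thread.
        var items = await Task.Run(() => JArray.FromObject(dtoItems.Select(i => i.SourceJson)));

        return new JObject
        {
            { "Account", account },
            { "ScannedDateTime", DateTime.Now.ToString("u") },
            { "Items", items }
        };
    }
}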

View File

@ -1,6 +1,5 @@
using System;
using System.Collections.Generic;
using System.Globalization;
using System.Linq;
using System.Threading.Channels;
using System.Threading.Tasks;
@ -8,10 +7,9 @@ using System.Diagnostics;
using AudibleApi;
using AudibleApi.Common;
using Dinah.Core;
using Newtonsoft.Json;
using Newtonsoft.Json.Converters;
using Polly;
using Polly.Retry;
using System.Threading;
namespace AudibleUtilities
{
@ -91,49 +89,73 @@ namespace AudibleUtilities
{
Serilog.Log.Logger.Debug("Beginning library scan.");
int count = 0;
List<Item> items = new();
List<Item> seriesItems = new();
var sw = Stopwatch.StartNew();
var totalTime = TimeSpan.Zero;
using var semaphore = new SemaphoreSlim(MaxConcurrency);
//Scan the library for all added books, and add any episode-type items to seriesItems to be scanned for episodes/parents
await foreach (var item in Api.GetLibraryItemAsyncEnumerable(libraryOptions, BatchSize, MaxConcurrency))
var episodeChannel = Channel.CreateUnbounded<string>(new UnboundedChannelOptions { SingleReader = true, SingleWriter = true });
var batchReaderTask = readAllAsinsAsync(episodeChannel.Reader, semaphore);
//Scan the library for all added books.
//Get relationship asins from episode-type items and write them to episodeChannel where they will be batched and queried.
await foreach (var item in Api.GetLibraryItemsPagesAsync(libraryOptions, BatchSize, semaphore))
{
if ((item.IsEpisodes || item.IsSeriesParent) && importEpisodes)
seriesItems.Add(item);
else if (!item.IsEpisodes && !item.IsSeriesParent)
items.Add(item);
if (importEpisodes)
{
var episodes = item.Where(i => i.IsEpisodes).ToList();
var series = item.Where(i => i.IsSeriesParent).ToList();
count++;
var parentAsins = episodes
.SelectMany(i => i.Relationships)
.Where(r => r.RelationshipToProduct == RelationshipToProduct.Parent)
.Select(r => r.Asin);
var episodeAsins = series
.SelectMany(i => i.Relationships)
.Where(r => r.RelationshipToProduct == RelationshipToProduct.Child && r.RelationshipType == RelationshipType.Episode)
.Select(r => r.Asin);
foreach (var asin in parentAsins.Concat(episodeAsins))
episodeChannel.Writer.TryWrite(asin);
items.AddRange(episodes);
items.AddRange(series);
}
items.AddRange(item.Where(i => !i.IsSeriesParent && !i.IsEpisodes));
}
Serilog.Log.Logger.Debug("Library scan complete. Found {count} books and series. Waiting on series episode scans to complete.", count);
Serilog.Log.Logger.Debug("Beginning episode scan.");
count = 0;
//'get' Tasks are activated when they are written to the channel. To avoid more concurrency than is desired, the
//channel is bounded with a capacity of 1. Channel write operations are blocked until the current item is read
var episodeChannel = Channel.CreateBounded<Task<List<Item>>>(new BoundedChannelOptions(1) { SingleReader = true });
//Start scanning for all episodes. Episode batch 'get' Tasks are written to the channel.
var scanAllSeriesTask = scanAllSeries(seriesItems, episodeChannel.Writer);
//Read all episodes from the channel and add them to the import items.
//This method blocks until episodeChannel.Writer is closed by scanAllSeries()
await foreach (var ep in getAllEpisodesAsync(episodeChannel.Reader))
{
items.AddRange(ep);
count += ep.Count;
}
//Be sure to await the scanAllSeries Task so that any exceptions are thrown
await scanAllSeriesTask;
sw.Stop();
Serilog.Log.Logger.Debug("Episode scan complete. Found {count} episodes and series.", count);
Serilog.Log.Logger.Debug($"Completed library scan in {sw.Elapsed.TotalMilliseconds:F0} ms.");
totalTime += sw.Elapsed;
Serilog.Log.Logger.Debug("Library scan complete after {elappsed_ms} ms. Found {count} books and series. Waiting on series episode scans to complete.", sw.ElapsedMilliseconds, items.Count);
sw.Restart();
//Signal that we're done adding asins
episodeChannel.Writer.Complete();
//Wait for all episodes/parents to be retrieved
var allEps = await batchReaderTask;
sw.Stop();
totalTime += sw.Elapsed;
Serilog.Log.Logger.Debug("Episode scan complete after {elappsed_ms} ms. Found {count} episodes and series .", sw.ElapsedMilliseconds, allEps.Count);
sw.Restart();
Serilog.Log.Logger.Debug("Begin indexing series episodes");
items.AddRange(allEps);
//Set the Item.Series info for episodes and parents.
foreach (var parent in items.Where(i => i.IsSeriesParent))
{
var children = items.Where(i => i.IsEpisodes && i.Relationships.Any(r => r.Asin == parent.Asin));
setSeries(parent, children);
}
sw.Stop();
totalTime += sw.Elapsed;
Serilog.Log.Logger.Information("Completed indexing series episodes after {elappsed_ms} ms.", sw.ElapsedMilliseconds);
Serilog.Log.Logger.Information($"Completed library scan in {totalTime.TotalMilliseconds:F0} ms.");
var validators = new List<IValidator>();
validators.AddRange(getValidators());
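The reworked scan pushes relationship asins into one unbounded channel while a background reader drains them in batches and fans out catalog requests, with a shared SemaphoreSlim capping how many fetches run at once. A stripped-down, self-contained sketch of that producer/consumer shape (BatchedScanSketch, FetchBatchAsync, and the BatchSize/MaxConcurrency values are hypothetical stand-ins, not the project's API):

using System.Collections.Generic;
using System.Linq;
using System.Threading;
using System.Threading.Channels;
using System.Threading.Tasks;

static class BatchedScanSketch
{
    private const int BatchSize = 50;
    private const int MaxConcurrency = 10;

    public static async Task<List<string>> ScanAsync(IEnumerable<string> asins)
    {
        using var semaphore = new SemaphoreSlim(MaxConcurrency);
        var channel = Channel.CreateUnbounded<string>(
            new UnboundedChannelOptions { SingleReader = true, SingleWriter = true });

        // Reader: batches asins and starts bounded, concurrent fetches.
        var readerTask = ReadAllAsync(channel.Reader, semaphore);

        // Writer: the library scan just pushes asins as it discovers them.
        foreach (var asin in asins)
            channel.Writer.TryWrite(asin);
        channel.Writer.Complete();   // signal "no more asins"

        return await readerTask;     // wait for every batch to finish
    }

    private static async Task<List<string>> ReadAllAsync(ChannelReader<string> reader, SemaphoreSlim semaphore)
    {
        var getTasks = new List<Task<List<string>>>();
        while (await reader.WaitToReadAsync())
        {
            // Collect up to BatchSize distinct asins before issuing a request.
            var batch = new List<string>();
            while (batch.Count < BatchSize && await reader.WaitToReadAsync())
            {
                var asin = await reader.ReadAsync();
                if (!batch.Contains(asin))
                    batch.Add(asin);
            }

            await semaphore.WaitAsync();   // cap the number of in-flight fetches
            getTasks.Add(FetchBatchAsync(batch, semaphore));
        }

        var results = await Task.WhenAll(getTasks);
        return results.SelectMany(r => r).ToList();
    }

    // Hypothetical stand-in for the real catalog call (e.g. Api.GetCatalogProductsAsync).
    private static async Task<List<string>> FetchBatchAsync(List<string> batch, SemaphoreSlim semaphore)
    {
        try
        {
            await Task.Delay(10);   // pretend to call the catalog endpoint
            return batch;
        }
        finally { semaphore.Release(); }
    }
}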
@ -159,146 +181,55 @@ namespace AudibleUtilities
#region episodes and podcasts
/// <summary>
/// Read get tasks from the <paramref name="channel"/> and await results. This method maintains
/// a list of up to <see cref="MaxConcurrency"/> get tasks. When any of the get tasks completes,
/// the Items are yielded, that task is removed from the list, and a new get task is read from
/// the channel.
/// Read asins from the channel and request catalog item info in batches of <see cref="BatchSize"/>. Blocks until <paramref name="channelReader"/> is closed.
/// </summary>
private async IAsyncEnumerable<List<Item>> getAllEpisodesAsync(ChannelReader<Task<List<Item>>> channel)
/// <param name="channelReader">Input asins to batch</param>
/// <param name="semaphore">Shared semaphore to limit concurrency</param>
/// <returns>All <see cref="Item"/>s of asins written to the channel.</returns>
private async Task<List<Item>> readAllAsinsAsync(ChannelReader<string> channelReader, SemaphoreSlim semaphore)
{
List<Task<List<Item>>> concurentGets = new();
int batchNum = 1;
List<Task<List<Item>>> getTasks = new();
for (int i = 0; i < MaxConcurrency && await channel.WaitToReadAsync(); i++)
concurentGets.Add(await channel.ReadAsync());
while (concurentGets.Count > 0)
while (await channelReader.WaitToReadAsync())
{
var completed = await Task.WhenAny(concurentGets);
concurentGets.Remove(completed);
List<string> asins = new();
if (await channel.WaitToReadAsync())
concurentGets.Add(await channel.ReadAsync());
yield return completed.Result;
}
}
/// <summary>
/// Gets all child episodes and episode parents belonging to <paramref name="seriesItems"/> in batches and
/// writes the get tasks to <paramref name="channel"/>.
/// </summary>
private async Task scanAllSeries(IEnumerable<Item> seriesItems, ChannelWriter<Task<List<Item>>> channel)
{
try
{
List<Task> episodeScanTasks = new();
foreach (var item in seriesItems)
while (asins.Count < BatchSize && await channelReader.WaitToReadAsync())
{
if (item.IsEpisodes)
await channel.WriteAsync(getEpisodeParentAsync(item));
else if (item.IsSeriesParent)
episodeScanTasks.Add(getParentEpisodesAsync(item, channel));
var asin = await channelReader.ReadAsync();
if (!asins.Contains(asin))
asins.Add(asin);
}
//episodeScanTasks complete only after all episode batch 'gets' have been written to the channel
await Task.WhenAll(episodeScanTasks);
}
finally { channel.Complete(); }
}
private async Task<List<Item>> getEpisodeParentAsync(Item episode)
{
//Item is a single episode that was added to the library.
//Get the episode's parent and add it to the database.
Serilog.Log.Logger.Debug("Supplied Parent is an episode. Beginning parent scan for {parent}", episode);
List<Item> children = new() { episode };
var parentAsins = episode.Relationships
.Where(r => r.RelationshipToProduct == RelationshipToProduct.Parent)
.Select(p => p.Asin);
var seriesParents = await Api.GetCatalogProductsAsync(parentAsins, CatalogOptions.ResponseGroupOptions.ALL_OPTIONS);
int numSeriesParents = seriesParents.Count(p => p.IsSeriesParent);
if (numSeriesParents != 1)
{
//There should only ever be 1 top-level parent per episode. If not, log
//so we can figure out what to do about those special cases, and don't
//import the episode.
JsonSerializerSettings Settings = new()
{
MetadataPropertyHandling = MetadataPropertyHandling.Ignore,
DateParseHandling = DateParseHandling.None,
Converters = {
new IsoDateTimeConverter { DateTimeStyles = DateTimeStyles.AssumeUniversal }
}
};
Serilog.Log.Logger.Error($"Found {numSeriesParents} parents for {episode.Asin}\r\nEpisode Product:\r\n{JsonConvert.SerializeObject(episode, Formatting.None, Settings)}");
return new();
await semaphore.WaitAsync();
getTasks.Add(getProductsAsync(batchNum++, asins, semaphore));
}
var parent = seriesParents.Single(p => p.IsSeriesParent);
parent.PurchaseDate = episode.PurchaseDate;
setSeries(parent, children);
children.Add(parent);
Serilog.Log.Logger.Debug("Completed parent scan for {episode}", episode);
return children;
var completed = await Task.WhenAll(getTasks);
//We only want Series parents and Series episodes. Exclude other relationship types (e.g. 'season')
return completed.SelectMany(l => l).Where(i => i.IsSeriesParent || i.IsEpisodes).ToList();
}
/// <summary>
/// Gets all episodes belonging to <paramref name="parent"/> in batches of <see cref="BatchSize"/> and writes the batch get tasks to <paramref name="channel"/>
/// This method only completes after all episode batch 'gets' have been written to the channel
/// </summary>
private async Task getParentEpisodesAsync(Item parent, ChannelWriter<Task<List<Item>>> channel)
{
Serilog.Log.Logger.Debug("Beginning episode scan for {parent}", parent);
var episodeIds = parent.Relationships
.Where(r => r.RelationshipToProduct == RelationshipToProduct.Child && r.RelationshipType == RelationshipType.Episode)
.Select(r => r.Asin);
for (int batchNum = 0; episodeIds.Any(); batchNum++)
{
var batch = episodeIds.Take(BatchSize);
await channel.WriteAsync(getEpisodeBatchAsync(batchNum, parent, batch));
episodeIds = episodeIds.Skip(BatchSize);
}
}
private async Task<List<Item>> getEpisodeBatchAsync(int batchNum, Item parent, IEnumerable<string> childrenIds)
private async Task<List<Item>> getProductsAsync(int batchNum, List<string> asins, SemaphoreSlim semaphore)
{
Serilog.Log.Logger.Debug($"Batch {batchNum} Begin: Fetching {asins.Count} asins");
try
{
List<Item> episodeBatch = await Api.GetCatalogProductsAsync(childrenIds, CatalogOptions.ResponseGroupOptions.ALL_OPTIONS);
var sw = Stopwatch.StartNew();
var items = await Api.GetCatalogProductsAsync(asins, CatalogOptions.ResponseGroupOptions.ALL_OPTIONS);
sw.Stop();
setSeries(parent, episodeBatch);
Serilog.Log.Logger.Debug($"Batch {batchNum} End: Retrieved {items.Count} items in {sw.ElapsedMilliseconds} ms");
if (batchNum == 0)
episodeBatch.Add(parent);
Serilog.Log.Logger.Debug($"Batch {batchNum}: {episodeBatch.Count} results\t({{parent}})", parent);
return episodeBatch;
return items;
}
catch (Exception ex)
{
Serilog.Log.Logger.Error(ex, "Error fetching batch of episodes. {@DebugInfo}", new
{
ParentId = parent.Asin,
ParentTitle = parent.Title,
BatchNumber = batchNum,
ChildIdBatch = childrenIds
});
Serilog.Log.Logger.Error(ex, "Error fetching batch of episodes. {@DebugInfo}", new { asins });
throw;
}
finally { semaphore.Release(); }
}
private static void setSeries(Item parent, IEnumerable<Item> children)
@ -314,6 +245,9 @@ namespace AudibleUtilities
}
};
if (parent.PurchaseDate == default)
parent.PurchaseDate = children.Select(c => c.PurchaseDate).Order().First();
foreach (var child in children)
{
// use parent's 'DateAdded'. DateAdded is just a convenience prop for: PurchaseDate.UtcDateTime
@ -333,4 +267,4 @@ namespace AudibleUtilities
}
#endregion
}
}
}

View File

@ -91,29 +91,8 @@ namespace DtoImporterService
return qtyNew;
}
/*
* Subscription Plan Names:
*
* US: "SpecialBenefit"
* IT: "Rodizio"
*
* Audible Plus Plan Names:
*
* US: "US Minerva"
* IT: "Audible-AYCL"
*
*/
//This SEEMS to work to detect plus titles which are no longer available.
//I have my doubts it won't yield false negatives, but I have more
//confidence that it won't yield many/any false positives.
private static bool isPlusTitleUnavailable(ImportItem item)
=> item.DtoItem.IsAyce is true
&& item.DtoItem.Plans?.Any(p =>
p.PlanName.ContainsInsensitive("Minerva") ||
p.PlanName.ContainsInsensitive("AYCL") ||
p.PlanName.ContainsInsensitive("Free")
) is not true;
&& item.DtoItem.Plans?.Any(p => p.IsAyce) is not true;
}
}
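Read together, the predicate drops the plan-name string matching and keys off each plan's IsAyce flag instead. A self-contained sketch of the simplified heuristic (the Plan and DtoItem records are hypothetical stand-ins for the AudibleApi DTOs): a title obtained through the all-you-can-eat catalog whose current plans no longer include any AYCE plan is treated as unavailable.

using System.Collections.Generic;
using System.Linq;

static class PlusAvailabilitySketch
{
    // Hypothetical stand-ins for the API DTO types.
    internal record Plan(string PlanName, bool IsAyce);
    internal record DtoItem(bool? IsAyce, IReadOnlyList<Plan> Plans);

    internal static bool IsPlusTitleUnavailable(DtoItem item)
        => item.IsAyce is true
        && item.Plans?.Any(p => p.IsAyce) is not true;
}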

View File

@ -1,6 +1,8 @@
using Avalonia.Controls;
using Avalonia.Media;
using Avalonia.Media.Imaging;
using LibationAvalonia.Dialogs;
using LibationFileManager;
using System.Threading.Tasks;
namespace LibationAvalonia
@ -20,5 +22,21 @@ namespace LibationAvalonia
=> dialogWindow.ShowDialog<DialogResult>(owner ?? App.MainWindow);
public static Window GetParentWindow(this IControl control) => control.VisualRoot as Window;
private static Bitmap defaultImage;
public static Bitmap TryLoadImageOrDefault(byte[] picture, PictureSize defaultSize = PictureSize.Native)
{
try
{
using var ms = new System.IO.MemoryStream(picture);
return new Bitmap(ms);
}
catch
{
using var ms = new System.IO.MemoryStream(PictureStorage.GetDefaultImage(defaultSize));
return defaultImage ??= new Bitmap(ms);
}
}
}
}

View File

@ -1,5 +1,4 @@
using ApplicationServices;
using Avalonia;
using Avalonia.Controls;
using Avalonia.Markup.Xaml;
using Avalonia.Media.Imaging;
@ -10,7 +9,6 @@ using LibationAvalonia.ViewModels;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.Linq;
using System;
namespace LibationAvalonia.Dialogs
{
@ -112,8 +110,7 @@ namespace LibationAvalonia.Dialogs
//init cover image
var picture = PictureStorage.GetPictureSynchronously(new PictureDefinition(libraryBook.Book.PictureId, PictureSize._80x80));
using var ms = new System.IO.MemoryStream(picture);
Cover = new Bitmap(ms);
Cover = AvaloniaUtils.TryLoadImageOrDefault(picture, PictureSize._80x80);
//init book details
DetailsText = @$"

View File

@ -2,7 +2,6 @@ using Avalonia.Markup.Xaml;
using Avalonia.Media.Imaging;
using System;
using System.ComponentModel;
using System.IO;
using ReactiveUI;
using Avalonia.Platform.Storage;
@ -29,17 +28,7 @@ namespace LibationAvalonia.Dialogs
public void SetCoverBytes(byte[] cover)
{
try
{
var ms = new MemoryStream(cover);
_bitmapHolder.CoverImage = new Bitmap(ms);
}
catch (Exception ex)
{
Serilog.Log.Logger.Error(ex, "Error loading cover art for {file}", PictureFileName);
using var ms = App.OpenAsset("img-coverart-prod-unavailable_500x500.jpg");
_bitmapHolder.CoverImage = new Bitmap(ms);
}
_bitmapHolder.CoverImage = AvaloniaUtils.TryLoadImageOrDefault(cover);
}
public async void SaveImage_Clicked(object sender, Avalonia.Interactivity.RoutedEventArgs e)

View File

@ -8,28 +8,17 @@ namespace LibationAvalonia.ViewModels
{
public class AvaloniaEntryStatus : EntryStatus, IEntryStatus, IComparable
{
private static Bitmap _defaultImage;
public override IBrush BackgroundBrush => IsEpisode ? App.SeriesEntryGridBackgroundBrush : Brushes.Transparent;
private AvaloniaEntryStatus(LibraryBook libraryBook) : base(libraryBook) { }
public static EntryStatus Create(LibraryBook libraryBook) => new AvaloniaEntryStatus(libraryBook);
protected override Bitmap LoadImage(byte[] picture)
{
try
{
using var ms = new System.IO.MemoryStream(picture);
return new Bitmap(ms);
}
catch (Exception ex)
{
Serilog.Log.Logger.Error(ex, "Error loading cover art for {Book}", Book);
return _defaultImage ??= new Bitmap(App.OpenAsset("img-coverart-prod-unavailable_80x80.jpg"));
}
}
=> AvaloniaUtils.TryLoadImageOrDefault(picture, LibationFileManager.PictureSize._80x80);
protected override Bitmap GetResourceImage(string rescName)
{
//These images are assets, so assume they will never be corrupted.
using var stream = App.OpenAsset(rescName + ".png");
return new Bitmap(stream);
}

View File

@ -115,16 +115,14 @@ namespace LibationAvalonia.ViewModels
PictureStorage.PictureCached += PictureStorage_PictureCached;
// Mutable property. Set the field so PropertyChanged isn't fired.
using var ms = new System.IO.MemoryStream(picture);
_cover = new Bitmap(ms);
_cover = AvaloniaUtils.TryLoadImageOrDefault(picture, PictureSize._80x80);
}
private void PictureStorage_PictureCached(object sender, PictureCachedEventArgs e)
{
if (e.Definition.PictureId == LibraryBook.Book.PictureId)
{
using var ms = new System.IO.MemoryStream(e.Picture);
Cover = new Bitmap(ms);
Cover = AvaloniaUtils.TryLoadImageOrDefault(e.Picture, PictureSize._80x80);
PictureStorage.PictureCached -= PictureStorage_PictureCached;
}
}

View File

@ -132,7 +132,7 @@ namespace LibationAvalonia.ViewModels
//Add absent entries to grid, or update existing entry
var allEntries = SOURCE.BookEntries().ToList();
var seriesEntries = SOURCE.SeriesEntries().ToList();
var parentedEpisodes = dbBooks.ParentedEpisodes().ToList();
var parentedEpisodes = dbBooks.ParentedEpisodes().ToHashSet();
await Dispatcher.UIThread.InvokeAsync(() =>
{
@ -142,7 +142,7 @@ namespace LibationAvalonia.ViewModels
if (libraryBook.Book.IsProduct())
UpsertBook(libraryBook, existingEntry);
else if (parentedEpisodes.Any(lb => lb == libraryBook))
else if (parentedEpisodes.Contains(libraryBook))
//Only try to add or update if this LibraryBook is a known child of a parent
UpsertEpisode(libraryBook, existingEntry, seriesEntries, dbBooks);
}
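Switching parentedEpisodes from ToList to ToHashSet turns the per-book membership test from a linear Any scan into a hash lookup, which matters once the library holds many episodes. A minimal sketch of the idea (the Book record is a hypothetical stand-in for LibraryBook, which is assumed to provide usable equality semantics):

using System;
using System.Collections.Generic;
using System.Linq;

static class EpisodeLookupSketch
{
    // Hypothetical stand-in; records get value equality for free.
    private record Book(string Asin);

    public static void Demo()
    {
        var parented = Enumerable.Range(0, 10_000).Select(i => new Book($"B{i:D6}")).ToList();
        var candidates = Enumerable.Range(5_000, 10_000).Select(i => new Book($"B{i:D6}")).ToList();

        // Old shape: O(n) scan per candidate, O(n*m) overall.
        var slow = candidates.Count(c => parented.Any(p => p == c));

        // New shape: O(1) lookup per candidate, O(n+m) overall.
        var set = parented.ToHashSet();
        var fast = candidates.Count(set.Contains);

        Console.WriteLine($"{slow} == {fast}");   // 5000 == 5000
    }
}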

View File

@ -67,7 +67,7 @@ namespace LibationFileManager
}
DownloadQueue.Add(def);
return (true, getDefaultImage(def.Size));
return (true, GetDefaultImage(def.Size));
}
}
@ -96,7 +96,7 @@ namespace LibationFileManager
public static void SetDefaultImage(PictureSize pictureSize, byte[] bytes)
=> defaultImages[pictureSize] = bytes;
private static byte[] getDefaultImage(PictureSize size)
public static byte[] GetDefaultImage(PictureSize size)
=> defaultImages.ContainsKey(size)
? defaultImages[size]
: new byte[0];
@ -120,7 +120,7 @@ namespace LibationFileManager
private static byte[] downloadBytes(PictureDefinition def)
{
if (def.PictureId is null)
return getDefaultImage(def.Size);
return GetDefaultImage(def.Size);
try
{
@ -135,7 +135,7 @@ namespace LibationFileManager
}
catch
{
return getDefaultImage(def.Size);
return GetDefaultImage(def.Size);
}
}
}

View File

@ -1,5 +1,4 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Windows.Forms;
using DataLayer;
@ -42,7 +41,7 @@ namespace LibationWinForms.Dialogs
this.Text = Book.Title;
(_, var picture) = PictureStorage.GetPicture(new PictureDefinition(Book.PictureId, PictureSize._80x80));
this.coverPb.Image = Dinah.Core.WindowsDesktop.Drawing.ImageReader.ToImage(picture);
this.coverPb.Image = WinFormsUtil.TryLoadImageOrDefault(picture, PictureSize._80x80);
var t = @$"
Title: {Book.Title}

View File

@ -2,6 +2,7 @@
using System.Linq;
using System.Windows.Forms;
using LibationFileManager;
using Microsoft.EntityFrameworkCore.Query.SqlExpressions;
namespace LibationWinForms
{
@ -44,20 +45,31 @@ namespace LibationWinForms
var rect = new Rectangle(x, y, savedState.Width, savedState.Height);
// is proposed rect on a screen?
if (Screen.AllScreens.Any(screen => screen.WorkingArea.Contains(rect)))
if (savedState.IsMaximized)
{
//When a window is maximized, the client rectangle is not on a screen (y is negative).
form.StartPosition = FormStartPosition.Manual;
form.DesktopBounds = rect;
// FINAL: for Maximized: start normal state, set size and location, THEN set max state
form.WindowState = FormWindowState.Maximized;
}
else
{
form.StartPosition = FormStartPosition.WindowsDefaultLocation;
form.Size = rect.Size;
}
// is proposed rect on a screen?
if (Screen.AllScreens.Any(screen => screen.WorkingArea.Contains(rect)))
{
form.StartPosition = FormStartPosition.Manual;
form.DesktopBounds = rect;
}
else
{
form.StartPosition = FormStartPosition.WindowsDefaultLocation;
form.Size = rect.Size;
}
// FINAL: for Maximized: start normal state, set size and location, THEN set max state
form.WindowState = savedState.IsMaximized ? FormWindowState.Maximized : FormWindowState.Normal;
form.WindowState = FormWindowState.Normal;
}
}
public static void SaveSizeAndLocation(this Form form, Configuration config)
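The reworked logic positions a previously maximized form first and only then switches it to Maximized, rather than relying on a single trailing WindowState assignment; that way the saved rectangle ends up as the form's restore bounds. A compact, self-contained sketch of that ordering (the SavedState record and Apply method are hypothetical, not the project's Configuration API):

using System.Drawing;
using System.Linq;
using System.Windows.Forms;

internal static class RestoreSketch
{
    // Hypothetical saved-state shape.
    internal record SavedState(int X, int Y, int Width, int Height, bool IsMaximized);

    public static void Apply(Form form, SavedState saved)
    {
        var rect = new Rectangle(saved.X, saved.Y, saved.Width, saved.Height);

        if (saved.IsMaximized)
        {
            // Position first (the client rectangle of a maximized window is off-screen),
            // then maximize so rect becomes the restore bounds.
            form.StartPosition = FormStartPosition.Manual;
            form.DesktopBounds = rect;
            form.WindowState = FormWindowState.Maximized;
        }
        else if (Screen.AllScreens.Any(s => s.WorkingArea.Contains(rect)))
        {
            form.StartPosition = FormStartPosition.Manual;
            form.DesktopBounds = rect;
            form.WindowState = FormWindowState.Normal;
        }
        else
        {
            // Off-screen: let Windows pick a location but keep the saved size.
            form.StartPosition = FormStartPosition.WindowsDefaultLocation;
            form.Size = rect.Size;
            form.WindowState = FormWindowState.Normal;
        }
    }
}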

View File

@ -19,15 +19,7 @@ namespace LibationWinForms.GridView
public void SetCoverArt(byte[] cover)
{
try
{
pictureBox1.Image = Dinah.Core.WindowsDesktop.Drawing.ImageReader.ToImage(cover);
}
catch (Exception ex)
{
Serilog.Log.Logger.Error(ex, "Error loading cover art for {file}", PictureFileName);
pictureBox1.Image = Properties.Resources.default_cover_500x500;
}
pictureBox1.Image = WinFormsUtil.TryLoadImageOrDefault(cover);
}
#region Make the form's aspect ratio always match the picture's aspect ratio.

View File

@ -265,9 +265,7 @@ namespace LibationWinForms.GridView
var allEntries = bindingList.AllItems().BookEntries();
var seriesEntries = bindingList.AllItems().SeriesEntries().ToList();
var parentedEpisodes = dbBooks.ParentedEpisodes().ToList();
var sw = new Stopwatch();
var parentedEpisodes = dbBooks.ParentedEpisodes().ToHashSet();
foreach (var libraryBook in dbBooks.OrderBy(e => e.DateAdded))
{
@ -278,14 +276,11 @@ namespace LibationWinForms.GridView
AddOrUpdateBook(libraryBook, existingEntry);
continue;
}
sw.Start();
if (parentedEpisodes.Any(lb => lb == libraryBook))
if (parentedEpisodes.Contains(libraryBook))
{
sw.Stop();
//Only try to add or update if this LibraryBook is a known child of a parent
AddOrUpdateEpisode(libraryBook, existingEntry, seriesEntries, dbBooks);
}
sw.Stop();
}
bindingList.SuspendFilteringOnUpdate = false;

View File

@ -1,7 +1,5 @@
using DataLayer;
using Dinah.Core.WindowsDesktop.Drawing;
using LibationUiBase.GridView;
using System;
using System.Drawing;
namespace LibationWinForms.GridView
@ -14,23 +12,12 @@ namespace LibationWinForms.GridView
private WinFormsEntryStatus(LibraryBook libraryBook) : base(libraryBook) { }
public static EntryStatus Create(LibraryBook libraryBook) => new WinFormsEntryStatus(libraryBook);
protected override object LoadImage(byte[] picture)
{
try
{
return ImageReader.ToImage(picture);
}
catch (Exception ex)
{
Serilog.Log.Logger.Error(ex, "Error loading cover art for {Book}", Book);
return Properties.Resources.default_cover_80x80;
}
}
protected override Image LoadImage(byte[] picture)
=> WinFormsUtil.TryLoadImageOrDefault(picture, LibationFileManager.PictureSize._80x80);
protected override Image GetResourceImage(string rescName)
{
var image = Properties.Resources.ResourceManager.GetObject(rescName);
return image as Bitmap;
}
}

View File

@ -12,7 +12,6 @@ using AudibleApi;
using DataLayer;
using Dinah.Core;
using Dinah.Core.ErrorHandling;
using Dinah.Core.WindowsDesktop.Drawing;
using FileLiberator;
using LibationFileManager;
using LibationUiBase;
@ -87,7 +86,7 @@ namespace LibationWinForms.ProcessQueue
if (isDefault)
PictureStorage.PictureCached += PictureStorage_PictureCached;
_cover = ImageReader.ToImage(picture);
_cover = WinFormsUtil.TryLoadImageOrDefault(picture, PictureSize._80x80);
}
@ -95,7 +94,7 @@ namespace LibationWinForms.ProcessQueue
{
if (e.Definition.PictureId == LibraryBook.Book.PictureId)
{
Cover = ImageReader.ToImage(e.Picture);
Cover = WinFormsUtil.TryLoadImageOrDefault(e.Picture, PictureSize._80x80);
PictureStorage.PictureCached -= PictureStorage_PictureCached;
}
}
@ -260,7 +259,7 @@ namespace LibationWinForms.ProcessQueue
private void AudioDecodable_CoverImageDiscovered(object sender, byte[] coverArt)
{
Cover = ImageReader.ToImage(coverArt);
Cover = WinFormsUtil.TryLoadImageOrDefault(coverArt, PictureSize._80x80);
}
#endregion

View File

@ -0,0 +1,23 @@
using Dinah.Core.WindowsDesktop.Drawing;
using LibationFileManager;
using System.Drawing;
namespace LibationWinForms
{
internal static class WinFormsUtil
{
private static Bitmap defaultImage;
public static Image TryLoadImageOrDefault(byte[] picture, PictureSize defaultSize = PictureSize.Native)
{
try
{
return ImageReader.ToImage(picture);
}
catch
{
using var ms = new System.IO.MemoryStream(PictureStorage.GetDefaultImage(defaultSize));
return defaultImage ??= new Bitmap(ms);
}
}
}
}