osu-lazer/osu.Game/Database/ArchiveModelManager.cs

// Copyright (c) ppy Pty Ltd <contact@ppy.sh>. Licensed under the MIT Licence.
// See the LICENCE file in the repository root for full licence text.
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using Humanizer;
using JetBrains.Annotations;
using Microsoft.EntityFrameworkCore;
using osu.Framework;
using osu.Framework.Bindables;
using osu.Framework.Extensions;
using osu.Framework.Extensions.IEnumerableExtensions;
using osu.Framework.Logging;
using osu.Framework.Platform;
using osu.Framework.Threading;
using osu.Game.IO;
using osu.Game.IO.Archives;
using osu.Game.IPC;
using osu.Game.Overlays.Notifications;
using osu.Game.Utils;
using SharpCompress.Archives.Zip;
using SharpCompress.Common;
using FileInfo = osu.Game.IO.FileInfo;

namespace osu.Game.Database
{
/// <summary>
/// Encapsulates a model store class to give it import functionality.
/// Adds cross-functionality with <see cref="FileStore"/> to give access to the central file store for the provided model.
/// </summary>
/// <typeparam name="TModel">The model type.</typeparam>
/// <typeparam name="TFileModel">The associated file join type.</typeparam>
public abstract class ArchiveModelManager<TModel, TFileModel> : ICanAcceptFiles, IModelManager<TModel>
where TModel : class, IHasFiles<TFileModel>, IHasPrimaryKey, ISoftDelete
where TFileModel : class, INamedFileInfo, new()
{
private const int import_queue_request_concurrency = 1;
/// <summary>
/// A singleton scheduler shared by all <see cref="ArchiveModelManager{TModel,TFileModel}"/>.
/// </summary>
/// <remarks>
/// This scheduler generally performs IO and CPU intensive work so concurrency is limited harshly.
/// It is mainly being used as a queue mechanism for large imports.
/// </remarks>
private static readonly ThreadedTaskScheduler import_scheduler = new ThreadedTaskScheduler(import_queue_request_concurrency, nameof(ArchiveModelManager<TModel, TFileModel>));
/// <summary>
/// Set an endpoint for notifications to be posted to.
/// </summary>
public Action<Notification> PostNotification { protected get; set; }
/// <summary>
/// Fired when a new or updated <typeparamref name="TModel"/> becomes available in the database.
/// This is not guaranteed to run on the update thread.
/// </summary>
public IBindable<WeakReference<TModel>> ItemUpdated => itemUpdated;
private readonly Bindable<WeakReference<TModel>> itemUpdated = new Bindable<WeakReference<TModel>>();
/// <summary>
/// Fired when a <typeparamref name="TModel"/> is removed from the database.
/// This is not guaranteed to run on the update thread.
/// </summary>
public IBindable<WeakReference<TModel>> ItemRemoved => itemRemoved;
private readonly Bindable<WeakReference<TModel>> itemRemoved = new Bindable<WeakReference<TModel>>();
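// Illustrative note: the default below handles plain ".zip" archives; derived managers typically
// override HandledExtensions with their own formats, e.g. (hypothetical override in a subclass):
//
//     public override string[] HandledExtensions => new[] { ".osz", ".zip" };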
public virtual string[] HandledExtensions => new[] { ".zip" };
public virtual bool SupportsImportFromStable => RuntimeInfo.IsDesktop;
protected readonly FileStore Files;
protected readonly IDatabaseContextFactory ContextFactory;
protected readonly MutableDatabaseBackedStore<TModel> ModelStore;
// ReSharper disable once NotAccessedField.Local (we should keep a reference to this so it is not finalised)
private ArchiveImportIPCChannel ipc;
private readonly Storage exportStorage;
protected ArchiveModelManager(Storage storage, IDatabaseContextFactory contextFactory, MutableDatabaseBackedStoreWithFileIncludes<TModel, TFileModel> modelStore, IIpcHost importHost = null)
{
ContextFactory = contextFactory;
ModelStore = modelStore;
ModelStore.ItemUpdated += item => handleEvent(() => itemUpdated.Value = new WeakReference<TModel>(item));
ModelStore.ItemRemoved += item => handleEvent(() => itemRemoved.Value = new WeakReference<TModel>(item));
exportStorage = storage.GetStorageForDirectory("exports");
Files = new FileStore(contextFactory, storage);
if (importHost != null)
ipc = new ArchiveImportIPCChannel(importHost, this);
ModelStore.Cleanup();
}
/// <summary>
/// Import one or more <typeparamref name="TModel"/> items from filesystem <paramref name="paths"/>.
/// This will post notifications tracking progress.
/// </summary>
/// <param name="paths">One or more archive locations on disk.</param>
public Task Import(params string[] paths)
{
var notification = new ProgressNotification { State = ProgressNotificationState.Active };
PostNotification?.Invoke(notification);
return Import(notification, paths);
}
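// Example usage (illustrative sketch only; `manager` and the paths are placeholders):
//
//     // Queue two archives for import; a single progress notification tracks both.
//     await manager.Import(@"downloads/first.zip", @"downloads/second.zip");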
protected async Task<IEnumerable<TModel>> Import(ProgressNotification notification, params string[] paths)
{
notification.Progress = 0;
notification.Text = $"{HumanisedModelName.Humanize(LetterCasing.Title)} import is initialising...";
int current = 0;
var imported = new List<TModel>();
await Task.WhenAll(paths.Select(async path =>
{
notification.CancellationToken.ThrowIfCancellationRequested();
try
{
var model = await Import(path, notification.CancellationToken);
lock (imported)
{
if (model != null)
imported.Add(model);
current++;
notification.Text = $"Imported {current} of {paths.Length} {HumanisedModelName}s";
notification.Progress = (float)current / paths.Length;
}
}
catch (TaskCanceledException)
{
throw;
}
catch (Exception e)
{
Logger.Error(e, $@"Could not import ({Path.GetFileName(path)})", LoggingTarget.Database);
}
}));
if (imported.Count == 0)
{
notification.Text = $"{HumanisedModelName.Humanize(LetterCasing.Title)} import failed!";
notification.State = ProgressNotificationState.Cancelled;
}
else
{
notification.CompletionText = imported.Count == 1
? $"Imported {imported.First()}!"
: $"Imported {imported.Count} {HumanisedModelName}s!";
if (imported.Count > 0 && PresentImport != null)
{
notification.CompletionText += " Click to view.";
notification.CompletionClickAction = () =>
{
PresentImport?.Invoke(imported);
return true;
};
}
notification.State = ProgressNotificationState.Completed;
}
return imported;
}
/// <summary>
/// Import one <typeparamref name="TModel"/> from the filesystem and delete the file on success.
/// </summary>
/// <param name="path">The archive location on disk.</param>
/// <param name="cancellationToken">An optional cancellation token.</param>
/// <returns>The imported model, if successful.</returns>
public async Task<TModel> Import(string path, CancellationToken cancellationToken = default)
{
cancellationToken.ThrowIfCancellationRequested();
TModel import;
using (ArchiveReader reader = getReaderFrom(path))
import = await Import(reader, cancellationToken);
// We may or may not want to delete the file depending on where it is stored.
// e.g. reconstructing/repairing database with items from default storage.
// Also, this is not always a single file (e.g. for LegacyDirectoryArchiveReader).
// TODO: Add a check to prevent files from storage from being deleted.
try
{
if (import != null && File.Exists(path) && ShouldDeleteArchive(path))
File.Delete(path);
}
catch (Exception e)
{
LogForModel(import, $@"Could not delete original file after import ({Path.GetFileName(path)})", e);
}
return import;
}
/// <summary>
/// Fired when the user requests to view the resulting import.
/// </summary>
public Action<IEnumerable<TModel>> PresentImport;
/// <summary>
/// Import an item from an <see cref="ArchiveReader"/>.
/// </summary>
/// <param name="archive">The archive to be imported.</param>
/// <param name="cancellationToken">An optional cancellation token.</param>
public Task<TModel> Import(ArchiveReader archive, CancellationToken cancellationToken = default)
{
cancellationToken.ThrowIfCancellationRequested();
TModel model = null;
try
{
model = CreateModel(archive);
if (model == null)
return Task.FromResult<TModel>(null);
}
catch (TaskCanceledException)
{
throw;
}
catch (Exception e)
{
LogForModel(model, $"Model creation of {archive.Name} failed.", e);
return Task.FromResult<TModel>(null);
}
return Import(model, archive, cancellationToken);
}
/// <summary>
/// Any file extensions which should be included in hash creation.
/// Generally should include all file types which determine the file's uniqueness.
/// Large files should be avoided if possible.
/// </summary>
protected abstract string[] HashableFileTypes { get; }
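// Illustrative override in a hypothetical subclass (the extension list is an assumption for demonstration):
//
//     protected override string[] HashableFileTypes => new[] { ".osu" };
//
// Only files matching these extensions feed into computeHash() below.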
internal static void LogForModel(TModel model, string message, Exception e = null)
{
string prefix = $"[{(model?.Hash ?? "?????").Substring(0, 5)}]";
if (e != null)
Logger.Error(e, $"{prefix} {message}", LoggingTarget.Database);
else
Logger.Log($"{prefix} {message}", LoggingTarget.Database);
}
/// <summary>
/// Create a SHA-2 hash from the provided archive based on file content of all files matching <see cref="HashableFileTypes"/>.
/// </summary>
/// <remarks>
/// In the case of no matching files, a hash will be generated from the passed archive's <see cref="ArchiveReader.Name"/>.
/// </remarks>
private string computeHash(TModel item, ArchiveReader reader = null)
{
// for now, concatenate all hashable files in the model to create a unique hash.
MemoryStream hashable = new MemoryStream();
foreach (TFileModel file in item.Files.Where(f => HashableFileTypes.Any(f.Filename.EndsWith)).OrderBy(f => f.Filename))
{
using (Stream s = Files.Store.GetStream(file.FileInfo.StoragePath))
s.CopyTo(hashable);
}
if (hashable.Length > 0)
return hashable.ComputeSHA2Hash();
if (reader != null)
return reader.Name.ComputeSHA2Hash();
return item.Hash;
}
/// <summary>
/// Import an item from a <typeparamref name="TModel"/>.
/// </summary>
/// <param name="item">The model to be imported.</param>
/// <param name="archive">An optional archive to use for model population.</param>
/// <param name="cancellationToken">An optional cancellation token.</param>
public async Task<TModel> Import(TModel item, ArchiveReader archive = null, CancellationToken cancellationToken = default) => await Task.Factory.StartNew(async () =>
{
cancellationToken.ThrowIfCancellationRequested();
delayEvents();
void rollback()
{
if (!Delete(item))
{
// We may have not yet added the model to the underlying table, but should still clean up files.
LogForModel(item, "Dereferencing files for incomplete import.");
Files.Dereference(item.Files.Select(f => f.FileInfo).ToArray());
}
}
try
{
LogForModel(item, "Beginning import...");
item.Files = archive != null ? createFileInfos(archive, Files) : new List<TFileModel>();
item.Hash = computeHash(item, archive);
await Populate(item, archive, cancellationToken);
using (var write = ContextFactory.GetForWrite()) // used to share a context for full import. keep in mind this will block all writes.
{
try
{
if (!write.IsTransactionLeader) throw new InvalidOperationException($"Ensure there is no parent transaction so errors can correctly be handled by {this}");
var existing = CheckForExisting(item);
if (existing != null)
{
if (CanReuseExisting(existing, item))
{
Undelete(existing);
LogForModel(item, $"Found existing {HumanisedModelName} for {item} (ID {existing.ID}) skipping import.");
// existing item will be used; rollback new import and exit early.
rollback();
flushEvents(true);
return existing;
}
Delete(existing);
ModelStore.PurgeDeletable(s => s.ID == existing.ID);
}
PreImport(item);
// import to store
ModelStore.Add(item);
}
catch (Exception e)
{
write.Errors.Add(e);
throw;
}
}
LogForModel(item, "Import successfully completed!");
}
catch (Exception e)
{
if (!(e is TaskCanceledException))
LogForModel(item, "Database import or population failed and has been rolled back.", e);
rollback();
flushEvents(false);
throw;
}
flushEvents(true);
return item;
}, cancellationToken, TaskCreationOptions.HideScheduler, import_scheduler).Unwrap();
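// For reference, the import flow above is: createFileInfos (copy archive contents into the file store)
// -> computeHash -> Populate -> then, inside a single write context, CheckForExisting / CanReuseExisting
// (de-duplication) and ModelStore.Add. Any failure calls rollback(), which dereferences the imported files.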
/// <summary>
/// Exports an item to a legacy (.zip based) package.
/// </summary>
/// <param name="item">The item to export.</param>
public void Export(TModel item)
{
var retrievedItem = ModelStore.ConsumableItems.FirstOrDefault(s => s.ID == item.ID);
if (retrievedItem == null)
throw new ArgumentException("Specified model could not be found", nameof(item));
using (var archive = ZipArchive.Create())
{
foreach (var file in retrievedItem.Files)
archive.AddEntry(file.Filename, Files.Storage.GetStream(file.FileInfo.StoragePath));
using (var outputStream = exportStorage.GetStream($"{getValidFilename(item.ToString())}{HandledExtensions.First()}", FileAccess.Write, FileMode.Create))
archive.SaveTo(outputStream);
exportStorage.OpenInNativeExplorer();
}
}
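// Example usage (illustrative; `manager` and `someItem` are placeholders):
//
//     manager.Export(someItem);
//
// This writes "<sanitised item name><first handled extension>" into the "exports" storage directory
// and then reveals that directory in the platform's native file explorer.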
public void UpdateFile(TModel model, TFileModel file, Stream contents)
{
using (var usage = ContextFactory.GetForWrite())
{
// Dereference the existing file info, since the file model will be removed.
Files.Dereference(file.FileInfo);
// Remove the file model.
usage.Context.Set<TFileModel>().Remove(file);
// Add the new file info and containing file model.
model.Files.Remove(file);
model.Files.Add(new TFileModel
{
Filename = file.Filename,
FileInfo = Files.Add(contents)
});
Update(model);
}
}
/// <summary>
/// Perform an update of the specified item.
/// TODO: Support file additions/removals.
/// </summary>
/// <param name="item">The item to update.</param>
public void Update(TModel item)
{
using (ContextFactory.GetForWrite())
{
item.Hash = computeHash(item);
ModelStore.Update(item);
}
}
/// <summary>
/// Delete an item from the manager.
/// Is a no-op for already deleted items.
/// </summary>
/// <param name="item">The item to delete.</param>
/// <returns>false if no operation was performed</returns>
public bool Delete(TModel item)
{
using (ContextFactory.GetForWrite())
{
// re-fetch the model on the import context.
var foundModel = queryModel().Include(s => s.Files).ThenInclude(f => f.FileInfo).FirstOrDefault(s => s.ID == item.ID);
if (foundModel == null || foundModel.DeletePending) return false;
if (ModelStore.Delete(foundModel))
Files.Dereference(foundModel.Files.Select(f => f.FileInfo).ToArray());
return true;
}
}
/// <summary>
/// Delete multiple items.
/// This will post notifications tracking progress.
/// </summary>
public void Delete(List<TModel> items, bool silent = false)
{
if (items.Count == 0) return;
var notification = new ProgressNotification
{
Progress = 0,
Text = $"Preparing to delete all {HumanisedModelName}s...",
CompletionText = $"Deleted all {HumanisedModelName}s!",
State = ProgressNotificationState.Active,
};
if (!silent)
PostNotification?.Invoke(notification);
int i = 0;
foreach (var b in items)
{
if (notification.State == ProgressNotificationState.Cancelled)
// user requested abort
return;
notification.Text = $"Deleting {HumanisedModelName}s ({++i} of {items.Count})";
Delete(b);
notification.Progress = (float)i / items.Count;
}
notification.State = ProgressNotificationState.Completed;
}
/// <summary>
/// Restore multiple items that were previously deleted.
/// This will post notifications tracking progress.
/// </summary>
public void Undelete(List<TModel> items, bool silent = false)
{
if (!items.Any()) return;
var notification = new ProgressNotification
{
CompletionText = "Restored all deleted items!",
Progress = 0,
State = ProgressNotificationState.Active,
};
if (!silent)
PostNotification?.Invoke(notification);
int i = 0;
foreach (var item in items)
{
if (notification.State == ProgressNotificationState.Cancelled)
// user requested abort
return;
notification.Text = $"Restoring ({++i} of {items.Count})";
Undelete(item);
notification.Progress = (float)i / items.Count;
}
notification.State = ProgressNotificationState.Completed;
}
/// <summary>
/// Restore an item that was previously deleted. Is a no-op if the item is not in a deleted state, or has its protected flag set.
/// </summary>
/// <param name="item">The item to restore</param>
public void Undelete(TModel item)
{
using (var usage = ContextFactory.GetForWrite())
{
usage.Context.ChangeTracker.AutoDetectChangesEnabled = false;
if (!ModelStore.Undelete(item)) return;
Files.Reference(item.Files.Select(f => f.FileInfo).ToArray());
usage.Context.ChangeTracker.AutoDetectChangesEnabled = true;
}
}
/// <summary>
/// Create all required <see cref="FileInfo"/>s for the provided archive, adding them to the global file store.
/// </summary>
private List<TFileModel> createFileInfos(ArchiveReader reader, FileStore files)
{
var fileInfos = new List<TFileModel>();
string prefix = reader.Filenames.GetCommonPrefix();
if (!(prefix.EndsWith("/") || prefix.EndsWith("\\")))
prefix = string.Empty;
// import files to manager
foreach (string file in reader.Filenames)
{
using (Stream s = reader.GetStream(file))
{
fileInfos.Add(new TFileModel
{
Filename = file.Substring(prefix.Length).ToStandardisedPath(),
FileInfo = files.Add(s)
});
}
}
return fileInfos;
}
#region osu-stable import
/// <summary>
/// Set a storage with access to an osu-stable install for import purposes.
/// </summary>
public Func<Storage> GetStableStorage { private get; set; }
/// <summary>
/// Denotes whether an osu-stable installation is present to perform automated imports from.
/// </summary>
public bool StableInstallationAvailable => GetStableStorage?.Invoke() != null;
/// <summary>
/// The relative path from osu-stable's data directory to import items from.
/// </summary>
protected virtual string ImportFromStablePath => null;
/// <summary>
/// Select paths to import from stable. Default implementation iterates all directories in <see cref="ImportFromStablePath"/>.
/// </summary>
protected virtual IEnumerable<string> GetStableImportPaths(Storage stableStorage) => stableStorage.GetDirectories(ImportFromStablePath);
/// <summary>
/// Whether this specified path should be removed after successful import.
/// </summary>
/// <param name="path">The path for consideration. May be a file or a directory.</param>
/// <returns>Whether to perform deletion.</returns>
protected virtual bool ShouldDeleteArchive(string path) => false;
/// <summary>
/// This is a temporary method and will likely be replaced by a full-fledged (and more correctly placed) migration process in the future.
/// </summary>
public Task ImportFromStableAsync()
{
var stable = GetStableStorage?.Invoke();
if (stable == null)
{
Logger.Log("No osu!stable installation available!", LoggingTarget.Information, LogLevel.Error);
return Task.CompletedTask;
}
if (!stable.ExistsDirectory(ImportFromStablePath))
{
// This handles situations like when the user does not have a Skins folder
Logger.Log($"No {ImportFromStablePath} folder available in osu!stable installation", LoggingTarget.Information, LogLevel.Error);
return Task.CompletedTask;
}
return Task.Run(async () => await Import(GetStableImportPaths(GetStableStorage()).Select(f => stable.GetFullPath(f)).ToArray()));
}
#endregion
/// <summary>
/// Create a barebones model from the provided archive.
/// Actual expensive population should be done in <see cref="Populate"/>; this should just prepare for duplicate checking.
/// </summary>
/// <param name="archive">The archive to create the model for.</param>
/// <returns>A model populated with minimal information. Returning a null will abort importing silently.</returns>
protected abstract TModel CreateModel(ArchiveReader archive);
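// Minimal illustrative implementation in a hypothetical subclass where TModel is closed to
// "SomeModelInfo" (both the type and its Name property are placeholders):
//
//     protected override SomeModelInfo CreateModel(ArchiveReader archive) =>
//         new SomeModelInfo { Name = archive.Name };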
/// <summary>
/// Populate the provided model completely from the given archive.
/// After this method, the model should be in a state ready to commit to a store.
/// </summary>
/// <param name="model">The model to populate.</param>
/// <param name="archive">The archive to use as a reference for population. May be null.</param>
/// <param name="cancellationToken">An optional cancellation token.</param>
protected virtual Task Populate(TModel model, [CanBeNull] ArchiveReader archive, CancellationToken cancellationToken = default) => Task.CompletedTask;
/// <summary>
/// Perform any final actions before the import to database executes.
/// </summary>
/// <param name="model">The model prepared for import.</param>
protected virtual void PreImport(TModel model)
{
}
/// <summary>
/// Check whether an existing model already exists for a new import item.
/// </summary>
/// <param name="model">The new model proposed for import.</param>
/// <returns>An existing model which matches the criteria to skip importing, else null.</returns>
protected TModel CheckForExisting(TModel model) => model.Hash == null ? null : ModelStore.ConsumableItems.FirstOrDefault(b => b.Hash == model.Hash);
/// <summary>
/// After an existing <typeparamref name="TModel"/> is found during an import process, the default behaviour is to use/restore the existing
/// item and skip the import. This method allows changing that behaviour.
/// </summary>
/// <param name="existing">The existing model.</param>
/// <param name="import">The newly imported model.</param>
/// <returns>Whether the existing model should be restored and used. Returning false will delete the existing and force a re-import.</returns>
protected virtual bool CanReuseExisting(TModel existing, TModel import) =>
// For better or worse, we copy and import the files of a new import before checking whether
// it is a duplicate. So to check if anything has changed, we can just compare all FileInfo IDs.
getIDs(existing.Files).SequenceEqual(getIDs(import.Files)) &&
getFilenames(existing.Files).SequenceEqual(getFilenames(import.Files));
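// A subclass may widen or narrow this check; a hypothetical example that additionally requires
// matching online identifiers (SomeOnlineID is a placeholder):
//
//     protected override bool CanReuseExisting(TModel existing, TModel import) =>
//         base.CanReuseExisting(existing, import) && existing.SomeOnlineID == import.SomeOnlineID;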
private IEnumerable<long> getIDs(List<TFileModel> files)
{
foreach (var f in files.OrderBy(f => f.Filename))
yield return f.FileInfo.ID;
}
private IEnumerable<string> getFilenames(List<TFileModel> files)
{
foreach (var f in files.OrderBy(f => f.Filename))
yield return f.Filename;
}
private DbSet<TModel> queryModel() => ContextFactory.Get().Set<TModel>();
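// Example (illustrative): for a model type named "BeatmapSetInfo", HumanisedModelName below evaluates
// to "beatmapset", producing notification text such as "Imported 3 beatmapsets!".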
protected virtual string HumanisedModelName => $"{typeof(TModel).Name.Replace("Info", "").ToLower()}";
/// <summary>
/// Creates an <see cref="ArchiveReader"/> from a valid storage path.
/// </summary>
/// <param name="path">A file or folder path resolving the archive content.</param>
/// <returns>A reader giving access to the archive's content.</returns>
private ArchiveReader getReaderFrom(string path)
{
if (ZipUtils.IsZipArchive(path))
return new ZipArchiveReader(File.Open(path, FileMode.Open, FileAccess.Read, FileShare.Read), Path.GetFileName(path));
if (Directory.Exists(path))
return new LegacyDirectoryArchiveReader(path);
if (File.Exists(path))
return new LegacyFileArchiveReader(path);
throw new InvalidFormatException($"{path} is not a valid archive");
}
#region Event handling / delaying
private readonly List<Action> queuedEvents = new List<Action>();
/// <summary>
/// Allows delaying of outwards events until an operation is confirmed (at a database level).
/// </summary>
private bool delayingEvents;
/// <summary>
/// Begin delaying outwards events.
/// </summary>
private void delayEvents() => delayingEvents = true;
/// <summary>
/// Flush delayed events and disable delaying.
/// </summary>
/// <param name="perform">Whether the flushed events should be performed.</param>
private void flushEvents(bool perform)
{
Action[] events;
lock (queuedEvents)
{
events = queuedEvents.ToArray();
queuedEvents.Clear();
}
if (perform)
{
foreach (var a in events)
a.Invoke();
}
delayingEvents = false;
}
private void handleEvent(Action a)
{
if (delayingEvents)
{
lock (queuedEvents)
queuedEvents.Add(a);
}
else
a.Invoke();
}
#endregion
private string getValidFilename(string filename)
{
foreach (char c in Path.GetInvalidFileNameChars())
filename = filename.Replace(c, '_');
return filename;
}
}
}