// Copyright (c) 2007-2018 ppy Pty Ltd <contact@ppy.sh>.
// Licensed under the MIT Licence - https://raw.githubusercontent.com/ppy/osu/master/LICENCE

using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using Microsoft.EntityFrameworkCore;
using osu.Framework.Logging;
using osu.Framework.Platform;
using osu.Game.IO;
using osu.Game.IO.Archives;
using osu.Game.IPC;
using osu.Game.Overlays.Notifications;
using osu.Game.Utils;
using SharpCompress.Common;
using FileInfo = osu.Game.IO.FileInfo;

namespace osu.Game.Database
{
    /// <summary>
    /// Encapsulates a model store class to give it import functionality.
    /// Adds cross-functionality with <see cref="FileStore"/> to give access to the central file store for the provided model.
    /// </summary>
    /// <typeparam name="TModel">The model type.</typeparam>
    /// <typeparam name="TFileModel">The associated file join type.</typeparam>
    public abstract class ArchiveModelManager<TModel, TFileModel> : ICanAcceptFiles
        where TModel : class, IHasFiles<TFileModel>, IHasPrimaryKey, ISoftDelete
        where TFileModel : INamedFileInfo, new()
    {
        /// <summary>
        /// Set an endpoint for notifications to be posted to.
        /// </summary>
        public Action<Notification> PostNotification { protected get; set; }

        /// <summary>
        /// Fired when a new <typeparamref name="TModel"/> becomes available in the database.
        /// This is not guaranteed to run on the update thread.
        /// </summary>
        public event Action<TModel> ItemAdded;

        /// <summary>
        /// Fired when a <typeparamref name="TModel"/> is removed from the database.
        /// This is not guaranteed to run on the update thread.
        /// </summary>
        public event Action<TModel> ItemRemoved;

        public virtual string[] HandledExtensions => new[] { ".zip" };

        protected readonly FileStore Files;

        protected readonly IDatabaseContextFactory ContextFactory;

        protected readonly MutableDatabaseBackedStore<TModel> ModelStore;

        // ReSharper disable once NotAccessedField.Local (we should keep a reference to this so it is not finalised)
        private ArchiveImportIPCChannel ipc;

        private readonly List<Action> cachedEvents = new List<Action>();

        /// <summary>
        /// Allows delaying of outwards events until an operation is confirmed (at a database level).
        /// </summary>
        private bool delayingEvents;

        /// <summary>
        /// Begin delaying outwards events.
        /// </summary>
        private void delayEvents() => delayingEvents = true;

        /// <summary>
        /// Flush delayed events and disable delaying.
        /// </summary>
        /// <param name="perform">Whether the flushed events should be performed.</param>
        private void flushEvents(bool perform)
        {
            if (perform)
            {
                foreach (var a in cachedEvents)
                    a.Invoke();
            }

            cachedEvents.Clear();
            delayingEvents = false;
        }

        private void handleEvent(Action a)
        {
            if (delayingEvents)
                cachedEvents.Add(a);
            else
                a.Invoke();
        }

        protected ArchiveModelManager(Storage storage, IDatabaseContextFactory contextFactory, MutableDatabaseBackedStore<TModel> modelStore, IIpcHost importHost = null)
        {
            ContextFactory = contextFactory;

            ModelStore = modelStore;
            ModelStore.ItemAdded += s => handleEvent(() => ItemAdded?.Invoke(s));
            ModelStore.ItemRemoved += s => handleEvent(() => ItemRemoved?.Invoke(s));

            Files = new FileStore(contextFactory, storage);

            if (importHost != null)
                ipc = new ArchiveImportIPCChannel(importHost, this);

            ModelStore.Cleanup();
        }

        /// <summary>
        /// Import one or more <typeparamref name="TModel"/> items from filesystem <paramref name="paths"/>.
        /// This will post notifications tracking progress.
        /// </summary>
        /// <param name="paths">One or more archive locations on disk.</param>
        public void Import(params string[] paths)
        {
            var notification = new ProgressNotification
            {
                Text = "Import is initialising...",
                Progress = 0,
                State = ProgressNotificationState.Active,
            };

            PostNotification?.Invoke(notification);

            List<TModel> imported = new List<TModel>();

            int current = 0;
            int errors = 0;

            foreach (string path in paths)
            {
                if (notification.State == ProgressNotificationState.Cancelled)
                    // user requested abort
                    return;

                try
                {
                    notification.Text = $"Importing ({++current} of {paths.Length})\n{Path.GetFileName(path)}";

                    using (ArchiveReader reader = getReaderFrom(path))
                        imported.Add(Import(reader));

                    notification.Progress = (float)current / paths.Length;

                    // We may or may not want to delete the file depending on where it is stored.
                    //  e.g. reconstructing/repairing database with items from default storage.
                    // Also, not always a single file, i.e. for LegacyFilesystemReader
                    // TODO: Add a check to prevent files from storage to be deleted.
                    try
                    {
                        if (File.Exists(path))
                            File.Delete(path);
                    }
                    catch (Exception e)
                    {
                        Logger.Error(e, $@"Could not delete original file after import ({Path.GetFileName(path)})");
                    }
                }
                catch (Exception e)
                {
                    e = e.InnerException ?? e;
                    Logger.Error(e, $@"Could not import ({Path.GetFileName(path)})");
                    errors++;
                }
            }

            notification.Text = errors > 0 ? $"Import complete with {errors} errors" : "Import successful!";
            notification.State = ProgressNotificationState.Completed;
        }

        /// <summary>
        /// Import an item from an <see cref="ArchiveReader"/>.
        /// </summary>
        /// <param name="archive">The archive to be imported.</param>
        public TModel Import(ArchiveReader archive)
        {
            TModel item = null;

            delayEvents();

            try
            {
                // used to share a context for full import. keep in mind this will block all writes.
                using (var write = ContextFactory.GetForWrite())
                {
                    try
                    {
                        if (!write.IsTransactionLeader)
                            throw new InvalidOperationException($"Ensure there is no parent transaction so errors can correctly be handled by {this}");

                        // create a new model (don't yet add to database)
                        item = CreateModel(archive);

                        var existing = CheckForExisting(item);

                        if (existing != null)
                        {
                            Logger.Log($"Found existing {typeof(TModel)} for {archive.Name} (ID {existing.ID}). Skipping import.", LoggingTarget.Database);
                            return existing;
                        }

                        item.Files = createFileInfos(archive, Files);

                        Populate(item, archive);

                        // import to store
                        ModelStore.Add(item);
                    }
                    catch (Exception e)
                    {
                        write.Errors.Add(e);
                        throw;
                    }
                }

                Logger.Log($"Import of {archive.Name} successfully completed!", LoggingTarget.Database);
            }
            catch (Exception e)
            {
                Logger.Error(e, $"Import of {archive.Name} failed and has been rolled back.", LoggingTarget.Database);
                item = null;
            }
            finally
            {
                // we only want to flush events after we've confirmed the write context didn't have any errors.
                flushEvents(item != null);
            }

            return item;
        }

        /// <summary>
        /// Import an item from a <typeparamref name="TModel"/>.
        /// </summary>
        /// <param name="item">The model to be imported.</param>
        public void Import(TModel item) => ModelStore.Add(item);

        /// <summary>
        /// Perform an update of the specified item.
        /// TODO: Support file changes.
        /// </summary>
        /// <param name="item">The item to update.</param>
        public void Update(TModel item) => ModelStore.Update(item);

        /// <summary>
        /// Delete an item from the manager.
        /// Is a no-op for already deleted items.
        /// </summary>
        /// <param name="item">The item to delete.</param>
        public void Delete(TModel item)
        {
            using (ContextFactory.GetForWrite())
            {
                // re-fetch the model on the import context.
                var foundModel = queryModel().Include(s => s.Files).ThenInclude(f => f.FileInfo).First(s => s.ID == item.ID);

                if (foundModel.DeletePending) return;

                if (ModelStore.Delete(foundModel))
                    Files.Dereference(foundModel.Files.Select(f => f.FileInfo).ToArray());
            }
        }

        /// <summary>
        /// Delete multiple items.
        /// This will post notifications tracking progress.
        /// </summary>
        public void Delete(List<TModel> items)
        {
            if (items.Count == 0) return;

            var notification = new ProgressNotification
            {
                Progress = 0,
                CompletionText = "Deleted all beatmaps!",
                State = ProgressNotificationState.Active,
            };

            PostNotification?.Invoke(notification);

            int i = 0;

            using (ContextFactory.GetForWrite())
            {
                foreach (var b in items)
                {
                    if (notification.State == ProgressNotificationState.Cancelled)
                        // user requested abort
                        return;

                    notification.Text = $"Deleting ({++i} of {items.Count})";

                    Delete(b);

                    notification.Progress = (float)i / items.Count;
                }
            }

            notification.State = ProgressNotificationState.Completed;
        }

        /// <summary>
        /// Restore multiple items that were previously deleted.
        /// This will post notifications tracking progress.
        /// </summary>
        public void Undelete(List<TModel> items)
        {
            if (!items.Any()) return;

            var notification = new ProgressNotification
            {
                CompletionText = "Restored all deleted items!",
                Progress = 0,
                State = ProgressNotificationState.Active,
            };

            PostNotification?.Invoke(notification);

            int i = 0;

            using (ContextFactory.GetForWrite())
            {
                foreach (var item in items)
                {
                    if (notification.State == ProgressNotificationState.Cancelled)
                        // user requested abort
                        return;

                    notification.Text = $"Restoring ({++i} of {items.Count})";

                    Undelete(item);

                    notification.Progress = (float)i / items.Count;
                }
            }

            notification.State = ProgressNotificationState.Completed;
        }

        /// <summary>
        /// Restore an item that was previously deleted. Is a no-op if the item is not in a deleted state, or has its protected flag set.
        /// </summary>
        /// <param name="item">The item to restore</param>
        public void Undelete(TModel item)
        {
            using (var usage = ContextFactory.GetForWrite())
            {
                usage.Context.ChangeTracker.AutoDetectChangesEnabled = false;

                try
                {
                    if (!ModelStore.Undelete(item)) return;

                    Files.Reference(item.Files.Select(f => f.FileInfo).ToArray());
                }
                finally
                {
                    // always restore change tracking, even on the early-return (not undeleted)
                    // path — previously a failed undelete left the shared context with
                    // AutoDetectChangesEnabled permanently disabled.
                    usage.Context.ChangeTracker.AutoDetectChangesEnabled = true;
                }
            }
        }

        /// <summary>
        /// Create all required <see cref="FileInfo"/>s for the provided archive, adding them to the global file store.
        /// </summary>
        private List<TFileModel> createFileInfos(ArchiveReader reader, FileStore files)
        {
            var fileInfos = new List<TFileModel>();

            // import files to manager
            foreach (string file in reader.Filenames)
                using (Stream s = reader.GetStream(file))
                    fileInfos.Add(new TFileModel
                    {
                        Filename = file,
                        FileInfo = files.Add(s)
                    });

            return fileInfos;
        }

        /// <summary>
        /// Create a barebones model from the provided archive.
        /// Actual expensive population should be done in <see cref="Populate"/>; this should just prepare for duplicate checking.
        /// </summary>
        /// <param name="archive">The archive to create the model for.</param>
        /// <returns>A model populated with minimal information.</returns>
        protected abstract TModel CreateModel(ArchiveReader archive);

        /// <summary>
        /// Populate the provided model completely from the given archive.
        /// After this method, the model should be in a state ready to commit to a store.
        /// </summary>
        /// <param name="model">The model to populate.</param>
        /// <param name="archive">The archive to use as a reference for population.</param>
        protected virtual void Populate(TModel model, ArchiveReader archive)
        {
        }

        protected virtual TModel CheckForExisting(TModel model) => null;

        private DbSet<TModel> queryModel() => ContextFactory.Get().Set<TModel>();

        /// <summary>
        /// Creates an <see cref="ArchiveReader"/> from a valid storage path.
        /// </summary>
        /// <param name="path">A file or folder path resolving the archive content.</param>
        /// <returns>A reader giving access to the archive's content.</returns>
        private ArchiveReader getReaderFrom(string path)
        {
            if (ZipUtils.IsZipArchive(path))
                return new ZipArchiveReader(Files.Storage.GetStream(path), Path.GetFileName(path));
            if (Directory.Exists(path))
                return new LegacyFilesystemReader(path);
            throw new InvalidFormatException($"{path} is not a valid archive");
        }
    }
}