// Copyright (c) 2007-2018 ppy Pty Ltd <contact@ppy.sh>.
// Licensed under the MIT Licence - https://raw.githubusercontent.com/ppy/osu/master/LICENCE

using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading.Tasks;
using JetBrains.Annotations;
using Microsoft.EntityFrameworkCore;
using osu.Framework.IO.File;
using osu.Framework.Logging;
using osu.Framework.Platform;
using osu.Game.IO;
using osu.Game.IO.Archives;
using osu.Game.IPC;
using osu.Game.Overlays.Notifications;
using osu.Game.Utils;
using SharpCompress.Common;
using FileInfo = osu.Game.IO.FileInfo;

namespace osu.Game.Database
{
    /// <summary>
    /// Encapsulates a model store class to give it import functionality.
    /// Adds cross-functionality with <see cref="FileStore"/> to give access to the central file store for the provided model.
    /// </summary>
    /// <typeparam name="TModel">The model type.</typeparam>
    /// <typeparam name="TFileModel">The associated file join type.</typeparam>
    public abstract class ArchiveModelManager<TModel, TFileModel> : ICanAcceptFiles
        where TModel : class, IHasFiles<TFileModel>, IHasPrimaryKey, ISoftDelete
        where TFileModel : INamedFileInfo, new()
    {
        /// <summary>
        /// Set an endpoint for notifications to be posted to.
        /// </summary>
        public Action<Notification> PostNotification { protected get; set; }

        /// <summary>
        /// Fired when a new <typeparamref name="TModel"/> becomes available in the database.
        /// This is not guaranteed to run on the update thread.
        /// </summary>
        public event Action<TModel> ItemAdded;

        /// <summary>
        /// Fired when a <typeparamref name="TModel"/> is removed from the database.
        /// This is not guaranteed to run on the update thread.
        /// </summary>
        public event Action<TModel> ItemRemoved;

        public virtual string[] HandledExtensions => new[] { ".zip" };

        protected readonly FileStore Files;

        protected readonly IDatabaseContextFactory ContextFactory;

        protected readonly MutableDatabaseBackedStore<TModel> ModelStore;

        // ReSharper disable once NotAccessedField.Local (we should keep a reference to this so it is not finalised)
        private ArchiveImportIPCChannel ipc;

        private readonly List<Action> cachedEvents = new List<Action>();

        /// <summary>
        /// Allows delaying of outwards events until an operation is confirmed (at a database level).
        /// </summary>
        private bool delayingEvents;

        /// <summary>
        /// Begin delaying outwards events.
        /// </summary>
        private void delayEvents() => delayingEvents = true;

        /// <summary>
        /// Flush delayed events and disable delaying.
        /// </summary>
        /// <param name="perform">Whether the flushed events should be performed.</param>
        private void flushEvents(bool perform)
        {
            if (perform)
            {
                foreach (var a in cachedEvents)
                    a.Invoke();
            }

            cachedEvents.Clear();
            delayingEvents = false;
        }

        private void handleEvent(Action a)
        {
            if (delayingEvents)
                cachedEvents.Add(a);
            else
                a.Invoke();
        }

        protected ArchiveModelManager(Storage storage, IDatabaseContextFactory contextFactory, MutableDatabaseBackedStore<TModel> modelStore, IIpcHost importHost = null)
        {
            ContextFactory = contextFactory;

            ModelStore = modelStore;
            ModelStore.ItemAdded += s => handleEvent(() => ItemAdded?.Invoke(s));
            ModelStore.ItemRemoved += s => handleEvent(() => ItemRemoved?.Invoke(s));

            Files = new FileStore(contextFactory, storage);

            if (importHost != null)
                ipc = new ArchiveImportIPCChannel(importHost, this);

            ModelStore.Cleanup();
        }

        /// <summary>
        /// Import one or more <typeparamref name="TModel"/> items from filesystem <paramref name="paths"/>.
        /// This will post notifications tracking progress.
        /// </summary>
        /// <param name="paths">One or more archive locations on disk.</param>
        public void Import(params string[] paths)
        {
            var notification = new ProgressNotification
            {
                Text = "Import is initialising...",
                Progress = 0,
                State = ProgressNotificationState.Active,
            };

            PostNotification?.Invoke(notification);

            List<TModel> imported = new List<TModel>();

            int current = 0;

            foreach (string path in paths)
            {
                if (notification.State == ProgressNotificationState.Cancelled)
                    // user requested abort
                    return;

                try
                {
                    notification.Text = $"Importing ({++current} of {paths.Length})\n{Path.GetFileName(path)}";

                    using (ArchiveReader reader = getReaderFrom(path))
                        imported.Add(Import(reader));

                    notification.Progress = (float)current / paths.Length;

                    // We may or may not want to delete the file depending on where it is stored.
                    //  e.g. reconstructing/repairing database with items from default storage.
                    // Also, not always a single file, i.e. for LegacyFilesystemReader
                    // TODO: Add a check to prevent files from storage to be deleted.
                    try
                    {
                        if (File.Exists(path))
                            File.Delete(path);
                    }
                    catch (Exception e)
                    {
                        Logger.Error(e, $@"Could not delete original file after import ({Path.GetFileName(path)})");
                    }
                }
                catch (Exception e)
                {
                    e = e.InnerException ?? e;
                    Logger.Error(e, $@"Could not import ({Path.GetFileName(path)})");
                }
            }

            if (imported.Count == 0)
            {
                notification.Text = "Import failed!";
                notification.State = ProgressNotificationState.Cancelled;
            }
            else
            {
                notification.CompletionText = $"Imported {current} {typeof(TModel).Name.Replace("Info", "").ToLower()}s!";
                notification.CompletionClickAction += () =>
                {
                    if (imported.Count > 0)
                        PresentCompletedImport(imported);
                    return true;
                };
                notification.State = ProgressNotificationState.Completed;
            }
        }

        protected virtual void PresentCompletedImport(IEnumerable<TModel> imported)
        {
        }

        /// <summary>
        /// Import an item from an <see cref="ArchiveReader"/>.
        /// </summary>
        /// <param name="archive">The archive to be imported.</param>
        public TModel Import(ArchiveReader archive)
        {
            try
            {
                var model = CreateModel(archive);
                return model == null ? null : Import(model, archive);
            }
            catch (Exception e)
            {
                Logger.Error(e, $"Model creation of {archive.Name} failed.", LoggingTarget.Database);
                return null;
            }
        }

        /// <summary>
        /// Import an item from a <typeparamref name="TModel"/>.
        /// </summary>
        /// <param name="item">The model to be imported.</param>
        /// <param name="archive">An optional archive to use for model population.</param>
        public TModel Import(TModel item, ArchiveReader archive = null)
        {
            delayEvents();

            try
            {
                Logger.Log($"Importing {item}...", LoggingTarget.Database);

                using (var write = ContextFactory.GetForWrite()) // used to share a context for full import. keep in mind this will block all writes.
                {
                    try
                    {
                        if (!write.IsTransactionLeader) throw new InvalidOperationException($"Ensure there is no parent transaction so errors can correctly be handled by {this}");

                        var existing = CheckForExisting(item);

                        if (existing != null)
                        {
                            Logger.Log($"Found existing {typeof(TModel)} for {item} (ID {existing.ID}). Skipping import.", LoggingTarget.Database);
                            return existing;
                        }

                        if (archive != null)
                            item.Files = createFileInfos(archive, Files);

                        Populate(item, archive);

                        // import to store
                        ModelStore.Add(item);
                    }
                    catch (Exception e)
                    {
                        write.Errors.Add(e);
                        throw;
                    }
                }

                Logger.Log($"Import of {item} successfully completed!", LoggingTarget.Database);
            }
            catch (Exception e)
            {
                Logger.Error(e, $"Import of {item} failed and has been rolled back.", LoggingTarget.Database);
                item = null;
            }
            finally
            {
                // we only want to flush events after we've confirmed the write context didn't have any errors.
                flushEvents(item != null);
            }

            return item;
        }

        /// <summary>
        /// Perform an update of the specified item.
        /// TODO: Support file changes.
        /// </summary>
        /// <param name="item">The item to update.</param>
        public void Update(TModel item) => ModelStore.Update(item);

        /// <summary>
        /// Delete an item from the manager.
        /// Is a no-op for already deleted items.
        /// </summary>
        /// <param name="item">The item to delete.</param>
        public void Delete(TModel item)
        {
            using (ContextFactory.GetForWrite())
            {
                // re-fetch the model on the import context.
                var foundModel = queryModel().Include(s => s.Files).ThenInclude(f => f.FileInfo).First(s => s.ID == item.ID);

                if (foundModel.DeletePending) return;

                if (ModelStore.Delete(foundModel))
                    Files.Dereference(foundModel.Files.Select(f => f.FileInfo).ToArray());
            }
        }

        /// <summary>
        /// Delete multiple items.
        /// This will post notifications tracking progress.
        /// </summary>
        public void Delete(List<TModel> items)
        {
            if (items.Count == 0) return;

            var notification = new ProgressNotification
            {
                Progress = 0,
                CompletionText = $"Deleted all {typeof(TModel).Name.Replace("Info", "").ToLower()}s!",
                State = ProgressNotificationState.Active,
            };

            PostNotification?.Invoke(notification);

            int i = 0;

            using (ContextFactory.GetForWrite())
            {
                foreach (var b in items)
                {
                    if (notification.State == ProgressNotificationState.Cancelled)
                        // user requested abort
                        return;

                    notification.Text = $"Deleting ({++i} of {items.Count})";

                    Delete(b);

                    notification.Progress = (float)i / items.Count;
                }
            }

            notification.State = ProgressNotificationState.Completed;
        }

        /// <summary>
        /// Restore multiple items that were previously deleted.
        /// This will post notifications tracking progress.
        /// </summary>
        public void Undelete(List<TModel> items)
        {
            if (!items.Any()) return;

            var notification = new ProgressNotification
            {
                CompletionText = "Restored all deleted items!",
                Progress = 0,
                State = ProgressNotificationState.Active,
            };

            PostNotification?.Invoke(notification);

            int i = 0;

            using (ContextFactory.GetForWrite())
            {
                foreach (var item in items)
                {
                    if (notification.State == ProgressNotificationState.Cancelled)
                        // user requested abort
                        return;

                    notification.Text = $"Restoring ({++i} of {items.Count})";

                    Undelete(item);

                    notification.Progress = (float)i / items.Count;
                }
            }

            notification.State = ProgressNotificationState.Completed;
        }

        /// <summary>
        /// Restore an item that was previously deleted. Is a no-op if the item is not in a deleted state, or has its protected flag set.
        /// </summary>
        /// <param name="item">The item to restore</param>
        public void Undelete(TModel item)
        {
            using (var usage = ContextFactory.GetForWrite())
            {
                usage.Context.ChangeTracker.AutoDetectChangesEnabled = false;

                if (!ModelStore.Undelete(item)) return;

                Files.Reference(item.Files.Select(f => f.FileInfo).ToArray());

                usage.Context.ChangeTracker.AutoDetectChangesEnabled = true;
            }
        }

        /// <summary>
        /// Create all required <see cref="FileInfo"/>s for the provided archive, adding them to the global file store.
        /// </summary>
        private List<TFileModel> createFileInfos(ArchiveReader reader, FileStore files)
        {
            var fileInfos = new List<TFileModel>();

            // import files to manager
            foreach (string file in reader.Filenames)
                using (Stream s = reader.GetStream(file))
                    fileInfos.Add(new TFileModel
                    {
                        Filename = FileSafety.PathSanitise(file),
                        FileInfo = files.Add(s)
                    });

            return fileInfos;
        }

        #region osu-stable import

        /// <summary>
        /// Set a storage with access to an osu-stable install for import purposes.
        /// </summary>
        public Func<Storage> GetStableStorage { private get; set; }

        /// <summary>
        /// Denotes whether an osu-stable installation is present to perform automated imports from.
        /// </summary>
        public bool StableInstallationAvailable => GetStableStorage?.Invoke() != null;

        /// <summary>
        /// The relative path from osu-stable's data directory to import items from.
        /// </summary>
        protected virtual string ImportFromStablePath => null;

        /// <summary>
        /// This is a temporary method and will likely be replaced by a full-fledged (and more correctly placed) migration process in the future.
        /// </summary>
        public Task ImportFromStableAsync()
        {
            var stable = GetStableStorage?.Invoke();

            if (stable == null)
            {
                Logger.Log("No osu!stable installation available!", LoggingTarget.Information, LogLevel.Error);
                return Task.CompletedTask;
            }

            return Task.Factory.StartNew(() => Import(stable.GetDirectories(ImportFromStablePath).Select(f => stable.GetFullPath(f)).ToArray()), TaskCreationOptions.LongRunning);
        }

        #endregion

        /// <summary>
        /// Create a barebones model from the provided archive.
        /// Actual expensive population should be done in <see cref="Populate"/>; this should just prepare for duplicate checking.
        /// </summary>
        /// <param name="archive">The archive to create the model for.</param>
        /// <returns>A model populated with minimal information. Returning a null will abort importing silently.</returns>
        protected abstract TModel CreateModel(ArchiveReader archive);

        /// <summary>
        /// Populate the provided model completely from the given archive.
        /// After this method, the model should be in a state ready to commit to a store.
        /// </summary>
        /// <param name="model">The model to populate.</param>
        /// <param name="archive">The archive to use as a reference for population. May be null.</param>
        protected virtual void Populate(TModel model, [CanBeNull] ArchiveReader archive)
        {
        }

        protected virtual TModel CheckForExisting(TModel model) => null;

        private DbSet<TModel> queryModel() => ContextFactory.Get().Set<TModel>();

        /// <summary>
        /// Creates an <see cref="ArchiveReader"/> from a valid storage path.
        /// </summary>
        /// <param name="path">A file or folder path resolving the archive content.</param>
        /// <returns>A reader giving access to the archive's content.</returns>
        private ArchiveReader getReaderFrom(string path)
        {
            if (ZipUtils.IsZipArchive(path))
                return new ZipArchiveReader(File.Open(path, FileMode.Open, FileAccess.Read, FileShare.Read), Path.GetFileName(path));
            if (Directory.Exists(path))
                return new LegacyFilesystemReader(path);
            throw new InvalidFormatException($"{path} is not a valid archive");
        }
    }
}