Mirror of https://github.com/ppy/osu.git

Merge pull request #13673 from peppy/import-early-checksum-abort
Shortcut import process to speed up duplicate imports

Commit: a9084db665
@@ -192,6 +192,7 @@ namespace osu.Game.Tests.Beatmaps.IO
         }

         [Test]
+        [Ignore("intentionally broken by import optimisations")]
         public async Task TestImportThenImportWithChangedFile()
         {
             using (HeadlessGameHost host = new CleanRunHeadlessGameHost(nameof(ImportBeatmapTest)))
@@ -294,6 +295,7 @@ namespace osu.Game.Tests.Beatmaps.IO
         }

         [Test]
+        [Ignore("intentionally broken by import optimisations")]
         public async Task TestImportCorruptThenImport()
         {
             // unfortunately for the time being we need to reference osu.Framework.Desktop for a game host here.
@@ -439,12 +441,11 @@ namespace osu.Game.Tests.Beatmaps.IO
             }
         }

-        [TestCase(true)]
-        [TestCase(false)]
-        public async Task TestImportThenDeleteThenImportWithOnlineIDMismatch(bool set)
+        [Test]
+        public async Task TestImportThenDeleteThenImportWithOnlineIDsMissing()
         {
             // unfortunately for the time being we need to reference osu.Framework.Desktop for a game host here.
-            using (HeadlessGameHost host = new CleanRunHeadlessGameHost($"{nameof(ImportBeatmapTest)}-{set}"))
+            using (HeadlessGameHost host = new CleanRunHeadlessGameHost($"{nameof(ImportBeatmapTest)}"))
             {
                 try
                 {
@@ -452,10 +453,8 @@ namespace osu.Game.Tests.Beatmaps.IO

                     var imported = await LoadOszIntoOsu(osu);

-                    if (set)
-                        imported.OnlineBeatmapSetID = 1234;
-                    else
-                        imported.Beatmaps.First().OnlineBeatmapID = 1234;
+                    foreach (var b in imported.Beatmaps)
+                        b.OnlineBeatmapID = null;

                     osu.Dependencies.Get<BeatmapManager>().Update(imported);
@@ -317,6 +317,14 @@ namespace osu.Game.Beatmaps
         /// <returns>The first result for the provided query, or null if no results were found.</returns>
         public BeatmapSetInfo QueryBeatmapSet(Expression<Func<BeatmapSetInfo, bool>> query) => beatmaps.ConsumableItems.AsNoTracking().FirstOrDefault(query);

+        protected override bool CanSkipImport(BeatmapSetInfo existing, BeatmapSetInfo import)
+        {
+            if (!base.CanReuseExisting(existing, import))
+                return false;
+
+            return existing.Beatmaps.Any(b => b.OnlineBeatmapID != null);
+        }
+
         protected override bool CanReuseExisting(BeatmapSetInfo existing, BeatmapSetInfo import)
         {
             if (!base.CanReuseExisting(existing, import))
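The override above is what lets the beatmap manager veto the new optimised skip: only sets that already carry online IDs are treated as safe to skip. A minimal standalone sketch of the same veto pattern follows; the stub types and the ShouldSkip helper are illustrative only, not the actual osu! classes.

```csharp
// Minimal sketch of the skip-veto pattern; simplified stand-ins, not the osu! API.
using System;
using System.Collections.Generic;
using System.Linq;

class BeatmapStub
{
    public int? OnlineBeatmapID;
}

class SetStub
{
    public List<BeatmapStub> Beatmaps = new List<BeatmapStub>();
}

class ManagerBase
{
    // By default the fast duplicate check is allowed to short-circuit an import.
    protected virtual bool CanSkipImport(SetStub existing, SetStub import) => true;

    public bool ShouldSkip(SetStub existing, SetStub import) => CanSkipImport(existing, import);
}

class BeatmapManagerStub : ManagerBase
{
    // Mirrors the intent of the override above: only skip when the existing set
    // already carries online IDs, otherwise fall through to a full import.
    protected override bool CanSkipImport(SetStub existing, SetStub import)
        => base.CanSkipImport(existing, import)
           && existing.Beatmaps.Any(b => b.OnlineBeatmapID != null);
}

static class Demo
{
    static void Main()
    {
        var manager = new BeatmapManagerStub();
        var withIds = new SetStub { Beatmaps = { new BeatmapStub { OnlineBeatmapID = 1 } } };
        var withoutIds = new SetStub { Beatmaps = { new BeatmapStub() } };

        Console.WriteLine(manager.ShouldSkip(withIds, new SetStub()));    // True
        Console.WriteLine(manager.ShouldSkip(withoutIds, new SetStub())); // False
    }
}
```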
@@ -78,7 +78,7 @@ namespace osu.Game.Database

        private readonly Bindable<WeakReference<TModel>> itemRemoved = new Bindable<WeakReference<TModel>>();

-        public virtual IEnumerable<string> HandledExtensions => new[] { ".zip" };
+        public virtual IEnumerable<string> HandledExtensions => new[] { @".zip" };

        protected readonly FileStore Files;

@@ -99,7 +99,7 @@ namespace osu.Game.Database
            ModelStore.ItemUpdated += item => handleEvent(() => itemUpdated.Value = new WeakReference<TModel>(item));
            ModelStore.ItemRemoved += item => handleEvent(() => itemRemoved.Value = new WeakReference<TModel>(item));

-            exportStorage = storage.GetStorageForDirectory("exports");
+            exportStorage = storage.GetStorageForDirectory(@"exports");

            Files = new FileStore(contextFactory, storage);

@@ -282,7 +282,7 @@ namespace osu.Game.Database
            }
            catch (Exception e)
            {
-                LogForModel(model, $"Model creation of {archive.Name} failed.", e);
+                LogForModel(model, @$"Model creation of {archive.Name} failed.", e);
                return null;
            }

@@ -309,6 +309,12 @@ namespace osu.Game.Database
            Logger.Log($"{prefix} {message}", LoggingTarget.Database);
        }

+        /// <summary>
+        /// Whether the implementation overrides <see cref="ComputeHash"/> with a custom implementation.
+        /// Custom hash implementations must bypass the early exit in the import flow (see <see cref="computeHashFast"/> usage).
+        /// </summary>
+        protected virtual bool HasCustomHashFunction => false;
+
        /// <summary>
        /// Create a SHA-2 hash from the provided archive based on file content of all files matching <see cref="HashableFileTypes"/>.
        /// </summary>
@@ -317,7 +323,11 @@ namespace osu.Game.Database
        /// </remarks>
        protected virtual string ComputeHash(TModel item, ArchiveReader reader = null)
        {
-            // for now, concatenate all .osu files in the set to create a unique hash.
+            if (reader != null)
+                // fast hashing for cases where the item's files may not be populated.
+                return computeHashFast(reader);
+
+            // for now, concatenate all hashable files in the set to create a unique hash.
            MemoryStream hashable = new MemoryStream();

            foreach (TFileModel file in item.Files.Where(f => HashableFileTypes.Any(ext => f.Filename.EndsWith(ext, StringComparison.OrdinalIgnoreCase))).OrderBy(f => f.Filename))
@@ -329,9 +339,6 @@ namespace osu.Game.Database
            if (hashable.Length > 0)
                return hashable.ComputeSHA2Hash();

-            if (reader != null)
-                return reader.Name.ComputeSHA2Hash();
-
            return item.Hash;
        }

@@ -348,19 +355,48 @@ namespace osu.Game.Database

            delayEvents();

+            bool checkedExisting = false;
+            TModel existing = null;
+
+            if (archive != null && !HasCustomHashFunction)
+            {
+                // this is a fast bail condition to improve large import performance.
+                item.Hash = computeHashFast(archive);
+
+                checkedExisting = true;
+                existing = CheckForExisting(item);
+
+                if (existing != null)
+                {
+                    // bare minimum comparisons
+                    //
+                    // note that this should really be checking filesizes on disk (of existing files) for some degree of sanity.
+                    // or alternatively doing a faster hash check. either of these require database changes and reprocessing of existing files.
+                    if (CanSkipImport(existing, item) &&
+                        getFilenames(existing.Files).SequenceEqual(getShortenedFilenames(archive).Select(p => p.shortened).OrderBy(f => f)))
+                    {
+                        LogForModel(item, @$"Found existing (optimised) {HumanisedModelName} for {item} (ID {existing.ID}) – skipping import.");
+                        Undelete(existing);
+                        return existing;
+                    }
+
+                    LogForModel(item, @"Found existing (optimised) but failed pre-check.");
+                }
+            }
+
            void rollback()
            {
                if (!Delete(item))
                {
                    // We may have not yet added the model to the underlying table, but should still clean up files.
-                    LogForModel(item, "Dereferencing files for incomplete import.");
+                    LogForModel(item, @"Dereferencing files for incomplete import.");
                    Files.Dereference(item.Files.Select(f => f.FileInfo).ToArray());
                }
            }

            try
            {
-                LogForModel(item, "Beginning import...");
+                LogForModel(item, @"Beginning import...");

                item.Files = archive != null ? createFileInfos(archive, Files) : new List<TFileModel>();
                item.Hash = ComputeHash(item, archive);
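Taken together, the block above implements the early exit: when an archive is supplied and the manager has no custom hash function, a cheap hash is computed straight from the archive streams, an existing model with the same hash is looked up, and the import is skipped only if the subclass allows it and the stored filenames match the shortened archive filenames. A compressed, compile-only sketch of that decision flow follows; every delegate and name in it is a stand-in for the corresponding manager method, not the actual osu! API.

```csharp
// Compressed sketch of the early-exit decision; all delegates are stand-ins.
using System;
using System.Collections.Generic;
using System.Linq;

static class EarlyExitSketch
{
    public static TModel TrySkipImport<TModel>(
        TModel item,
        bool hasCustomHashFunction,
        Func<string> computeHashFast,             // cheap hash over raw archive streams
        Action<TModel, string> assignHash,        // item.Hash = ...
        Func<TModel, TModel> checkForExisting,    // hash-based lookup, null if none
        Func<TModel, TModel, bool> canSkipImport, // subclass veto (e.g. online IDs present)
        Func<TModel, IEnumerable<string>> storedFilenames,
        IEnumerable<string> archiveFilenamesShortened)
        where TModel : class
    {
        if (hasCustomHashFunction)
            return null; // the fast hash wouldn't match a custom hash; fall through.

        assignHash(item, computeHashFast());

        var existing = checkForExisting(item);
        if (existing == null)
            return null;

        // bare-minimum comparison: same hash and same (shortened) filename list.
        if (canSkipImport(existing, item) &&
            storedFilenames(existing).SequenceEqual(archiveFilenamesShortened.OrderBy(f => f)))
            return existing; // skip the import and reuse this model.

        return null; // pre-check failed; continue with the normal import path.
    }
}
```

The real path additionally undeletes the existing model before returning it; the sketch leaves that to the caller.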
@@ -371,22 +407,24 @@ namespace osu.Game.Database
            {
                try
                {
-                    if (!write.IsTransactionLeader) throw new InvalidOperationException($"Ensure there is no parent transaction so errors can correctly be handled by {this}");
+                    if (!write.IsTransactionLeader) throw new InvalidOperationException(@$"Ensure there is no parent transaction so errors can correctly be handled by {this}");

-                    var existing = CheckForExisting(item);
+                    if (!checkedExisting)
+                        existing = CheckForExisting(item);

                    if (existing != null)
                    {
                        if (CanReuseExisting(existing, item))
                        {
                            Undelete(existing);
-                            LogForModel(item, $"Found existing {HumanisedModelName} for {item} (ID {existing.ID}) – skipping import.");
+                            LogForModel(item, @$"Found existing {HumanisedModelName} for {item} (ID {existing.ID}) – skipping import.");
                            // existing item will be used; rollback new import and exit early.
                            rollback();
                            flushEvents(true);
                            return existing;
                        }

+                        LogForModel(item, @"Found existing but failed re-use check.");
                        Delete(existing);
                        ModelStore.PurgeDeletable(s => s.ID == existing.ID);
                    }
@@ -403,12 +441,12 @@ namespace osu.Game.Database
                    }
                }

-                LogForModel(item, "Import successfully completed!");
+                LogForModel(item, @"Import successfully completed!");
            }
            catch (Exception e)
            {
                if (!(e is TaskCanceledException))
-                    LogForModel(item, "Database import or population failed and has been rolled back.", e);
+                    LogForModel(item, @"Database import or population failed and has been rolled back.", e);

                rollback();
                flushEvents(false);
@@ -428,7 +466,7 @@ namespace osu.Game.Database
            var retrievedItem = ModelStore.ConsumableItems.FirstOrDefault(s => s.ID == item.ID);

            if (retrievedItem == null)
-                throw new ArgumentException("Specified model could not be found", nameof(item));
+                throw new ArgumentException(@"Specified model could not be found", nameof(item));

            using (var outputStream = exportStorage.GetStream($"{getValidFilename(item.ToString())}{HandledExtensions.First()}", FileAccess.Write, FileMode.Create))
                ExportModelTo(retrievedItem, outputStream);
@@ -637,6 +675,22 @@ namespace osu.Game.Database
            }
        }

+        private string computeHashFast(ArchiveReader reader)
+        {
+            MemoryStream hashable = new MemoryStream();
+
+            foreach (var file in reader.Filenames.Where(f => HashableFileTypes.Any(ext => f.EndsWith(ext, StringComparison.OrdinalIgnoreCase))).OrderBy(f => f))
+            {
+                using (Stream s = reader.GetStream(file))
+                    s.CopyTo(hashable);
+            }
+
+            if (hashable.Length > 0)
+                return hashable.ComputeSHA2Hash();
+
+            return reader.Name.ComputeSHA2Hash();
+        }
+
        /// <summary>
        /// Create all required <see cref="IO.FileInfo"/>s for the provided archive, adding them to the global file store.
        /// </summary>
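The computeHashFast method added above hashes the raw archive streams of hashable files without creating any FileInfo records first, which is what makes the early duplicate check cheap. ComputeSHA2Hash is an osu!framework extension; the standalone approximation below uses System.Security.Cryptography directly. The dictionary-based archive representation and the ".osu" extension list are assumptions for the sketch, and the real method additionally falls back to hashing the archive name when no hashable files are present.

```csharp
// Standalone approximation of the fast-hash idea: concatenate the contents of
// the hashable files in filename order, then SHA-256 the result.
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Security.Cryptography;

static class FastHashSketch
{
    // Assumption for the sketch: ".osu" as the only hashable extension (beatmap sets).
    private static readonly string[] hashable_extensions = { ".osu" };

    public static string ComputeHashFast(IReadOnlyDictionary<string, byte[]> archiveFiles)
    {
        var buffer = new MemoryStream();

        // Same ordering rule as the diff: filter to hashable files, sort by filename,
        // then concatenate contents so the hash is stable across archive layouts.
        foreach (var file in archiveFiles
                     .Where(f => hashable_extensions.Any(ext => f.Key.EndsWith(ext, StringComparison.OrdinalIgnoreCase)))
                     .OrderBy(f => f.Key))
        {
            buffer.Write(file.Value, 0, file.Value.Length);
        }

        using var sha = SHA256.Create();
        return BitConverter.ToString(sha.ComputeHash(buffer.ToArray())).Replace("-", "").ToLowerInvariant();
    }
}
```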
@@ -644,18 +698,14 @@ namespace osu.Game.Database
        {
            var fileInfos = new List<TFileModel>();

-            string prefix = reader.Filenames.GetCommonPrefix();
-            if (!(prefix.EndsWith('/') || prefix.EndsWith('\\')))
-                prefix = string.Empty;
-
            // import files to manager
-            foreach (string file in reader.Filenames)
+            foreach (var filenames in getShortenedFilenames(reader))
            {
-                using (Stream s = reader.GetStream(file))
+                using (Stream s = reader.GetStream(filenames.original))
                {
                    fileInfos.Add(new TFileModel
                    {
-                        Filename = file.Substring(prefix.Length).ToStandardisedPath(),
+                        Filename = filenames.shortened,
                        FileInfo = files.Add(s)
                    });
                }
@@ -664,6 +714,17 @@ namespace osu.Game.Database
            return fileInfos;
        }

+        private IEnumerable<(string original, string shortened)> getShortenedFilenames(ArchiveReader reader)
+        {
+            string prefix = reader.Filenames.GetCommonPrefix();
+            if (!(prefix.EndsWith('/') || prefix.EndsWith('\\')))
+                prefix = string.Empty;
+
+            // import files to manager
+            foreach (string file in reader.Filenames)
+                yield return (file, file.Substring(prefix.Length).ToStandardisedPath());
+        }
+
        #region osu-stable import

        /// <summary>
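getShortenedFilenames strips a common leading directory from every archive entry so that filename lists compare equal regardless of whether the archive wraps its content in a folder. GetCommonPrefix and ToStandardisedPath are osu!framework extensions; the sketch below reimplements just enough equivalent logic, under that assumption, to show the behaviour.

```csharp
// Standalone sketch of the prefix-shortening idea; helper logic is a stand-in
// for the osu!framework GetCommonPrefix / ToStandardisedPath extensions.
using System;
using System.Collections.Generic;
using System.Linq;

static class ShortenedFilenamesSketch
{
    public static IEnumerable<(string original, string shortened)> GetShortenedFilenames(IReadOnlyList<string> filenames)
    {
        string prefix = commonPrefix(filenames);

        // Only strip the prefix when it ends at a directory separator, never mid-filename.
        if (!(prefix.EndsWith("/", StringComparison.Ordinal) || prefix.EndsWith("\\", StringComparison.Ordinal)))
            prefix = string.Empty;

        foreach (string file in filenames)
            yield return (file, file.Substring(prefix.Length).Replace('\\', '/'));
    }

    private static string commonPrefix(IReadOnlyList<string> filenames)
    {
        if (filenames.Count == 0) return string.Empty;

        string prefix = filenames[0];

        foreach (string name in filenames.Skip(1))
        {
            int i = 0;
            while (i < prefix.Length && i < name.Length && prefix[i] == name[i]) i++;
            prefix = prefix.Substring(0, i);
        }

        return prefix;
    }
}
```

For example, { "My Set/song.osu", "My Set/audio.mp3" } shortens to "song.osu" and "audio.mp3", which is what allows the early duplicate check to match a previously imported set by filename.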
@@ -696,7 +757,7 @@ namespace osu.Game.Database
            {
                string fullPath = storage.GetFullPath(ImportFromStablePath);

-                Logger.Log($"Folder \"{fullPath}\" not available in the target osu!stable installation to import {HumanisedModelName}s.", LoggingTarget.Information, LogLevel.Error);
+                Logger.Log(@$"Folder ""{fullPath}"" not available in the target osu!stable installation to import {HumanisedModelName}s.", LoggingTarget.Information, LogLevel.Error);
                return Task.CompletedTask;
            }

@@ -744,6 +805,15 @@ namespace osu.Game.Database
        /// <returns>An existing model which matches the criteria to skip importing, else null.</returns>
        protected TModel CheckForExisting(TModel model) => model.Hash == null ? null : ModelStore.ConsumableItems.FirstOrDefault(b => b.Hash == model.Hash);

+        /// <summary>
+        /// Whether import can be skipped after finding an existing import early in the process.
+        /// Only valid when <see cref="ComputeHash"/> is not overridden.
+        /// </summary>
+        /// <param name="existing">The existing model.</param>
+        /// <param name="import">The newly imported model.</param>
+        /// <returns>Whether to skip this import completely.</returns>
+        protected virtual bool CanSkipImport(TModel existing, TModel import) => true;
+
        /// <summary>
        /// After an existing <typeparamref name="TModel"/> is found during an import process, the default behaviour is to use/restore the existing
        /// item and skip the import. This method allows changing that behaviour.
@@ -771,7 +841,7 @@ namespace osu.Game.Database

        private DbSet<TModel> queryModel() => ContextFactory.Get().Set<TModel>();

-        protected virtual string HumanisedModelName => $"{typeof(TModel).Name.Replace("Info", "").ToLower()}";
+        protected virtual string HumanisedModelName => $"{typeof(TModel).Name.Replace(@"Info", "").ToLower()}";

        #region Event handling / delaying

@@ -125,6 +125,8 @@ namespace osu.Game.Skinning

        private const string unknown_creator_string = "Unknown";

+        protected override bool HasCustomHashFunction => true;
+
        protected override string ComputeHash(SkinInfo item, ArchiveReader reader = null)
        {
            // we need to populate early to create a hash based off skin.ini contents
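SkinManager derives its hash from parsed skin.ini metadata rather than raw archive bytes, so it opts out of the archive-level fast path by reporting a custom hash function. A minimal sketch of that opt-out pattern is shown below; the class names are illustrative, only the HasCustomHashFunction property mirrors the diff.

```csharp
// Minimal sketch of the opt-out shown above; class names are illustrative.
abstract class ArchiveManagerSketch
{
    // Managers that hash raw archive contents leave this false and get the
    // early duplicate check for free.
    protected virtual bool HasCustomHashFunction => false;
}

class SkinManagerSketch : ArchiveManagerSketch
{
    // A hash derived from parsed skin.ini metadata cannot be reproduced by the
    // raw archive hash, so the fast path must be bypassed.
    protected override bool HasCustomHashFunction => true;
}
```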