Initial file updating implementation

smoogipoo 2020-01-08 12:36:07 +09:00
parent 681a43a318
commit 609102bea3
2 changed files with 49 additions and 10 deletions

View File

@@ -19,6 +19,7 @@ using osu.Game.Beatmaps.Formats;
 using osu.Game.Database;
 using osu.Game.IO;
 using osu.Game.IO.Archives;
+using osu.Game.Rulesets.Osu.Objects;
 using osu.Game.Tests.Resources;
 using SharpCompress.Archives;
 using SharpCompress.Archives.Zip;
@@ -579,16 +580,23 @@ namespace osu.Game.Tests.Beatmaps.IO
                         using (var writer = new StreamWriter(stream, leaveOpen: true))
                         {
                             beatmapToUpdate.BeatmapInfo.Version = "updated";
+                            beatmapToUpdate.HitObjects.Clear();
+                            beatmapToUpdate.HitObjects.Add(new HitCircle { StartTime = 5000 });
                             new LegacyBeatmapEncoder(beatmapToUpdate).Encode(writer);
                         }
                         stream.Seek(0, SeekOrigin.Begin);
-                        using (var reader = new UpdateArchiveReader<BeatmapSetInfo, BeatmapSetFileInfo>(manager.Files.Store, setToUpdate, fileToUpdate, stream))
-                            await manager.Import(setToUpdate, reader);
+                        manager.Update(setToUpdate);
+                        manager.UpdateFile(fileToUpdate, stream);
                     }
-                    var allBeatmaps = manager.GetAllUsableBeatmapSets();
+                    Beatmap updatedBeatmap = (Beatmap)manager.GetWorkingBeatmap(manager.QueryBeatmap(b => b.ID == beatmapToUpdate.BeatmapInfo.ID)).Beatmap;
+                    Assert.That(updatedBeatmap.BeatmapInfo.Version, Is.EqualTo("updated"));
+                    Assert.That(updatedBeatmap.HitObjects.Count, Is.EqualTo(1));
+                    Assert.That(updatedBeatmap.HitObjects[0].StartTime, Is.EqualTo(5000));
                 }
                 finally
                 {
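The key change above is that the test no longer rebuilds an archive and re-imports it through UpdateArchiveReader; the re-encoded .osu is pushed back through the manager directly. A condensed sketch of that flow (reusing the test's own locals, not additional diff content):

// Condensed from the test above; `manager`, `beatmapToUpdate`, `setToUpdate` and
// `fileToUpdate` are the test's existing locals, not new API surface.
using (var stream = new MemoryStream())
{
    // leaveOpen keeps the MemoryStream usable after the writer is disposed.
    using (var writer = new StreamWriter(stream, leaveOpen: true))
        new LegacyBeatmapEncoder(beatmapToUpdate).Encode(writer);

    // Rewind so the copy performed by UpdateFile starts from the first byte of the encoded beatmap.
    stream.Seek(0, SeekOrigin.Begin);

    manager.Update(setToUpdate);              // re-hash the set from its current files
    manager.UpdateFile(fileToUpdate, stream); // overwrite the stored .osu contents in place
}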

View File

@@ -261,18 +261,24 @@ namespace osu.Game.Database
         /// <remarks>
         /// In the case of no matching files, a hash will be generated from the passed archive's <see cref="ArchiveReader.Name"/>.
         /// </remarks>
-        private string computeHash(ArchiveReader reader)
+        private string computeHash(TModel item, ArchiveReader reader = null)
         {
             // for now, concatenate all .osu files in the set to create a unique hash.
             MemoryStream hashable = new MemoryStream();
-            foreach (string file in reader.Filenames.Where(f => HashableFileTypes.Any(f.EndsWith)))
+            foreach (TFileModel file in item.Files.Where(f => HashableFileTypes.Any(f.Filename.EndsWith)))
             {
-                using (Stream s = reader.GetStream(file))
+                using (Stream s = Files.Store.GetStream(file.FileInfo.StoragePath))
                     s.CopyTo(hashable);
             }
-            return hashable.Length > 0 ? hashable.ComputeSHA2Hash() : reader.Name.ComputeSHA2Hash();
+            if (hashable.Length > 0)
+                return hashable.ComputeSHA2Hash();
+            if (reader != null)
+                return reader.Name.ComputeSHA2Hash();
+            return item.Hash;
         }
         /// <summary>
@@ -302,7 +308,7 @@ namespace osu.Game.Database
             LogForModel(item, "Beginning import...");
             item.Files = archive != null ? createFileInfos(archive, Files) : new List<TFileModel>();
-            item.Hash = archive != null ? computeHash(archive) : item.Hash;
+            item.Hash = computeHash(item, archive);
             await Populate(item, archive, cancellationToken);
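With this change computeHash works from the model's own stored files rather than the archive, falling back to the archive name and finally to the model's existing hash. As a rough, self-contained illustration of the hashing step (assuming ComputeSHA2Hash is osu.Framework's SHA-256 hex-digest helper, which is not shown in this diff):

using System;
using System.Collections.Generic;
using System.IO;
using System.Security.Cryptography;

static class HashSketch
{
    // Concatenate every hashable file's contents and hash the result, mirroring the
    // "concatenate all .osu files" comment in computeHash above.
    public static string HashConcatenatedFiles(IEnumerable<Stream> hashableFiles, string fallback)
    {
        var hashable = new MemoryStream();

        foreach (var file in hashableFiles)
            file.CopyTo(hashable);

        if (hashable.Length == 0)
            return fallback; // stands in for the reader.Name / item.Hash fallbacks

        hashable.Seek(0, SeekOrigin.Begin);

        using (var sha = SHA256.Create())
            return BitConverter.ToString(sha.ComputeHash(hashable)).Replace("-", string.Empty).ToLowerInvariant();
    }
}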
@@ -358,12 +364,37 @@ namespace osu.Game.Database
                 return item;
             }, cancellationToken, TaskCreationOptions.HideScheduler, import_scheduler).Unwrap();
+        public void UpdateFile(TFileModel file, Stream contents)
+        {
+            using (ContextFactory.GetForWrite()) // used to share a context for full import. keep in mind this will block all writes.
+            {
+                var existingModels = ModelStore.ConsumableItems.Where(b => b.Files.Any(f => f.FileInfoID == file.FileInfoID)).ToList();
+                if (existingModels.Count == 0)
+                    throw new InvalidOperationException($"Cannot update files of models not contained by this {nameof(ArchiveModelManager<TModel, TFileModel>)}.");
+                using (var stream = Files.Storage.GetStream(file.FileInfo.StoragePath, FileAccess.Write, FileMode.Create))
+                    contents.CopyTo(stream);
+                foreach (var model in existingModels)
+                    Update(model);
+            }
+        }
         /// <summary>
         /// Perform an update of the specified item.
-        /// TODO: Support file changes.
+        /// TODO: Support file additions/removals.
         /// </summary>
         /// <param name="item">The item to update.</param>
-        public void Update(TModel item) => ModelStore.Update(item);
+        public void Update(TModel item)
+        {
+            using (ContextFactory.GetForWrite())
+            {
+                item.Hash = computeHash(item);
+                ModelStore.Update(item);
+            }
+        }
         /// <summary>
         /// Delete an item from the manager.
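UpdateFile is the new public entry point: it overwrites the stored contents behind an existing FileInfo, then runs Update on every model referencing it so their hashes stay in sync; per the updated TODO, adding or removing files is still out of scope. A hypothetical caller-side sketch (not part of this commit), using the beatmap model types as an example:

// Hypothetical helper for illustration only; ArchiveModelManager<BeatmapSetInfo, BeatmapSetFileInfo>
// matches the beatmap manager's model types, and UpdateFile/Update are the methods added above.
public static void ReplaceFileContents(ArchiveModelManager<BeatmapSetInfo, BeatmapSetFileInfo> manager, BeatmapSetFileInfo file, byte[] newContents)
{
    using (var stream = new MemoryStream(newContents))
    {
        // Writes the new bytes over the file's storage path and calls Update() on each
        // model that references this FileInfo, which recomputes that model's hash.
        manager.UpdateFile(file, stream);
    }
}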