Use new LINQ `Order()`/`OrderDescending()` instead of `OrderBy()`/`OrderByDescending()` when possible

This commit is contained in:
Berkan Diler 2024-02-08 18:01:00 +01:00
parent 7f3646f2d1
commit 6adf0ac01e
16 changed files with 20 additions and 20 deletions

View File

@@ -375,7 +375,7 @@ public override LocalisableString GetVariantName(int variant)
/// <returns>The <see cref="PlayfieldType"/> that corresponds to <paramref name="variant"/>.</returns>
private PlayfieldType getPlayfieldType(int variant)
{
return (PlayfieldType)Enum.GetValues(typeof(PlayfieldType)).Cast<int>().OrderByDescending(i => i).First(v => variant >= v);
return (PlayfieldType)Enum.GetValues(typeof(PlayfieldType)).Cast<int>().OrderDescending().First(v => variant >= v);
}
protected override IEnumerable<HitResult> GetValidHitResults()

View File

@@ -22,7 +22,7 @@ public ManiaScoreProcessor()
}
protected override IEnumerable<HitObject> EnumerateHitObjects(IBeatmap beatmap)
=> base.EnumerateHitObjects(beatmap).OrderBy(ho => ho, JudgementOrderComparer.DEFAULT);
=> base.EnumerateHitObjects(beatmap).Order(JudgementOrderComparer.DEFAULT);
protected override double ComputeTotalScore(double comboProgress, double accuracyProgress, double bonusPortion)
{

View File

@@ -48,7 +48,7 @@ public override double DifficultyValue()
// These sections will not contribute to the difficulty.
var peaks = GetCurrentStrainPeaks().Where(p => p > 0);
List<double> strains = peaks.OrderByDescending(d => d).ToList();
List<double> strains = peaks.OrderDescending().ToList();
// We are reducing the highest strains first to account for extreme difficulty spikes
for (int i = 0; i < Math.Min(strains.Count, ReducedSectionCount); i++)
@@ -59,7 +59,7 @@ public override double DifficultyValue()
// Difficulty is the weighted sum of the highest strains from every section.
// We're sorting from highest to lowest strain.
foreach (double strain in strains.OrderByDescending(d => d))
foreach (double strain in strains.OrderDescending())
{
difficulty += strain * weight;
weight *= DecayWeight;

View File

@@ -81,7 +81,7 @@ public override double DifficultyValue()
double difficulty = 0;
double weight = 1;
foreach (double strain in peaks.OrderByDescending(d => d))
foreach (double strain in peaks.OrderDescending())
{
difficulty += strain * weight;
weight *= 0.9;

View File

@@ -45,6 +45,6 @@ private void updateTournament(ValueChangedEvent<string> newTournament)
Logger.Log("Changing tournament storage: " + GetFullPath(string.Empty));
}
public IEnumerable<string> ListTournaments() => AllTournaments.GetDirectories(string.Empty).OrderBy(directory => directory, StringComparer.CurrentCultureIgnoreCase);
public IEnumerable<string> ListTournaments() => AllTournaments.GetDirectories(string.Empty).Order(StringComparer.CurrentCultureIgnoreCase);
}
}

View File

@@ -266,8 +266,8 @@ protected override bool CanReuseExisting(BeatmapSetInfo existing, BeatmapSetInfo
if (!base.CanReuseExisting(existing, import))
return false;
var existingIds = existing.Beatmaps.Select(b => b.OnlineID).OrderBy(i => i);
var importIds = import.Beatmaps.Select(b => b.OnlineID).OrderBy(i => i);
var existingIds = existing.Beatmaps.Select(b => b.OnlineID).Order();
var importIds = import.Beatmaps.Select(b => b.OnlineID).Order();
// force re-import if we are not in a sane state.
return existing.OnlineID == import.OnlineID && existingIds.SequenceEqual(importIds);

View File

@@ -74,7 +74,7 @@ private void collectionsChanged(IRealmCollection<BeatmapCollection> collections,
}
else
{
foreach (int i in changes.DeletedIndices.OrderByDescending(i => i))
foreach (int i in changes.DeletedIndices.OrderDescending())
filters.RemoveAt(i + 1);
foreach (int i in changes.InsertedIndices)

View File

@@ -279,7 +279,7 @@ await Task.WhenAll(tasks.Select(async task =>
// note that this should really be checking filesizes on disk (of existing files) for some degree of sanity.
// or alternatively doing a faster hash check. either of these require database changes and reprocessing of existing files.
if (CanSkipImport(existing, item) &&
getFilenames(existing.Files).SequenceEqual(getShortenedFilenames(archive).Select(p => p.shortened).OrderBy(f => f)) &&
getFilenames(existing.Files).SequenceEqual(getShortenedFilenames(archive).Select(p => p.shortened).Order()) &&
checkAllFilesExist(existing))
{
LogForModel(item, @$"Found existing (optimised) {HumanisedModelName} for {item} (ID {existing.ID}) skipping import.");
@@ -437,7 +437,7 @@ private string computeHashFast(ArchiveReader reader)
{
MemoryStream hashable = new MemoryStream();
foreach (string? file in reader.Filenames.Where(f => HashableFileTypes.Any(ext => f.EndsWith(ext, StringComparison.OrdinalIgnoreCase))).OrderBy(f => f))
foreach (string? file in reader.Filenames.Where(f => HashableFileTypes.Any(ext => f.EndsWith(ext, StringComparison.OrdinalIgnoreCase))).Order())
{
using (Stream s = reader.GetStream(file))
s.CopyTo(hashable);

View File

@@ -122,7 +122,7 @@ private void beatmapsChanged(IRealmCollection<BeatmapSetInfo> sender, ChangeSet
foreach (int i in changes.InsertedIndices)
beatmapSets.Insert(i, sender[i].ToLive(realm));
foreach (int i in changes.DeletedIndices.OrderByDescending(i => i))
foreach (int i in changes.DeletedIndices.OrderDescending())
beatmapSets.RemoveAt(i);
}

View File

@@ -36,7 +36,7 @@ private void load(FrameworkConfigManager config, OsuConfigManager osuConfig, IDi
{
LabelText = GraphicsSettingsStrings.Renderer,
Current = renderer,
Items = host.GetPreferredRenderersForCurrentPlatform().OrderBy(t => t).Where(t => t != RendererType.Vulkan),
Items = host.GetPreferredRenderersForCurrentPlatform().Order().Where(t => t != RendererType.Vulkan),
Keywords = new[] { @"compatibility", @"directx" },
},
// TODO: this needs to be a custom dropdown at some point

View File

@@ -108,7 +108,7 @@ public override double DifficultyValue()
// Difficulty is the weighted sum of the highest strains from every section.
// We're sorting from highest to lowest strain.
foreach (double strain in peaks.OrderByDescending(d => d))
foreach (double strain in peaks.OrderDescending())
{
difficulty += strain * weight;
weight *= DecayWeight;

View File

@@ -31,8 +31,8 @@ public class CheckBreaks : ICheck
public IEnumerable<Issue> Run(BeatmapVerifierContext context)
{
var startTimes = context.Beatmap.HitObjects.Select(ho => ho.StartTime).OrderBy(x => x).ToList();
var endTimes = context.Beatmap.HitObjects.Select(ho => ho.GetEndTime()).OrderBy(x => x).ToList();
var startTimes = context.Beatmap.HitObjects.Select(ho => ho.StartTime).Order().ToList();
var endTimes = context.Beatmap.HitObjects.Select(ho => ho.GetEndTime()).Order().ToList();
foreach (var breakPeriod in context.Beatmap.Breaks)
{

View File

@@ -188,7 +188,7 @@ public void ApplyToDrawableHitObject(DrawableHitObject drawable)
public void ApplyToBeatmap(IBeatmap beatmap)
{
var hitObjects = getAllApplicableHitObjects(beatmap.HitObjects).ToList();
var endTimes = hitObjects.Select(x => x.GetEndTime()).OrderBy(x => x).Distinct().ToList();
var endTimes = hitObjects.Select(x => x.GetEndTime()).Order().Distinct().ToList();
foreach (HitObject hitObject in hitObjects)
{

View File

@@ -107,7 +107,7 @@ private void prepareDetachedRulesets(RealmAccess realmAccess)
}
}
availableRulesets.AddRange(detachedRulesets.OrderBy(r => r));
availableRulesets.AddRange(detachedRulesets.Order());
});
}

View File

@@ -35,7 +35,7 @@ public static BeatDivisorPresetCollection Custom(int maxDivisor)
presets.Add(maxDivisor / candidate);
}
return new BeatDivisorPresetCollection(BeatDivisorType.Custom, presets.Distinct().OrderBy(d => d));
return new BeatDivisorPresetCollection(BeatDivisorType.Custom, presets.Distinct().Order());
}
}
}

View File

@@ -169,7 +169,7 @@ private void updateInspectorText()
InspectorText.Clear();
double[] sliderVelocities = EditorBeatmap.HitObjects.OfType<IHasSliderVelocity>().Select(sv => sv.SliderVelocityMultiplier).OrderBy(v => v).ToArray();
double[] sliderVelocities = EditorBeatmap.HitObjects.OfType<IHasSliderVelocity>().Select(sv => sv.SliderVelocityMultiplier).Order().ToArray();
AddHeader("Base velocity (from beatmap setup)");
AddValue($"{beatmapVelocity:#,0.00}x");