diff --git a/Config/config.json b/Config/config.json
index ec09830..a7a9354 100644
--- a/Config/config.json
+++ b/Config/config.json
@@ -1,5 +1,5 @@
 {
-  "serverLocation": "D:\\Spt Stuff\\server",
+  "serverLocation": "E:\\spt\\Server",
   "threads": 20,
   "threadPoolingTimeoutMs": 1000,
   "jsonSerializer": "DotNet",
@@ -24,13 +24,14 @@
       "cleanupTempFolderAfterProcess": true
     },
     "intakeReaderConfig": {
+      "maxDumpsPerMap": 1500,
       "readerType": "Json",
       "ignoredDumpLocations": [
         "Hideout"
       ]
     },
     "dumpFilesLocation": [
-      "D:\\Spt Stuff\\Lootgenerator\\dumps\\input"
+      "E:\\spt\\dumps\\input"
     ],
     "thresholdDate": "2023-01-08",
     "acceptedFileExtensions": [
@@ -46,6 +47,6 @@
     "spawnPointToleranceForForced": 99.5
   },
   "writerConfig": {
-    "outputLocation": "D:\\Spt Stuff\\Lootgenerator\\dumps\\output"
+    "outputLocation": "E:\\spt\\dumps\\output"
   }
 }
\ No newline at end of file
diff --git a/Model/Config/IntakeReaderConfig.cs b/Model/Config/IntakeReaderConfig.cs
index b3e820f..9522bed 100644
--- a/Model/Config/IntakeReaderConfig.cs
+++ b/Model/Config/IntakeReaderConfig.cs
@@ -10,6 +10,11 @@ public class IntakeReaderConfig
     [JsonPropertyName("readerType")]
     public IntakeReaderTypes IntakeReaderType { get; set; } = IntakeReaderTypes.Json;
 
+    [JsonProperty("maxDumpsPerMap")]
+    [JsonPropertyName("maxDumpsPerMap")]
+    public int MaxDumpsPerMap { get; set; } = 1500;
+
+    [JsonProperty("ignoredDumpLocations")]
     [JsonPropertyName("ignoredDumpLocations")]
     public List<string> IgnoredDumpLocations { get; set; } = new List<string>();
 
diff --git a/Process/QueuePipeline.cs b/Process/QueuePipeline.cs
index c6689e3..603ddc3 100644
--- a/Process/QueuePipeline.cs
+++ b/Process/QueuePipeline.cs
@@ -41,8 +41,13 @@ public class QueuePipeline : IPipeline
 
         try
         {
-            // Gather all files, then add them into the processing queue
-            GatherFiles().ForEach(f => _filesToProcess.Add(f));
+            // Gather all files, sort them by date descending and then add them into the processing queue
+            GatherFiles().OrderByDescending(f =>
+                {
+                    FileDateParser.TryParseFileDate(f, out var date);
+                    return date;
+                }
+            ).ToList().ForEach(f => _filesToProcess.Add(f));
 
             // We startup all the threads and collect them into a runners list
             for (int i = 0; i < threads; i++)
diff --git a/Process/Reader/Intake/JsonFileIntakeReader.cs b/Process/Reader/Intake/JsonFileIntakeReader.cs
index 4243134..de3ed67 100644
--- a/Process/Reader/Intake/JsonFileIntakeReader.cs
+++ b/Process/Reader/Intake/JsonFileIntakeReader.cs
@@ -1,5 +1,4 @@
-using System.Globalization;
-using System.Text.RegularExpressions;
+using System.Collections.Concurrent;
 using LootDumpProcessor.Logger;
 using LootDumpProcessor.Model.Input;
 using LootDumpProcessor.Model.Processing;
@@ -15,27 +14,40 @@ public class JsonFileIntakeReader : IIntakeReader
     private static readonly HashSet<string>? _ignoredLocations = LootDumpProcessorContext.GetConfig().ReaderConfig.IntakeReaderConfig?.IgnoredDumpLocations.ToHashSet();
 
-    private static Regex FileNameDateRegex = new("([0-9]{4}(-[0-9]{2}){2}_((-){0,1}[0-9]{2}){3})");
-
+    private readonly ConcurrentDictionary<string, int> _totalMapDumpsCounter = new();
+
     public bool Read(string file, out BasicInfo basicInfo)
     {
         var fileData = File.ReadAllText(file);
-        var unparsedDate = FileNameDateRegex.Match(file).Groups[1].Value;
-        var date = DateTime.ParseExact(unparsedDate, "yyyy-MM-dd_HH-mm-ss", CultureInfo.InvariantCulture);
+        // If the file format changes it may screw up this date parser
+        if (!FileDateParser.TryParseFileDate(file, out var date))
+            LoggerFactory.GetInstance().Log($"Couldn't parse date from file: {file}", LogLevel.Error);
 
         var fi = _jsonSerializer.Deserialize<RootData>(fileData);
 
         if (fi.Data?.Name != null && (!_ignoredLocations?.Contains(fi.Data.Name) ?? true))
         {
-            basicInfo = new BasicInfo
+            int counter;
+            if (!_totalMapDumpsCounter.TryGetValue(fi.Data.Name, out counter))
             {
-                Map = fi.Data.Name,
-                FileHash = ProcessorUtil.HashFile(fileData),
-                Data = fi,
-                Date = date,
-                FileName = file
-            };
-            LoggerFactory.GetInstance().Log($"File {file} fully read, returning data", LogLevel.Info);
-            return true;
+                counter = 0;
+                _totalMapDumpsCounter[fi.Data.Name] = counter;
+            }
+
+            if (counter < LootDumpProcessorContext.GetConfig().ReaderConfig.IntakeReaderConfig.MaxDumpsPerMap)
+            {
+                basicInfo = new BasicInfo
+                {
+                    Map = fi.Data.Name,
+                    FileHash = ProcessorUtil.HashFile(fileData),
+                    Data = fi,
+                    Date = date.Value,
+                    FileName = file
+                };
+                _totalMapDumpsCounter[fi.Data.Name] += 1;
+                LoggerFactory.GetInstance().Log($"File {file} fully read, returning data", LogLevel.Info);
+                return true;
+            }
+            LoggerFactory.GetInstance().Log($"Ignoring file {file} as the file cap for map {fi.Data.Name} has been reached", LogLevel.Info);
         }
 
         LoggerFactory.GetInstance().Log(