added limit of dumps per map

Author: Alex 2023-08-13 18:14:54 +01:00
parent 5600ba1783
commit 20fd8d98c4
4 changed files with 43 additions and 20 deletions

View File

@@ -1,5 +1,5 @@
 {
-  "serverLocation": "D:\\Spt Stuff\\server",
+  "serverLocation": "E:\\spt\\Server",
   "threads": 20,
   "threadPoolingTimeoutMs": 1000,
   "jsonSerializer": "DotNet",
@@ -24,13 +24,14 @@
     "cleanupTempFolderAfterProcess": true
   },
   "intakeReaderConfig": {
+    "maxDumpsPerMap": 1500,
     "readerType": "Json",
     "ignoredDumpLocations": [
       "Hideout"
     ]
   },
   "dumpFilesLocation": [
-    "D:\\Spt Stuff\\Lootgenerator\\dumps\\input"
+    "E:\\spt\\dumps\\input"
   ],
   "thresholdDate": "2023-01-08",
   "acceptedFileExtensions": [
@@ -46,6 +47,6 @@
     "spawnPointToleranceForForced": 99.5
   },
   "writerConfig": {
-    "outputLocation": "D:\\Spt Stuff\\Lootgenerator\\dumps\\output"
+    "outputLocation": "E:\\spt\\dumps\\output"
   }
 }
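
The new "maxDumpsPerMap" key caps how many dump files are read per map (1500 by default); the serverLocation, dumpFilesLocation, and outputLocation changes just reflect the author's local paths moving from D:\ to E:\. Downstream, the cap is read through the shared context, using the same accessor chain that appears in JsonFileIntakeReader further below:

    // How the cap is consumed (see the JsonFileIntakeReader diff below)
    var cap = LootDumpProcessorContext.GetConfig().ReaderConfig.IntakeReaderConfig.MaxDumpsPerMap;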

View File

@@ -10,6 +10,11 @@ public class IntakeReaderConfig
     [JsonPropertyName("readerType")]
     public IntakeReaderTypes IntakeReaderType { get; set; } = IntakeReaderTypes.Json;
 
+    [JsonProperty("maxDumpsPerMap")]
+    [JsonPropertyName("maxDumpsPerMap")]
+    public int MaxDumpsPerMap { get; set; } = 1500;
+
     [JsonProperty("ignoredDumpLocations")]
     [JsonPropertyName("ignoredDumpLocations")]
     public List<string> IgnoredDumpLocations { get; set; } = new List<string>();
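
The new property carries both Newtonsoft.Json's [JsonProperty] and System.Text.Json's [JsonPropertyName] because the tool's serializer is selectable (the "jsonSerializer" key in the config above). A minimal sketch, assuming the IntakeReaderConfig class above is in scope, of why the two attributes are kept in sync:

    // Hypothetical round-trip; not part of the commit.
    var json = "{\"maxDumpsPerMap\": 500}";

    // System.Text.Json matches property names case-sensitively by default,
    // so [JsonPropertyName] is required for the camelCase key to bind:
    var viaDotNet = System.Text.Json.JsonSerializer.Deserialize<IntakeReaderConfig>(json);

    // Newtonsoft.Json binds case-insensitively anyway, but [JsonProperty]
    // keeps the wire name explicit and both serializers in agreement:
    var viaNewtonsoft = Newtonsoft.Json.JsonConvert.DeserializeObject<IntakeReaderConfig>(json);

    // Both yield MaxDumpsPerMap == 500.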

View File

@@ -41,8 +41,13 @@ public class QueuePipeline : IPipeline
         try
         {
-            // Gather all files, then add them into the processing queue
-            GatherFiles().ForEach(f => _filesToProcess.Add(f));
+            // Gather all files, sort them by date descending, then add them to the processing queue
+            GatherFiles().OrderByDescending(f =>
+                {
+                    FileDateParser.TryParseFileDate(f, out var date);
+                    return date;
+                }
+            ).ToList().ForEach(f => _filesToProcess.Add(f));
 
             // We start up all the threads and collect them into a runners list
             for (int i = 0; i < threads; i++)
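
FileDateParser itself is not part of this diff; a minimal sketch of what TryParseFileDate presumably does, reconstructed from the regex and format string this commit removes from JsonFileIntakeReader below (the nullable out parameter matches how both call sites use it):

    using System.Globalization;
    using System.Text.RegularExpressions;

    public static class FileDateParser
    {
        // Same pattern the reader used before this commit
        private static readonly Regex FileNameDateRegex =
            new("([0-9]{4}(-[0-9]{2}){2}_((-){0,1}[0-9]{2}){3})");

        public static bool TryParseFileDate(string fileName, out DateTime? date)
        {
            date = null;
            var match = FileNameDateRegex.Match(fileName);
            if (!match.Success)
                return false;

            if (!DateTime.TryParseExact(match.Groups[1].Value, "yyyy-MM-dd_HH-mm-ss",
                    CultureInfo.InvariantCulture, DateTimeStyles.None, out var parsed))
                return false;

            date = parsed;
            return true;
        }
    }

Sorting newest-first here matters for the new cap: once a map reaches maxDumpsPerMap in the reader, the files that get dropped are the oldest ones.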

View File

@@ -1,5 +1,4 @@
-using System.Globalization;
-using System.Text.RegularExpressions;
+using System.Collections.Concurrent;
 using LootDumpProcessor.Logger;
 using LootDumpProcessor.Model.Input;
 using LootDumpProcessor.Model.Processing;
@@ -15,27 +14,40 @@ public class JsonFileIntakeReader : IIntakeReader
     private static readonly HashSet<string>? _ignoredLocations =
         LootDumpProcessorContext.GetConfig().ReaderConfig.IntakeReaderConfig?.IgnoredDumpLocations.ToHashSet();
 
-    private static Regex FileNameDateRegex = new("([0-9]{4}(-[0-9]{2}){2}_((-){0,1}[0-9]{2}){3})");
+    private readonly ConcurrentDictionary<string, int> _totalMapDumpsCounter = new();
 
     public bool Read(string file, out BasicInfo basicInfo)
     {
         var fileData = File.ReadAllText(file);
-        var unparsedDate = FileNameDateRegex.Match(file).Groups[1].Value;
-        var date = DateTime.ParseExact(unparsedDate, "yyyy-MM-dd_HH-mm-ss", CultureInfo.InvariantCulture);
+        // If the file name format ever changes, this date parser may break
+        if (!FileDateParser.TryParseFileDate(file, out var date))
+            LoggerFactory.GetInstance().Log($"Couldn't parse date from file: {file}", LogLevel.Error);
+
         var fi = _jsonSerializer.Deserialize<RootData>(fileData);
 
         if (fi.Data?.Name != null && (!_ignoredLocations?.Contains(fi.Data.Name) ?? true))
         {
-            basicInfo = new BasicInfo
-            {
-                Map = fi.Data.Name,
-                FileHash = ProcessorUtil.HashFile(fileData),
-                Data = fi,
-                Date = date,
-                FileName = file
-            };
-            LoggerFactory.GetInstance().Log($"File {file} fully read, returning data", LogLevel.Info);
-            return true;
+            // Initialize this map's dump counter on first sight
+            int counter;
+            if (!_totalMapDumpsCounter.TryGetValue(fi.Data.Name, out counter))
+            {
+                counter = 0;
+                _totalMapDumpsCounter[fi.Data.Name] = counter;
+            }
+
+            if (counter < LootDumpProcessorContext.GetConfig().ReaderConfig.IntakeReaderConfig.MaxDumpsPerMap)
+            {
+                basicInfo = new BasicInfo
+                {
+                    Map = fi.Data.Name,
+                    FileHash = ProcessorUtil.HashFile(fileData),
+                    Data = fi,
+                    Date = date.Value,
+                    FileName = file
+                };
+                _totalMapDumpsCounter[fi.Data.Name] += 1;
+                LoggerFactory.GetInstance().Log($"File {file} fully read, returning data", LogLevel.Info);
+                return true;
+            }
+
+            LoggerFactory.GetInstance().Log($"Ignoring file {file} as the file cap for map {fi.Data.Name} has been reached", LogLevel.Info);
         }
 
         LoggerFactory.GetInstance().Log(
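
A caveat on the new counter: _totalMapDumpsCounter is a ConcurrentDictionary, but the TryGetValue / compare / increment sequence above is not atomic, so if Read runs on several worker threads (the config above sets "threads": 20, and the ConcurrentDictionary choice suggests shared access) a map could slightly overshoot maxDumpsPerMap. If that ever matters, the slot can be reserved atomically; a sketch, not part of the commit:

    // Reserve a slot atomically; the returned value is this thread's 1-based slot.
    var max = LootDumpProcessorContext.GetConfig().ReaderConfig.IntakeReaderConfig.MaxDumpsPerMap;
    var slot = _totalMapDumpsCounter.AddOrUpdate(fi.Data.Name, 1, (_, current) => current + 1);

    if (slot <= max)
    {
        // Within the cap: build BasicInfo and return true, as above.
    }
    else
    {
        // Over the cap: log and skip the file. The counter keeps growing,
        // but only the first MaxDumpsPerMap files are ever accepted.
    }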