Added a limit on dumps per map

Alex 2023-08-13 18:14:54 +01:00
parent 5600ba1783
commit 20fd8d98c4
4 changed files with 43 additions and 20 deletions

View File

@@ -1,5 +1,5 @@
 {
-    "serverLocation": "D:\\Spt Stuff\\server",
+    "serverLocation": "E:\\spt\\Server",
     "threads": 20,
     "threadPoolingTimeoutMs": 1000,
     "jsonSerializer": "DotNet",
@@ -24,13 +24,14 @@
         "cleanupTempFolderAfterProcess": true
     },
     "intakeReaderConfig": {
+        "maxDumpsPerMap": 1500,
         "readerType": "Json",
         "ignoredDumpLocations": [
            "Hideout"
         ]
     },
     "dumpFilesLocation": [
-        "D:\\Spt Stuff\\Lootgenerator\\dumps\\input"
+        "E:\\spt\\dumps\\input"
     ],
     "thresholdDate": "2023-01-08",
     "acceptedFileExtensions": [
@@ -46,6 +47,6 @@
         "spawnPointToleranceForForced": 99.5
     },
     "writerConfig": {
-        "outputLocation": "D:\\Spt Stuff\\Lootgenerator\\dumps\\output"
+        "outputLocation": "E:\\spt\\dumps\\output"
     }
 }
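
Net effect of the config changes: the dump input/output paths move to E:\spt, and the intake reader gains one new knob. After this commit the intakeReaderConfig section reads:

    "intakeReaderConfig": {
        "maxDumpsPerMap": 1500,
        "readerType": "Json",
        "ignoredDumpLocations": [
            "Hideout"
        ]
    },

maxDumpsPerMap caps how many dump files the intake reader accepts per map; the enforcement logic lands in JsonFileIntakeReader below.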

View File

@@ -10,6 +10,11 @@ public class IntakeReaderConfig
     [JsonPropertyName("readerType")]
     public IntakeReaderTypes IntakeReaderType { get; set; } = IntakeReaderTypes.Json;
 
+    [JsonProperty("maxDumpsPerMap")]
+    [JsonPropertyName("maxDumpsPerMap")]
+    public int MaxDumpsPerMap { get; set; } = 1500;
+
     [JsonProperty("ignoredDumpLocations")]
     [JsonPropertyName("ignoredDumpLocations")]
     public List<string> IgnoredDumpLocations { get; set; } = new List<string>();
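
A minimal sketch of how the new property binds, assuming the IntakeReaderConfig type from the diff above is in scope; the [JsonPropertyName] attribute is what System.Text.Json (the "DotNet" serializer named in the config) uses, and the property initializer supplies the default when the key is absent. The harness itself is hypothetical:

    using System;
    using System.Text.Json;

    class Demo
    {
        static void Main()
        {
            // Explicit value in the config wins over the initializer default.
            var json = "{ \"maxDumpsPerMap\": 900, \"ignoredDumpLocations\": [ \"Hideout\" ] }";
            var withKey = JsonSerializer.Deserialize<IntakeReaderConfig>(json)!;
            Console.WriteLine(withKey.MaxDumpsPerMap);    // 900

            // Missing key: the property initializer keeps the default of 1500.
            var withoutKey = JsonSerializer.Deserialize<IntakeReaderConfig>("{}")!;
            Console.WriteLine(withoutKey.MaxDumpsPerMap); // 1500
        }
    }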

View File

@@ -41,8 +41,13 @@ public class QueuePipeline : IPipeline
         try
         {
-            // Gather all files, then add them into the processing queue
-            GatherFiles().ForEach(f => _filesToProcess.Add(f));
+            // Gather all files, sort them by date descending, then add them into the processing queue
+            GatherFiles().OrderByDescending(f =>
+                {
+                    FileDateParser.TryParseFileDate(f, out var date);
+                    return date;
+                }
+            ).ToList().ForEach(f => _filesToProcess.Add(f));
 
             // We start up all the threads and collect them into a runners list
             for (int i = 0; i < threads; i++)
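
One subtlety in the new ordering: when TryParseFileDate fails, the lambda returns a null DateTime?, and Nullable<DateTime> comparisons treat null as smaller than any real date, so OrderByDescending sinks unparseable file names to the back of the queue. A standalone sketch under that assumption (ParseOrNull stands in for the project's FileDateParser; the date format is taken from the regex removed in the next file):

    using System;
    using System.Collections.Generic;
    using System.Globalization;
    using System.Linq;

    // Stand-in for FileDateParser: nullable DateTime, null on parse failure.
    static DateTime? ParseOrNull(string name) =>
        DateTime.TryParseExact(name, "yyyy-MM-dd_HH-mm-ss",
            CultureInfo.InvariantCulture, DateTimeStyles.None, out var d) ? d : null;

    var files = new List<string> { "2023-01-10_12-00-00", "not-a-dump", "2023-02-01_08-30-00" };

    // Newest dumps first; names that fail to parse fall to the end.
    var ordered = files.OrderByDescending(ParseOrNull).ToList();
    Console.WriteLine(string.Join(", ", ordered));
    // 2023-02-01_08-30-00, 2023-01-10_12-00-00, not-a-dump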

View File

@@ -1,5 +1,4 @@
-using System.Globalization;
-using System.Text.RegularExpressions;
+using System.Collections.Concurrent;
 using LootDumpProcessor.Logger;
 using LootDumpProcessor.Model.Input;
 using LootDumpProcessor.Model.Processing;
@@ -15,28 +14,41 @@ public class JsonFileIntakeReader : IIntakeReader
     private static readonly HashSet<string>? _ignoredLocations =
         LootDumpProcessorContext.GetConfig().ReaderConfig.IntakeReaderConfig?.IgnoredDumpLocations.ToHashSet();
 
-    private static Regex FileNameDateRegex = new("([0-9]{4}(-[0-9]{2}){2}_((-){0,1}[0-9]{2}){3})");
+    private readonly ConcurrentDictionary<string, int> _totalMapDumpsCounter = new();
 
     public bool Read(string file, out BasicInfo basicInfo)
     {
         var fileData = File.ReadAllText(file);
-        var unparsedDate = FileNameDateRegex.Match(file).Groups[1].Value;
-        var date = DateTime.ParseExact(unparsedDate, "yyyy-MM-dd_HH-mm-ss", CultureInfo.InvariantCulture);
+        // If the file format changes it may break this date parser
+        if (!FileDateParser.TryParseFileDate(file, out var date))
+            LoggerFactory.GetInstance().Log($"Couldn't parse date from file: {file}", LogLevel.Error);
 
         var fi = _jsonSerializer.Deserialize<RootData>(fileData);
 
         if (fi.Data?.Name != null && (!_ignoredLocations?.Contains(fi.Data.Name) ?? true))
+        {
+            int counter;
+            if (!_totalMapDumpsCounter.TryGetValue(fi.Data.Name, out counter))
+            {
+                counter = 0;
+                _totalMapDumpsCounter[fi.Data.Name] = counter;
+            }
+
+            if (counter < LootDumpProcessorContext.GetConfig().ReaderConfig.IntakeReaderConfig.MaxDumpsPerMap)
             {
                 basicInfo = new BasicInfo
                 {
                     Map = fi.Data.Name,
                     FileHash = ProcessorUtil.HashFile(fileData),
                     Data = fi,
-                    Date = date,
+                    Date = date.Value,
                     FileName = file
                 };
+                _totalMapDumpsCounter[fi.Data.Name] += 1;
 
                 LoggerFactory.GetInstance().Log($"File {file} fully read, returning data", LogLevel.Info);
                 return true;
             }
+
+            LoggerFactory.GetInstance().Log($"Ignoring file {file} as the file cap for map {fi.Data.Name} has been reached", LogLevel.Info);
+        }
 
         LoggerFactory.GetInstance().Log(
             $"File {file} was not eligible for dump data, it did not contain a location name or it was on ignored locations config",