Merge pull request 'Changes' (#1) from StreetsProbabilityTest into master
Reviewed-on: SPT-AKI/LootDumpProcessor#1
Commit: c69dea3130
@@ -1,9 +1,12 @@
 {
-    "serverLocation": "D:\\Spt Stuff\\server",
-    "threads": 20,
+    "serverLocation": "E:\\spt\\Server",
+    "threads": 10,
     "threadPoolingTimeoutMs": 1000,
     "jsonSerializer": "DotNet",
     "manualGarbageCollectionCalls": false,
+    "dumpProcessorConfig": {
+        "spawnContainerChanceIncludeAfterDate": "2023-08-10 00:00:01"
+    },
     "dataStorageConfig": {
         "dataStorageType": "Memory",
         "fileDataStorageTempLocation": "D:\\Spt Stuff\\Lootgenerator\\Dumps\\cache"
@@ -21,13 +24,14 @@
         "cleanupTempFolderAfterProcess": true
     },
     "intakeReaderConfig": {
+        "maxDumpsPerMap": 1500,
         "readerType": "Json",
         "ignoredDumpLocations": [
             "Hideout"
         ]
     },
     "dumpFilesLocation": [
-        "D:\\Spt Stuff\\Lootgenerator\\dumps\\input"
+        "E:\\spt\\dumps\\input"
     ],
     "thresholdDate": "2023-01-08",
     "acceptedFileExtensions": [
@@ -43,6 +47,6 @@
         "spawnPointToleranceForForced": 99.5
     },
     "writerConfig": {
-        "outputLocation": "D:\\Spt Stuff\\Lootgenerator\\dumps\\output"
+        "outputLocation": "E:\\spt\\dumps\\output"
     }
 }
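The new spawnContainerChanceIncludeAfterDate value is consumed by the DateTime converters added later in this PR, which expect the "yyyy-MM-dd HH:mm:ss" pattern. A minimal sketch (not part of the PR) that checks a config string against that format:

using System.Globalization;

// Sketch only: confirms the config string matches the format the new converters parse.
var ok = DateTime.TryParseExact(
    "2023-08-10 00:00:01",
    "yyyy-MM-dd HH:mm:ss",
    CultureInfo.InvariantCulture,
    DateTimeStyles.None,
    out var cutoff);
// ok == true; cutoff == 2023-08-10 00:00:01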
@@ -42,6 +42,10 @@ public class Config
     [JsonPropertyName("processorConfig")]
     public ProcessorConfig ProcessorConfig { get; set; }
 
+    [JsonProperty("dumpProcessorConfig")]
+    [JsonPropertyName("dumpProcessorConfig")]
+    public DumpProcessorConfig DumpProcessorConfig { get; set; }
+
     [JsonProperty("writerConfig")]
     [JsonPropertyName("writerConfig")]
     public WriterConfig WriterConfig { get; set; }
Model/Config/DumpProcessorConfig.cs (new file, 14 lines)
@@ -0,0 +1,14 @@
+using System.Text.Json.Serialization;
+using LootDumpProcessor.Serializers.Json.Converters;
+using Newtonsoft.Json;
+
+namespace LootDumpProcessor.Model.Config;
+
+public class DumpProcessorConfig
+{
+    [JsonProperty("spawnContainerChanceIncludeAfterDate")]
+    [JsonPropertyName("spawnContainerChanceIncludeAfterDate")]
+    [Newtonsoft.Json.JsonConverter(typeof(NewtonsoftDateTimeConverter))]
+    [System.Text.Json.Serialization.JsonConverter(typeof(NetDateTimeConverter))]
+    public DateTime SpawnContainerChanceIncludeAfterDate { get; set; }
+}
@@ -10,6 +10,11 @@ public class IntakeReaderConfig
     [JsonPropertyName("readerType")]
     public IntakeReaderTypes IntakeReaderType { get; set; } = IntakeReaderTypes.Json;
 
+    [JsonProperty("maxDumpsPerMap")]
+    [JsonPropertyName("maxDumpsPerMap")]
+    public int MaxDumpsPerMap { get; set; } = 1500;
+
+
     [JsonProperty("ignoredDumpLocations")]
     [JsonPropertyName("ignoredDumpLocations")]
     public List<string> IgnoredDumpLocations { get; set; } = new List<string>();
@@ -1,5 +1,5 @@
-using LootDumpProcessor.Storage;
-using Newtonsoft.Json;
+using LootDumpProcessor.Serializers.Json.Converters;
+using LootDumpProcessor.Storage;
 
 namespace LootDumpProcessor.Model.Processing;
 
@@ -7,7 +7,8 @@ public class PreProcessedLooseLoot : IKeyable
 {
     public Dictionary<string, int> Counts { get; set; }
 
-    [JsonConverter(typeof(NewtonsoftJsonKeyConverter))]
+    [Newtonsoft.Json.JsonConverter(typeof(NewtonsoftJsonKeyConverter))]
+    [System.Text.Json.Serialization.JsonConverter(typeof(NetJsonKeyConverter))]
     public IKey ItemProperties { get; set; }
 
     public int MapSpawnpointCount { get; set; }
@@ -21,3 +21,16 @@ public class HandbookRoot
     public List<Category> Categories { get; set; }
     public List<HandbookItem> Items { get; set; }
 }
+
+public class StaticContainerRoot
+{
+    public decimal probability { get; set; }
+    public StaticContainerTemplate template { get; set; }
+}
+
+public class StaticContainerTemplate
+{
+    public string Id { get; set; }
+    public decimal SpawnChance { get; set; }
+    public bool IsAlwaysSpawn { get; set; }
+}
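For orientation, a throwaway sketch of how these two new types line up (all values invented, not taken from real handbook or dump data):

// Hypothetical values only, to illustrate the shape of the new types.
var container = new StaticContainerRoot
{
    probability = 0.35m,
    template = new StaticContainerTemplate
    {
        Id = "hypothetical-container-id",
        SpawnChance = 35m,
        IsAlwaysSpawn = false
    }
};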
@@ -42,26 +42,21 @@ public class MultithreadSteppedDumpProcessor : IDumpProcessor
 
         Runners.Clear();
+        // BSG changed the map data so static containers are now dynamic, so we need to scan all dumps for the static containers.
+        LoggerFactory.GetInstance().Log("Queuing dumps for static data processing", LogLevel.Info);
         foreach (var dumped in dumps)
         {
             Runners.Add(
                 Task.Factory.StartNew(() =>
                 {
+                    LoggerFactory.GetInstance().Log($"Processing static data for file {dumped.BasicInfo.FileName}", LogLevel.Info);
                     var data = _jsonSerializer.Deserialize<RootData>(File.ReadAllText(dumped.BasicInfo.FileName));
-                    // the if statement below will keep track of how many dumps we have for each map
-                    lock (mapDumpCounterLock)
-                    {
-                        if (mapDumpCounter.ContainsKey(data.Data.Name))
-                            mapDumpCounter[data.Data.Name] += 1;
-                        else
-                            mapDumpCounter.Add(data.Data.Name, 1);
-                    }
                     // the if statement below takes care of processing "forced" or real static data for each map, we only need
                     // to do this once per map, so we dont care about doing it again
                     lock (staticContainersLock)
                     {
                         if (!staticContainers.ContainsKey(data.Data.Name))
                         {
                             LoggerFactory.GetInstance().Log($"Doing first time process for map {data.Data.Name} of real static data", LogLevel.Info);
                             var mapStaticLoot = StaticLootProcessor.CreateRealStaticContainers(data);
                             staticContainers[mapStaticLoot.Item1] = mapStaticLoot.Item2;
                         }
@@ -78,6 +73,21 @@ public class MultithreadSteppedDumpProcessor : IDumpProcessor
                         }
                     }
 
+                    // Only process the dump file if the date is higher (after) the configuration date
+                    if (FileDateParser.TryParseFileDate(dumped.BasicInfo.FileName, out var fileDate) &&
+                        fileDate.HasValue &&
+                        fileDate.Value > LootDumpProcessorContext.GetConfig().DumpProcessorConfig
+                            .SpawnContainerChanceIncludeAfterDate)
+                    {
+                        // the if statement below will keep track of how many dumps we have for each map
+                        lock (mapDumpCounterLock)
+                        {
+                            if (mapDumpCounter.ContainsKey(data.Data.Name))
+                                mapDumpCounter[data.Data.Name] += 1;
+                            else
+                                mapDumpCounter.Add(data.Data.Name, 1);
+                        }
+
                     foreach (var dynamicStaticContainer in StaticLootProcessor.CreateDynamicStaticContainers(data))
                     {
                         lock (mapStaticContainersAggregatedLock)
@@ -88,6 +98,7 @@ public class MultithreadSteppedDumpProcessor : IDumpProcessor
                                 mapAggregatedData.Add(dynamicStaticContainer, 1);
                         }
                     }
+                    }
 
                     GCHandler.Collect();
                 })
@@ -95,6 +106,7 @@ public class MultithreadSteppedDumpProcessor : IDumpProcessor
             }
 
         Task.WaitAll(Runners.ToArray());
+        LoggerFactory.GetInstance().Log("All static data processing threads finished", LogLevel.Info);
         // Aggregate and calculate the probability of a static container
         mapStaticContainersAggregated.ToDictionary(
             kv => kv.Key,
@@ -102,7 +114,7 @@ public class MultithreadSteppedDumpProcessor : IDumpProcessor
                 td => new StaticDataPoint
                 {
                     Template = td.Key,
-                    Probability = Math.Round((double)((decimal)td.Value / (decimal)mapDumpCounter[kv.Key]), 2)
+                    Probability = GetStaticProbability(kv.Key, td, mapDumpCounter)
                 }
             ).ToList()
         ).ToList().ForEach(kv => staticContainers[kv.Key].StaticContainers = kv.Value);
@@ -110,32 +122,42 @@ public class MultithreadSteppedDumpProcessor : IDumpProcessor
         // Static containers
         output.Add(OutputFileType.StaticContainer, staticContainers);
 
+        LoggerFactory.GetInstance().Log("Processing ammo distribution", LogLevel.Info);
         // Ammo distribution
         output.Add(
             OutputFileType.StaticAmmo,
             StaticLootProcessor.CreateAmmoDistribution(dumpProcessData.ContainerCounts)
         );
 
+        LoggerFactory.GetInstance().Log("Processing static loot distribution", LogLevel.Info);
         // Static loot distribution
         output.Add(
             OutputFileType.StaticLoot,
             StaticLootProcessor.CreateStaticLootDistribution(dumpProcessData.ContainerCounts)
         );
 
+        LoggerFactory.GetInstance().Log("Processing loose loot distribution", LogLevel.Info);
         // Loose loot distribution
         var looseLootDistribution = LooseLootProcessor.CreateLooseLootDistribution(
             dumpProcessData.MapCounts,
             dumpProcessData.LooseLootCounts
         );
 
+        LoggerFactory.GetInstance().Log("Collecting loose loot distribution information", LogLevel.Info);
         var loot = dumpProcessData.MapCounts
             .Select(mapCount => mapCount.Key)
             .ToDictionary(mi => mi, mi => looseLootDistribution[mi]);
 
         output.Add(OutputFileType.LooseLoot, loot);
+        LoggerFactory.GetInstance().Log("Dump processing fully completed!", LogLevel.Info);
         return output;
     }
 
+    private static double GetStaticProbability(string mapName, KeyValuePair<Template, int> td, Dictionary<string, int> mapDumpCounter)
+    {
+        return Math.Round((double)((decimal)td.Value / (decimal)mapDumpCounter[mapName]), 2);
+    }
+
     private DumpProcessData GetDumpProcessData(List<PartialData> dumps)
     {
         var dumpProcessData = new DumpProcessData();
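As a sanity check on the extracted GetStaticProbability helper, a worked example with made-up numbers (not from any real dump set): a container observed in 13 of 42 dumps for a map rounds to 0.31.

// Hypothetical counts: container observed in 13 of 42 dumps for one map.
var probability = Math.Round((double)((decimal)13 / (decimal)42), 2);
// probability == 0.31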
@@ -60,9 +60,17 @@ public class StaticLootProcessor
 
     public static List<Template> CreateDynamicStaticContainers(RootData rawMapDump)
     {
-        return (from li in rawMapDump.Data.Loot
+        var data = (from li in rawMapDump.Data.Loot
             where (li.IsContainer ?? false) && (!LootDumpProcessorContext.GetStaticWeaponIds().Contains(li.Items[0].Tpl))
             select li).ToList();
+
+        foreach (var item in data)
+        {
+            // remove all but first item from containers items
+            item.Items = new List<Item> { item.Items[0] };
+        }
+
+        return data;
     }
 
     public static Dictionary<string, List<AmmoDistribution>> CreateAmmoDistribution(
@@ -41,8 +41,13 @@ public class QueuePipeline : IPipeline
 
         try
         {
-            // Gather all files, then add them into the processing queue
-            GatherFiles().ForEach(f => _filesToProcess.Add(f));
+            // Gather all files, sort them by date descending and then add them into the processing queue
+            GatherFiles().OrderByDescending(f =>
+                {
+                    FileDateParser.TryParseFileDate(f, out var date);
+                    return date;
+                }
+            ).ToList().ForEach(f => _filesToProcess.Add(f));
 
             // We startup all the threads and collect them into a runners list
             for (int i = 0; i < threads; i++)
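A small sketch of how that descending sort behaves (file names invented): entries whose names yield no parsable date come back as null and therefore sort to the end.

using LootDumpProcessor.Process.Processor;

// Hypothetical file names; only two contain a parsable date block.
var files = new List<string>
{
    "2023-08-12_14-33-21_Woods.json",
    "notes.txt",
    "2023-09-01_09-15-00_Shoreline.json"
};
var ordered = files.OrderByDescending(f =>
{
    FileDateParser.TryParseFileDate(f, out var date);
    return date;
}).ToList();
// ordered: Shoreline dump (newest), Woods dump, then notes.txt (null date sorts last when descending)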
@@ -1,5 +1,4 @@
-using System.Globalization;
-using System.Text.RegularExpressions;
+using System.Collections.Concurrent;
 using LootDumpProcessor.Logger;
 using LootDumpProcessor.Model.Input;
 using LootDumpProcessor.Model.Processing;
@@ -15,28 +14,41 @@ public class JsonFileIntakeReader : IIntakeReader
     private static readonly HashSet<string>? _ignoredLocations =
         LootDumpProcessorContext.GetConfig().ReaderConfig.IntakeReaderConfig?.IgnoredDumpLocations.ToHashSet();
 
-    private static Regex FileNameDateRegex = new("([0-9]{4}(-[0-9]{2}){2}_((-){0,1}[0-9]{2}){3})");
+    private static readonly ConcurrentDictionary<string, int> _totalMapDumpsCounter = new();
 
     public bool Read(string file, out BasicInfo basicInfo)
     {
         var fileData = File.ReadAllText(file);
-        var unparsedDate = FileNameDateRegex.Match(file).Groups[1].Value;
-        var date = DateTime.ParseExact(unparsedDate, "yyyy-MM-dd_HH-mm-ss", CultureInfo.InvariantCulture);
+        // If the file format changes it may screw up this date parser
+        if (!FileDateParser.TryParseFileDate(file, out var date))
+            LoggerFactory.GetInstance().Log($"Couldnt parse date from file: {file}", LogLevel.Error);
 
         var fi = _jsonSerializer.Deserialize<RootData>(fileData);
         if (fi.Data?.Name != null && (!_ignoredLocations?.Contains(fi.Data.Name) ?? true))
         {
+            int counter;
+            if (!_totalMapDumpsCounter.TryGetValue(fi.Data.Name, out counter))
+            {
+                counter = 0;
+                _totalMapDumpsCounter[fi.Data.Name] = counter;
+            }
+
+            if (counter < LootDumpProcessorContext.GetConfig().ReaderConfig.IntakeReaderConfig.MaxDumpsPerMap)
+            {
             basicInfo = new BasicInfo
             {
                 Map = fi.Data.Name,
                 FileHash = ProcessorUtil.HashFile(fileData),
                 Data = fi,
-                Date = date,
+                Date = date.Value,
                 FileName = file
             };
+            _totalMapDumpsCounter[fi.Data.Name] += 1;
             LoggerFactory.GetInstance().Log($"File {file} fully read, returning data", LogLevel.Info);
             return true;
+            }
+            LoggerFactory.GetInstance().Log($"Ignoring file {file} as the file cap for map {fi.Data.Name} has been reached", LogLevel.Info);
         }
 
         LoggerFactory.GetInstance().Log(
             $"File {file} was not eligible for dump data, it did not contain a location name or it was on ignored locations config",
Serializers/Json/Converters/NetDateTimeConverter.cs (new file, 22 lines)
@@ -0,0 +1,22 @@
+using System.Globalization;
+using System.Text.Json;
+using System.Text.Json.Serialization;
+
+namespace LootDumpProcessor.Serializers.Json.Converters;
+
+public class NetDateTimeConverter : JsonConverter<DateTime>
+{
+    private static string _dateTimeFormat = "yyyy-MM-dd HH:mm:ss";
+    public override DateTime Read(ref Utf8JsonReader reader, Type typeToConvert, JsonSerializerOptions options)
+    {
+        var stringDate = reader.GetString() ?? "";
+        if (!DateTime.TryParseExact(stringDate, _dateTimeFormat, null, DateTimeStyles.None, out var parsedDate))
+            throw new Exception($"Invalid value for DateTime format: {_dateTimeFormat}");
+        return parsedDate;
+    }
+
+    public override void Write(Utf8JsonWriter writer, DateTime value, JsonSerializerOptions options)
+    {
+        writer.WriteStringValue(value.ToString(_dateTimeFormat));
+    }
+}
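A minimal usage sketch for the System.Text.Json side, mirroring how NetJsonSerializer registers its converters further down; the payload string is made up:

using System.Text.Json;
using LootDumpProcessor.Serializers.Json.Converters;

var options = new JsonSerializerOptions();
options.Converters.Add(new NetDateTimeConverter());

// Hypothetical payload in the "yyyy-MM-dd HH:mm:ss" format the converter enforces.
var parsed = JsonSerializer.Deserialize<DateTime>("\"2023-08-10 00:00:01\"", options);
// parsed == new DateTime(2023, 8, 10, 0, 0, 1)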
@@ -1,7 +1,8 @@
 using System.Text.Json;
 using System.Text.Json.Serialization;
+using LootDumpProcessor.Storage;
 
-namespace LootDumpProcessor.Storage;
+namespace LootDumpProcessor.Serializers.Json.Converters;
 
 public class NetJsonKeyConverter : JsonConverter<IKey?>
 {
Serializers/Json/Converters/NewtonsoftDateTimeConverter.cs (new file, 28 lines)
@@ -0,0 +1,28 @@
+using System.Globalization;
+using Newtonsoft.Json;
+
+namespace LootDumpProcessor.Serializers.Json.Converters;
+
+public class NewtonsoftDateTimeConverter : JsonConverter<DateTime>
+{
+    private static string _dateTimeFormat = "yyyy-MM-dd HH:mm:ss";
+
+    public override void WriteJson(JsonWriter writer, DateTime value, JsonSerializer serializer)
+    {
+        writer.WriteValue(value.ToString(_dateTimeFormat));
+    }
+
+    public override DateTime ReadJson(
+        JsonReader reader,
+        Type objectType,
+        DateTime existingValue,
+        bool hasExistingValue,
+        JsonSerializer serializer
+    )
+    {
+        var stringDate = reader.Value?.ToString() ?? "";
+        if (!DateTime.TryParseExact(stringDate, _dateTimeFormat, null, DateTimeStyles.None, out var parsedDate))
+            throw new Exception($"Invalid value for DateTime format: {_dateTimeFormat}");
+        return parsedDate;
+    }
+}
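And a sketch of the Newtonsoft side, shown here on the write path (the DateTime value is arbitrary):

using Newtonsoft.Json;
using LootDumpProcessor.Serializers.Json.Converters;

var settings = new JsonSerializerSettings();
settings.Converters.Add(new NewtonsoftDateTimeConverter());

// Arbitrary value; serializes through WriteJson into the "yyyy-MM-dd HH:mm:ss" shape.
var json = JsonConvert.SerializeObject(new DateTime(2023, 8, 10, 0, 0, 1), settings);
// json == "\"2023-08-10 00:00:01\""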
@@ -1,6 +1,7 @@
-using Newtonsoft.Json;
+using LootDumpProcessor.Storage;
+using Newtonsoft.Json;
 
-namespace LootDumpProcessor.Storage;
+namespace LootDumpProcessor.Serializers.Json.Converters;
 
 public class NewtonsoftJsonKeyConverter : JsonConverter<AbstractKey>
 {
@@ -1,6 +1,6 @@
 using System.Text.Json;
 using System.Text.Json.Serialization;
-using LootDumpProcessor.Storage;
+using LootDumpProcessor.Serializers.Json.Converters;
 
 namespace LootDumpProcessor.Serializers.Json;
 
@@ -12,9 +12,11 @@ public class NetJsonSerializer : IJsonSerializer
         Converters =
         {
             new NetJsonKeyConverter(),
-            new JsonStringEnumConverter()
+            new JsonStringEnumConverter(),
+            new NetDateTimeConverter()
         }
     };
 
     public string Serialize<T>(T obj)
     {
         return JsonSerializer.Serialize(obj, _serializeOptions);
@@ -1,4 +1,4 @@
-using LootDumpProcessor.Storage;
+using LootDumpProcessor.Serializers.Json.Converters;
 using Newtonsoft.Json;
 using Newtonsoft.Json.Converters;
 
@@ -11,7 +11,8 @@ public class NewtonsoftJsonSerializer : IJsonSerializer
         Converters =
         {
             new NewtonsoftJsonKeyConverter(),
-            new StringEnumConverter()
+            new StringEnumConverter(),
+            new NewtonsoftDateTimeConverter()
         }
     };
 
Utils/FileDateParser.cs (new file, 32 lines)
@@ -0,0 +1,32 @@
+using System.Text.RegularExpressions;
+
+namespace LootDumpProcessor.Process.Processor;
+
+public static class FileDateParser
+{
+    private static readonly Regex _fileDateRegex =
+        new(".*([0-9]{4})[-]([0-9]{2})[-]([0-9]{2})[_]([0-9]{2})[-]([0-9]{2})[-]([0-9]{2}).*");
+
+    public static bool TryParseFileDate(string fileName, out DateTime? date)
+    {
+        date = null;
+        if (!_fileDateRegex.IsMatch(fileName))
+            return false;
+        var match = _fileDateRegex.Match(fileName);
+        var year = match.Groups[1].Value;
+        var month = match.Groups[2].Value;
+        var day = match.Groups[3].Value;
+        var hour = match.Groups[4].Value;
+        var mins = match.Groups[5].Value;
+        var secs = match.Groups[6].Value;
+        date = new DateTime(
+            int.Parse(year),
+            int.Parse(month),
+            int.Parse(day),
+            int.Parse(hour),
+            int.Parse(mins),
+            int.Parse(secs)
+        );
+        return true;
+    }
+}
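A quick usage sketch for the new parser (file name invented):

using LootDumpProcessor.Process.Processor;

// Hypothetical dump file name; the regex looks for a "yyyy-MM-dd_HH-mm-ss" block anywhere in the string.
if (FileDateParser.TryParseFileDate("2023-08-12_14-33-21_Woods.json", out var date))
{
    // date == new DateTime(2023, 8, 12, 14, 33, 21)
}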