Merge pull request 'Changes' (#1) from StreetsProbabilityTest into master
Reviewed-on: SPT-AKI/LootDumpProcessor#1
Commit: c69dea3130
@@ -1,9 +1,12 @@
 {
-  "serverLocation": "D:\\Spt Stuff\\server",
-  "threads": 20,
+  "serverLocation": "E:\\spt\\Server",
+  "threads": 10,
   "threadPoolingTimeoutMs": 1000,
   "jsonSerializer": "DotNet",
   "manualGarbageCollectionCalls": false,
+  "dumpProcessorConfig": {
+    "spawnContainerChanceIncludeAfterDate": "2023-08-10 00:00:01"
+  },
   "dataStorageConfig": {
     "dataStorageType": "Memory",
     "fileDataStorageTempLocation": "D:\\Spt Stuff\\Lootgenerator\\Dumps\\cache"
@@ -21,13 +24,14 @@
     "cleanupTempFolderAfterProcess": true
   },
   "intakeReaderConfig": {
+    "maxDumpsPerMap": 1500,
     "readerType": "Json",
     "ignoredDumpLocations": [
       "Hideout"
     ]
   },
   "dumpFilesLocation": [
-    "D:\\Spt Stuff\\Lootgenerator\\dumps\\input"
+    "E:\\spt\\dumps\\input"
   ],
   "thresholdDate": "2023-01-08",
   "acceptedFileExtensions": [
@@ -43,6 +47,6 @@
     "spawnPointToleranceForForced": 99.5
   },
   "writerConfig": {
-    "outputLocation": "D:\\Spt Stuff\\Lootgenerator\\dumps\\output"
+    "outputLocation": "E:\\spt\\dumps\\output"
   }
 }
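Note on the new config value: "2023-08-10 00:00:01" follows the "yyyy-MM-dd HH:mm:ss" layout expected by the date converters added later in this PR. A minimal stand-alone sketch for checking a value against that format (class name and the invariant-culture choice are illustrative, not part of the PR; the PR's converters pass a null format provider):

using System;
using System.Globalization;

class SpawnDateFormatSketch
{
    // Same format string the new NetDateTimeConverter/NewtonsoftDateTimeConverter use.
    private const string DateTimeFormat = "yyyy-MM-dd HH:mm:ss";

    static void Main()
    {
        var raw = "2023-08-10 00:00:01"; // value from the updated config
        if (DateTime.TryParseExact(raw, DateTimeFormat, CultureInfo.InvariantCulture,
                DateTimeStyles.None, out var parsed))
            Console.WriteLine($"Parsed cut-off date: {parsed:u}");
        else
            Console.WriteLine("Date does not match the expected format");
    }
}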
@@ -42,6 +42,10 @@ public class Config
     [JsonPropertyName("processorConfig")]
     public ProcessorConfig ProcessorConfig { get; set; }
 
+    [JsonProperty("dumpProcessorConfig")]
+    [JsonPropertyName("dumpProcessorConfig")]
+    public DumpProcessorConfig DumpProcessorConfig { get; set; }
+
     [JsonProperty("writerConfig")]
     [JsonPropertyName("writerConfig")]
     public WriterConfig WriterConfig { get; set; }
Model/Config/DumpProcessorConfig.cs (new file, 14 lines)
@@ -0,0 +1,14 @@
+using System.Text.Json.Serialization;
+using LootDumpProcessor.Serializers.Json.Converters;
+using Newtonsoft.Json;
+
+namespace LootDumpProcessor.Model.Config;
+
+public class DumpProcessorConfig
+{
+    [JsonProperty("spawnContainerChanceIncludeAfterDate")]
+    [JsonPropertyName("spawnContainerChanceIncludeAfterDate")]
+    [Newtonsoft.Json.JsonConverter(typeof(NewtonsoftDateTimeConverter))]
+    [System.Text.Json.Serialization.JsonConverter(typeof(NetDateTimeConverter))]
+    public DateTime SpawnContainerChanceIncludeAfterDate { get; set; }
+}
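Note: DumpProcessorConfig carries one attribute per serializer so the same JSON property name works whether the project serializes with Newtonsoft.Json or System.Text.Json. A stand-alone sketch of that dual-attribute pattern (sample class and values are hypothetical, it assumes the Newtonsoft.Json package, and it omits the date converters the real property also registers):

using System;

public class DualAttributeSample
{
    [Newtonsoft.Json.JsonProperty("spawnContainerChanceIncludeAfterDate")]
    [System.Text.Json.Serialization.JsonPropertyName("spawnContainerChanceIncludeAfterDate")]
    public DateTime SpawnContainerChanceIncludeAfterDate { get; set; }
}

class DualAttributeDemo
{
    static void Main()
    {
        var sample = new DualAttributeSample
        {
            SpawnContainerChanceIncludeAfterDate = new DateTime(2023, 8, 10, 0, 0, 1)
        };
        // Each serializer reads its own attribute, so the property name is identical either way.
        Console.WriteLine(Newtonsoft.Json.JsonConvert.SerializeObject(sample));
        Console.WriteLine(System.Text.Json.JsonSerializer.Serialize(sample));
    }
}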
@@ -10,6 +10,11 @@ public class IntakeReaderConfig
     [JsonPropertyName("readerType")]
     public IntakeReaderTypes IntakeReaderType { get; set; } = IntakeReaderTypes.Json;
 
+    [JsonProperty("maxDumpsPerMap")]
+    [JsonPropertyName("maxDumpsPerMap")]
+    public int MaxDumpsPerMap { get; set; } = 1500;
+
     [JsonProperty("ignoredDumpLocations")]
     [JsonPropertyName("ignoredDumpLocations")]
     public List<string> IgnoredDumpLocations { get; set; } = new List<string>();
@@ -1,5 +1,5 @@
-using LootDumpProcessor.Storage;
-using Newtonsoft.Json;
+using LootDumpProcessor.Serializers.Json.Converters;
+using LootDumpProcessor.Storage;
 
 namespace LootDumpProcessor.Model.Processing;
 
@@ -7,7 +7,8 @@ public class PreProcessedLooseLoot : IKeyable
 {
     public Dictionary<string, int> Counts { get; set; }
 
-    [JsonConverter(typeof(NewtonsoftJsonKeyConverter))]
+    [Newtonsoft.Json.JsonConverter(typeof(NewtonsoftJsonKeyConverter))]
+    [System.Text.Json.Serialization.JsonConverter(typeof(NetJsonKeyConverter))]
    public IKey ItemProperties { get; set; }
 
     public int MapSpawnpointCount { get; set; }
@@ -21,3 +21,16 @@ public class HandbookRoot
     public List<Category> Categories { get; set; }
     public List<HandbookItem> Items { get; set; }
 }
+
+public class StaticContainerRoot
+{
+    public decimal probability { get; set; }
+    public StaticContainerTemplate template { get; set; }
+}
+
+public class StaticContainerTemplate
+{
+    public string Id { get; set; }
+    public decimal SpawnChance { get; set; }
+    public bool IsAlwaysSpawn { get; set; }
+}
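Note: StaticContainerRoot/StaticContainerTemplate model the per-container entries scanned out of the dumps. A deserialization sketch using local copies of these shapes; the JSON payload below is invented and real dump field names may differ:

using System;
using System.Text.Json;

// Local stand-ins mirroring the new classes above.
public class StaticContainerTemplate
{
    public string Id { get; set; }
    public decimal SpawnChance { get; set; }
    public bool IsAlwaysSpawn { get; set; }
}

public class StaticContainerRoot
{
    public decimal probability { get; set; }
    public StaticContainerTemplate template { get; set; }
}

class StaticContainerSketch
{
    static void Main()
    {
        // Hypothetical payload shaped to match the classes (System.Text.Json is case-sensitive by default).
        var json = "{\"probability\": 0.42, \"template\": {\"Id\": \"container_id\", \"SpawnChance\": 42.0, \"IsAlwaysSpawn\": false}}";
        var root = JsonSerializer.Deserialize<StaticContainerRoot>(json);
        Console.WriteLine($"{root.template.Id}: probability {root.probability}");
    }
}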
@@ -42,26 +42,21 @@ public class MultithreadSteppedDumpProcessor : IDumpProcessor
 
         Runners.Clear();
         // BSG changed the map data so static containers are now dynamic, so we need to scan all dumps for the static containers.
+        LoggerFactory.GetInstance().Log("Queuing dumps for static data processing", LogLevel.Info);
         foreach (var dumped in dumps)
         {
             Runners.Add(
                 Task.Factory.StartNew(() =>
                 {
+                    LoggerFactory.GetInstance().Log($"Processing static data for file {dumped.BasicInfo.FileName}", LogLevel.Info);
                     var data = _jsonSerializer.Deserialize<RootData>(File.ReadAllText(dumped.BasicInfo.FileName));
-                    // the if statement below will keep track of how many dumps we have for each map
-                    lock (mapDumpCounterLock)
-                    {
-                        if (mapDumpCounter.ContainsKey(data.Data.Name))
-                            mapDumpCounter[data.Data.Name] += 1;
-                        else
-                            mapDumpCounter.Add(data.Data.Name, 1);
-                    }
                     // the if statement below takes care of processing "forced" or real static data for each map, we only need
                     // to do this once per map, so we dont care about doing it again
                     lock (staticContainersLock)
                     {
                         if (!staticContainers.ContainsKey(data.Data.Name))
                         {
+                            LoggerFactory.GetInstance().Log($"Doing first time process for map {data.Data.Name} of real static data", LogLevel.Info);
                             var mapStaticLoot = StaticLootProcessor.CreateRealStaticContainers(data);
                             staticContainers[mapStaticLoot.Item1] = mapStaticLoot.Item2;
                         }
@@ -78,14 +73,30 @@ public class MultithreadSteppedDumpProcessor : IDumpProcessor
                         }
                     }
 
-                    foreach (var dynamicStaticContainer in StaticLootProcessor.CreateDynamicStaticContainers(data))
-                    {
-                        lock (mapStaticContainersAggregatedLock)
-                        {
-                            if (mapAggregatedData.ContainsKey(dynamicStaticContainer))
-                                mapAggregatedData[dynamicStaticContainer] += 1;
-                            else
-                                mapAggregatedData.Add(dynamicStaticContainer, 1);
-                        }
-                    }
+                    // Only process the dump file if the date is higher (after) the configuration date
+                    if (FileDateParser.TryParseFileDate(dumped.BasicInfo.FileName, out var fileDate) &&
+                        fileDate.HasValue &&
+                        fileDate.Value > LootDumpProcessorContext.GetConfig().DumpProcessorConfig
+                            .SpawnContainerChanceIncludeAfterDate)
+                    {
+                        // the if statement below will keep track of how many dumps we have for each map
+                        lock (mapDumpCounterLock)
+                        {
+                            if (mapDumpCounter.ContainsKey(data.Data.Name))
+                                mapDumpCounter[data.Data.Name] += 1;
+                            else
+                                mapDumpCounter.Add(data.Data.Name, 1);
+                        }
+
+                        foreach (var dynamicStaticContainer in StaticLootProcessor.CreateDynamicStaticContainers(data))
+                        {
+                            lock (mapStaticContainersAggregatedLock)
+                            {
+                                if (mapAggregatedData.ContainsKey(dynamicStaticContainer))
+                                    mapAggregatedData[dynamicStaticContainer] += 1;
+                                else
+                                    mapAggregatedData.Add(dynamicStaticContainer, 1);
+                            }
+                        }
+                    }
 
@@ -95,6 +106,7 @@ public class MultithreadSteppedDumpProcessor : IDumpProcessor
         }
 
         Task.WaitAll(Runners.ToArray());
+        LoggerFactory.GetInstance().Log("All static data processing threads finished", LogLevel.Info);
         // Aggregate and calculate the probability of a static container
         mapStaticContainersAggregated.ToDictionary(
             kv => kv.Key,
@@ -102,7 +114,7 @@ public class MultithreadSteppedDumpProcessor : IDumpProcessor
                 td => new StaticDataPoint
                 {
                     Template = td.Key,
-                    Probability = Math.Round((double)((decimal)td.Value / (decimal)mapDumpCounter[kv.Key]), 2)
+                    Probability = GetStaticProbability(kv.Key, td, mapDumpCounter)
                 }
             ).ToList()
         ).ToList().ForEach(kv => staticContainers[kv.Key].StaticContainers = kv.Value);
@@ -110,32 +122,42 @@ public class MultithreadSteppedDumpProcessor : IDumpProcessor
         // Static containers
         output.Add(OutputFileType.StaticContainer, staticContainers);
 
+        LoggerFactory.GetInstance().Log("Processing ammo distribution", LogLevel.Info);
         // Ammo distribution
         output.Add(
             OutputFileType.StaticAmmo,
             StaticLootProcessor.CreateAmmoDistribution(dumpProcessData.ContainerCounts)
         );
 
+        LoggerFactory.GetInstance().Log("Processing static loot distribution", LogLevel.Info);
         // Static loot distribution
         output.Add(
             OutputFileType.StaticLoot,
             StaticLootProcessor.CreateStaticLootDistribution(dumpProcessData.ContainerCounts)
         );
 
+        LoggerFactory.GetInstance().Log("Processing loose loot distribution", LogLevel.Info);
         // Loose loot distribution
         var looseLootDistribution = LooseLootProcessor.CreateLooseLootDistribution(
             dumpProcessData.MapCounts,
             dumpProcessData.LooseLootCounts
         );
 
+        LoggerFactory.GetInstance().Log("Collecting loose loot distribution information", LogLevel.Info);
         var loot = dumpProcessData.MapCounts
             .Select(mapCount => mapCount.Key)
             .ToDictionary(mi => mi, mi => looseLootDistribution[mi]);
 
         output.Add(OutputFileType.LooseLoot, loot);
+        LoggerFactory.GetInstance().Log("Dump processing fully completed!", LogLevel.Info);
         return output;
     }
 
+    private static double GetStaticProbability(string mapName, KeyValuePair<Template, int> td, Dictionary<string, int> mapDumpCounter)
+    {
+        return Math.Round((double)((decimal)td.Value / (decimal)mapDumpCounter[mapName]), 2);
+    }
+
     private DumpProcessData GetDumpProcessData(List<PartialData> dumps)
     {
         var dumpProcessData = new DumpProcessData();
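Note: GetStaticProbability keeps the original formula, now in one place: the number of accepted dumps in which the container appeared divided by the dump count for that map, rounded to two decimals. A worked example with made-up counts:

using System;

class StaticProbabilitySketch
{
    // Same arithmetic as GetStaticProbability, with the dictionary lookups replaced by plain ints.
    static double GetStaticProbability(int timesSeen, int mapDumpCount)
        => Math.Round((double)((decimal)timesSeen / (decimal)mapDumpCount), 2);

    static void Main()
    {
        // Hypothetical: a container found in 873 of 1500 accepted dumps for one map.
        Console.WriteLine(GetStaticProbability(873, 1500)); // 0.58
    }
}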
@@ -60,9 +60,17 @@ public class StaticLootProcessor
 
     public static List<Template> CreateDynamicStaticContainers(RootData rawMapDump)
     {
-        return (from li in rawMapDump.Data.Loot
+        var data = (from li in rawMapDump.Data.Loot
             where (li.IsContainer ?? false) && (!LootDumpProcessorContext.GetStaticWeaponIds().Contains(li.Items[0].Tpl))
             select li).ToList();
+
+        foreach (var item in data)
+        {
+            // remove all but first item from containers items
+            item.Items = new List<Item> { item.Items[0] };
+        }
+
+        return data;
     }
 
     public static Dictionary<string, List<AmmoDistribution>> CreateAmmoDistribution(
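Note: CreateDynamicStaticContainers now strips each matched container down to its root item, so only the container template itself is aggregated rather than its randomly rolled contents. A sketch of that trim using simplified stand-in types (Item/Container here are not the project's real classes):

using System;
using System.Collections.Generic;

class Item { public string Tpl { get; set; } }
class Container { public List<Item> Items { get; set; } = new(); }

class TrimContainerItemsSketch
{
    static void Main()
    {
        var container = new Container
        {
            Items = new List<Item>
            {
                new Item { Tpl = "container_tpl" }, // the container itself
                new Item { Tpl = "loot_a" },        // rolled content
                new Item { Tpl = "loot_b" }
            }
        };

        // remove all but first item from containers items (mirrors the diff)
        container.Items = new List<Item> { container.Items[0] };

        Console.WriteLine(container.Items.Count); // 1
    }
}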
@@ -41,8 +41,13 @@ public class QueuePipeline : IPipeline
 
         try
         {
-            // Gather all files, then add them into the processing queue
-            GatherFiles().ForEach(f => _filesToProcess.Add(f));
+            // Gather all files, sort them by date descending and then add them into the processing queue
+            GatherFiles().OrderByDescending(f =>
+                {
+                    FileDateParser.TryParseFileDate(f, out var date);
+                    return date;
+                }
+            ).ToList().ForEach(f => _filesToProcess.Add(f));
 
             // We startup all the threads and collect them into a runners list
             for (int i = 0; i < threads; i++)
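Note: the pipeline now queues dump files newest-first, ordered by the timestamp embedded in the file name. A sketch of that ordering; the parser below is a simplified stand-in for FileDateParser and the file names are invented:

using System;
using System.Collections.Generic;
using System.Linq;
using System.Text.RegularExpressions;

class DumpOrderingSketch
{
    static readonly Regex DateRegex = new(@"(\d{4})-(\d{2})-(\d{2})_(\d{2})-(\d{2})-(\d{2})");

    // Returns null when no timestamp is present, so such files sort last when ordering descending.
    static DateTime? ParseFileDate(string fileName)
    {
        var m = DateRegex.Match(fileName);
        if (!m.Success)
            return null;
        var p = Enumerable.Range(1, 6).Select(i => int.Parse(m.Groups[i].Value)).ToArray();
        return new DateTime(p[0], p[1], p[2], p[3], p[4], p[5]);
    }

    static void Main()
    {
        var files = new List<string>
        {
            "dump_2023-08-09_12-00-00.json",
            "dump_2023-08-11_09-30-15.json"
        };
        foreach (var f in files.OrderByDescending(f => ParseFileDate(f)))
            Console.WriteLine(f); // newest dump first
    }
}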
@@ -1,5 +1,4 @@
-using System.Globalization;
-using System.Text.RegularExpressions;
+using System.Collections.Concurrent;
 using LootDumpProcessor.Logger;
 using LootDumpProcessor.Model.Input;
 using LootDumpProcessor.Model.Processing;
@@ -15,27 +14,40 @@ public class JsonFileIntakeReader
     private static readonly HashSet<string>? _ignoredLocations =
         LootDumpProcessorContext.GetConfig().ReaderConfig.IntakeReaderConfig?.IgnoredDumpLocations.ToHashSet();
 
-    private static Regex FileNameDateRegex = new("([0-9]{4}(-[0-9]{2}){2}_((-){0,1}[0-9]{2}){3})");
+    private static readonly ConcurrentDictionary<string, int> _totalMapDumpsCounter = new();
 
     public bool Read(string file, out BasicInfo basicInfo)
     {
         var fileData = File.ReadAllText(file);
-        var unparsedDate = FileNameDateRegex.Match(file).Groups[1].Value;
-        var date = DateTime.ParseExact(unparsedDate, "yyyy-MM-dd_HH-mm-ss", CultureInfo.InvariantCulture);
+        // If the file format changes it may screw up this date parser
+        if (!FileDateParser.TryParseFileDate(file, out var date))
+            LoggerFactory.GetInstance().Log($"Couldnt parse date from file: {file}", LogLevel.Error);
 
         var fi = _jsonSerializer.Deserialize<RootData>(fileData);
         if (fi.Data?.Name != null && (!_ignoredLocations?.Contains(fi.Data.Name) ?? true))
         {
-            basicInfo = new BasicInfo
-            {
-                Map = fi.Data.Name,
-                FileHash = ProcessorUtil.HashFile(fileData),
-                Data = fi,
-                Date = date,
-                FileName = file
-            };
-            LoggerFactory.GetInstance().Log($"File {file} fully read, returning data", LogLevel.Info);
-            return true;
+            int counter;
+            if (!_totalMapDumpsCounter.TryGetValue(fi.Data.Name, out counter))
+            {
+                counter = 0;
+                _totalMapDumpsCounter[fi.Data.Name] = counter;
+            }
+
+            if (counter < LootDumpProcessorContext.GetConfig().ReaderConfig.IntakeReaderConfig.MaxDumpsPerMap)
+            {
+                basicInfo = new BasicInfo
+                {
+                    Map = fi.Data.Name,
+                    FileHash = ProcessorUtil.HashFile(fileData),
+                    Data = fi,
+                    Date = date.Value,
+                    FileName = file
+                };
+                _totalMapDumpsCounter[fi.Data.Name] += 1;
+                LoggerFactory.GetInstance().Log($"File {file} fully read, returning data", LogLevel.Info);
+                return true;
+            }
+            LoggerFactory.GetInstance().Log($"Ignoring file {file} as the file cap for map {fi.Data.Name} has been reached", LogLevel.Info);
         }
 
         LoggerFactory.GetInstance().Log(
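Note: the intake reader now counts accepted dumps per map in a ConcurrentDictionary and skips further files once maxDumpsPerMap is reached. A sketch of that cap; the helper method, field names and map name are illustrative, not the PR's API:

using System;
using System.Collections.Concurrent;

class MapDumpCapSketch
{
    private static readonly ConcurrentDictionary<string, int> _totalMapDumpsCounter = new();
    private const int MaxDumpsPerMap = 1500; // default added to IntakeReaderConfig

    // Returns true while the map is still under its dump cap, and counts the accepted dump.
    static bool TryAccept(string mapName)
    {
        var counter = _totalMapDumpsCounter.GetOrAdd(mapName, 0);
        if (counter >= MaxDumpsPerMap)
            return false;
        _totalMapDumpsCounter[mapName] = counter + 1;
        return true;
    }

    static void Main()
    {
        Console.WriteLine(TryAccept("Streets")); // True until 1500 dumps for "Streets" were accepted
    }
}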
Serializers/Json/Converters/NetDateTimeConverter.cs (new file, 22 lines)
@@ -0,0 +1,22 @@
+using System.Globalization;
+using System.Text.Json;
+using System.Text.Json.Serialization;
+
+namespace LootDumpProcessor.Serializers.Json.Converters;
+
+public class NetDateTimeConverter : JsonConverter<DateTime>
+{
+    private static string _dateTimeFormat = "yyyy-MM-dd HH:mm:ss";
+
+    public override DateTime Read(ref Utf8JsonReader reader, Type typeToConvert, JsonSerializerOptions options)
+    {
+        var stringDate = reader.GetString() ?? "";
+        if (!DateTime.TryParseExact(stringDate, _dateTimeFormat, null, DateTimeStyles.None, out var parsedDate))
+            throw new Exception($"Invalid value for DateTime format: {_dateTimeFormat}");
+        return parsedDate;
+    }
+
+    public override void Write(Utf8JsonWriter writer, DateTime value, JsonSerializerOptions options)
+    {
+        writer.WriteStringValue(value.ToString(_dateTimeFormat));
+    }
+}
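Note: a usage sketch for NetDateTimeConverter as defined above, registered on JsonSerializerOptions so DateTime values round-trip in the "yyyy-MM-dd HH:mm:ss" form used by config.json (the demo class below is not part of the PR):

using System;
using System.Text.Json;
using LootDumpProcessor.Serializers.Json.Converters; // converter added in this PR

class NetDateTimeConverterUsage
{
    static void Main()
    {
        var options = new JsonSerializerOptions { Converters = { new NetDateTimeConverter() } };
        var json = JsonSerializer.Serialize(new DateTime(2023, 8, 10, 0, 0, 1), options);
        Console.WriteLine(json); // "2023-08-10 00:00:01"
        var roundTripped = JsonSerializer.Deserialize<DateTime>(json, options);
        Console.WriteLine(roundTripped == new DateTime(2023, 8, 10, 0, 0, 1)); // True
    }
}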
@@ -1,7 +1,8 @@
 using System.Text.Json;
 using System.Text.Json.Serialization;
+using LootDumpProcessor.Storage;
 
-namespace LootDumpProcessor.Storage;
+namespace LootDumpProcessor.Serializers.Json.Converters;
 
 public class NetJsonKeyConverter : JsonConverter<IKey?>
 {
Serializers/Json/Converters/NewtonsoftDateTimeConverter.cs (new file, 28 lines)
@@ -0,0 +1,28 @@
+using System.Globalization;
+using Newtonsoft.Json;
+
+namespace LootDumpProcessor.Serializers.Json.Converters;
+
+public class NewtonsoftDateTimeConverter : JsonConverter<DateTime>
+{
+    private static string _dateTimeFormat = "yyyy-MM-dd HH:mm:ss";
+
+    public override void WriteJson(JsonWriter writer, DateTime value, JsonSerializer serializer)
+    {
+        writer.WriteValue(value.ToString(_dateTimeFormat));
+    }
+
+    public override DateTime ReadJson(
+        JsonReader reader,
+        Type objectType,
+        DateTime existingValue,
+        bool hasExistingValue,
+        JsonSerializer serializer
+    )
+    {
+        var stringDate = reader.Value?.ToString() ?? "";
+        if (!DateTime.TryParseExact(stringDate, _dateTimeFormat, null, DateTimeStyles.None, out var parsedDate))
+            throw new Exception($"Invalid value for DateTime format: {_dateTimeFormat}");
+        return parsedDate;
+    }
+}
@@ -1,6 +1,7 @@
-using Newtonsoft.Json;
+using LootDumpProcessor.Storage;
+using Newtonsoft.Json;
 
-namespace LootDumpProcessor.Storage;
+namespace LootDumpProcessor.Serializers.Json.Converters;
 
 public class NewtonsoftJsonKeyConverter : JsonConverter<AbstractKey>
 {
@@ -1,6 +1,6 @@
 using System.Text.Json;
 using System.Text.Json.Serialization;
-using LootDumpProcessor.Storage;
+using LootDumpProcessor.Serializers.Json.Converters;
 
 namespace LootDumpProcessor.Serializers.Json;
 
@@ -12,9 +12,11 @@ public class NetJsonSerializer : IJsonSerializer
         Converters =
         {
             new NetJsonKeyConverter(),
-            new JsonStringEnumConverter()
+            new JsonStringEnumConverter(),
+            new NetDateTimeConverter()
         }
     };
 
     public string Serialize<T>(T obj)
     {
         return JsonSerializer.Serialize(obj, _serializeOptions);
@@ -1,4 +1,4 @@
-using LootDumpProcessor.Storage;
+using LootDumpProcessor.Serializers.Json.Converters;
 using Newtonsoft.Json;
 using Newtonsoft.Json.Converters;
 
@@ -11,7 +11,8 @@ public class NewtonsoftJsonSerializer : IJsonSerializer
         Converters =
         {
            new NewtonsoftJsonKeyConverter(),
-            new StringEnumConverter()
+            new StringEnumConverter(),
+            new NewtonsoftDateTimeConverter()
         }
     };
 
Utils/FileDateParser.cs (new file, 32 lines)
@@ -0,0 +1,32 @@
+using System.Text.RegularExpressions;
+
+namespace LootDumpProcessor.Process.Processor;
+
+public static class FileDateParser
+{
+    private static readonly Regex _fileDateRegex =
+        new(".*([0-9]{4})[-]([0-9]{2})[-]([0-9]{2})[_]([0-9]{2})[-]([0-9]{2})[-]([0-9]{2}).*");
+
+    public static bool TryParseFileDate(string fileName, out DateTime? date)
+    {
+        date = null;
+        if (!_fileDateRegex.IsMatch(fileName))
+            return false;
+        var match = _fileDateRegex.Match(fileName);
+        var year = match.Groups[1].Value;
+        var month = match.Groups[2].Value;
+        var day = match.Groups[3].Value;
+        var hour = match.Groups[4].Value;
+        var mins = match.Groups[5].Value;
+        var secs = match.Groups[6].Value;
+        date = new DateTime(
+            int.Parse(year),
+            int.Parse(month),
+            int.Parse(day),
+            int.Parse(hour),
+            int.Parse(mins),
+            int.Parse(secs)
+        );
+        return true;
+    }
+}
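Note: a usage sketch for the new FileDateParser; any file name containing a yyyy-MM-dd_HH-mm-ss timestamp parses, and the example name below is invented:

using System;
using LootDumpProcessor.Process.Processor; // namespace of the new FileDateParser

class FileDateParserUsage
{
    static void Main()
    {
        if (FileDateParser.TryParseFileDate("2023-08-11_09-30-15_Streets.json", out var date) && date.HasValue)
            Console.WriteLine(date.Value.ToString("yyyy-MM-dd HH:mm:ss")); // 2023-08-11 09:30:15
        else
            Console.WriteLine("No timestamp in file name");
    }
}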