// LootDumpProcessor/Process/Reader/Intake/JsonFileIntakeReader.cs
using System.Collections.Concurrent;
using LootDumpProcessor.Logger;
using LootDumpProcessor.Model.Input;
using LootDumpProcessor.Model.Processing;
using LootDumpProcessor.Serializers.Json;
using LootDumpProcessor.Utils;
namespace LootDumpProcessor.Process.Reader.Intake;
public class JsonFileIntakeReader : IIntakeReader
{
    private static readonly IJsonSerializer _jsonSerializer = JsonSerializerFactory.GetInstance();

    // Map names listed in IntakeReaderConfig.IgnoredDumpLocations; dumps from these maps are skipped.
    private static readonly HashSet<string>? _ignoredLocations =
        LootDumpProcessorContext.GetConfig().ReaderConfig.IntakeReaderConfig?.IgnoredDumpLocations.ToHashSet();

    // Count of accepted dumps per map name, shared across parallel readers to enforce MaxDumpsPerMap.
    private static readonly ConcurrentDictionary<string, int> _totalMapDumpsCounter = new();

    /// <summary>
    /// Reads a single loot-dump JSON file and, if the map is not ignored and the per-map
    /// dump cap has not been reached, converts it into a <see cref="BasicInfo"/>.
    /// </summary>
    /// <param name="file">Path of the dump file to read.</param>
    /// <param name="basicInfo">The parsed dump info when accepted; null when rejected.</param>
    /// <returns>true when the file was accepted and <paramref name="basicInfo"/> populated; false otherwise.</returns>
    public bool Read(string file, out BasicInfo basicInfo)
    {
        var fileData = File.ReadAllText(file);
        // File.ReadAllText never returns null; an empty file is the actual failure mode here.
        // (The original null check also logged a copy-pasted "Couldnt parse date" message.)
        if (string.IsNullOrEmpty(fileData))
        {
            if (LoggerFactory.GetInstance().CanBeLogged(LogLevel.Error))
                LoggerFactory.GetInstance().Log($"Couldnt read data from file: {file}", LogLevel.Error);
            basicInfo = null;
            return false;
        }

        // If the file format changes it may screw up this date parser.
        // A failed parse is logged but not fatal; Date falls back to DateTime.MinValue below.
        if (!FileDateParser.TryParseFileDate(file, out var date))
        {
            if (LoggerFactory.GetInstance().CanBeLogged(LogLevel.Error))
                LoggerFactory.GetInstance().Log($"Couldnt parse date from file: {file}", LogLevel.Error);
        }

        var fi = _jsonSerializer.Deserialize<RootData>(fileData);
        var mapName = fi?.Data?.LocationLoot?.Name;
        if (mapName != null && (!_ignoredLocations?.Contains(mapName) ?? true))
        {
            var maxDumps = LootDumpProcessorContext.GetConfig().ReaderConfig.IntakeReaderConfig?.MaxDumpsPerMap ?? 1500;
            // Atomically claim a slot for this map. The previous TryGetValue/compare/indexer
            // sequence was a non-atomic read-modify-write, so parallel readers could exceed
            // the cap. AddOrUpdate returns the post-increment count for this reader's claim.
            var claimedCount = _totalMapDumpsCounter.AddOrUpdate(mapName, 1, (_, current) => current + 1);
            if (claimedCount <= maxDumps)
            {
                basicInfo = new BasicInfo
                {
                    // Invariant lowercase: the map id is a machine key, not user-facing text (CA1304).
                    Map = fi.Data.LocationLoot.Id.ToLowerInvariant(),
                    FileHash = ProcessorUtil.HashFile(fileData),
                    Data = fi,
                    Date = date ?? DateTime.MinValue,
                    FileName = file
                };
                if (LoggerFactory.GetInstance().CanBeLogged(LogLevel.Debug))
                    LoggerFactory.GetInstance().Log($"File {file} fully read, returning data", LogLevel.Debug);
                return true;
            }

            // Map dump limit reached, exit
            if (LoggerFactory.GetInstance().CanBeLogged(LogLevel.Debug))
                LoggerFactory.GetInstance().Log($"Ignoring file {file} as the file cap for map {fi.Data.LocationLoot.Id} has been reached", LogLevel.Debug);
            basicInfo = null;
            return false;
        }

        if (LoggerFactory.GetInstance().CanBeLogged(LogLevel.Warning))
            LoggerFactory.GetInstance().Log($"File {file} was not eligible for dump data, it did not contain a location name or it was on ignored locations config", LogLevel.Warning);
        basicInfo = null;
        return false;
    }
}