using System.Collections.Concurrent;
using LootDumpProcessor.Logger;
using LootDumpProcessor.Model.Input;
using LootDumpProcessor.Model.Processing;
using LootDumpProcessor.Serializers.Json;
using LootDumpProcessor.Utils;

namespace LootDumpProcessor.Process.Reader.Intake;

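/// <summary>
/// Intake reader that parses loot dump JSON files into <see cref="BasicInfo"/> entries,
/// skipping ignored locations and enforcing the configured per-map dump cap.
/// </summary>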
public class JsonFileIntakeReader : IIntakeReader
{
    private static readonly IJsonSerializer _jsonSerializer = JsonSerializerFactory.GetInstance();

    private static readonly HashSet<string>? _ignoredLocations =
        LootDumpProcessorContext.GetConfig().ReaderConfig.IntakeReaderConfig?.IgnoredDumpLocations.ToHashSet();

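    // Number of dumps accepted so far per map name, used to enforce MaxDumpsPerMap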
    private static readonly ConcurrentDictionary<string, int> _totalMapDumpsCounter = new();

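    /// <summary>
    /// Reads a single dump file and extracts its basic information.
    /// </summary>
    /// <param name="file">Path of the dump file to read</param>
    /// <param name="basicInfo">Parsed dump information, or null when the file is skipped</param>
    /// <returns>True when the dump was accepted, false when it was skipped or could not be read</returns>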
    public bool Read(string file, out BasicInfo basicInfo)
    {
        var fileData = File.ReadAllText(file);

        if (fileData == null)
        {
            if (LoggerFactory.GetInstance().CanBeLogged(LogLevel.Error))
                LoggerFactory.GetInstance().Log($"Couldn't read any data from file: {file}", LogLevel.Error);
            basicInfo = null;
            return false;
        }

        // If the file format changes it may screw up this date parser
        if (!FileDateParser.TryParseFileDate(file, out var date))
        {
            if (LoggerFactory.GetInstance().CanBeLogged(LogLevel.Error))
                LoggerFactory.GetInstance().Log($"Couldn't parse date from file: {file}", LogLevel.Error);
        }

        var fi = _jsonSerializer.Deserialize<RootData>(fileData);

        if (fi?.Data?.LocationLoot?.Name != null && (!_ignoredLocations?.Contains(fi.Data.LocationLoot.Name) ?? true))
        {
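            // First dump seen for this map, start its counter at zero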
            if (!_totalMapDumpsCounter.TryGetValue(fi.Data.LocationLoot.Name, out var counter))
            {
                counter = 0;
                _totalMapDumpsCounter[fi.Data.LocationLoot.Name] = counter;
            }

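            // Only accept the dump while this map is still below the configured cap (default 1500 dumps per map)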
            if (counter < (LootDumpProcessorContext.GetConfig().ReaderConfig.IntakeReaderConfig?.MaxDumpsPerMap ?? 1500))
            {
                basicInfo = new BasicInfo
                {
                    Map = fi.Data.LocationLoot.Id.ToLower(),
                    FileHash = ProcessorUtil.HashFile(fileData),
                    Data = fi,
                    Date = date ?? DateTime.MinValue,
                    FileName = file
                };

                _totalMapDumpsCounter[fi.Data.LocationLoot.Name] += 1;

                if (LoggerFactory.GetInstance().CanBeLogged(LogLevel.Debug))
                    LoggerFactory.GetInstance().Log($"File {file} fully read, returning data", LogLevel.Debug);

                return true;
            }

            // Map dump limit reached, exit
            if (LoggerFactory.GetInstance().CanBeLogged(LogLevel.Debug))
                LoggerFactory.GetInstance().Log($"Ignoring file {file} as the file cap for map {fi.Data.LocationLoot.Id} has been reached", LogLevel.Debug);

            basicInfo = null;
            return false;
        }

        if (LoggerFactory.GetInstance().CanBeLogged(LogLevel.Warning))
            LoggerFactory.GetInstance().Log($"File {file} was not eligible for dump data: it did not contain a location name or its location is in the ignored locations config", LogLevel.Warning);

        basicInfo = null;
        return false;
    }
}