Prevent log message construction to save on string allocations
parent f861a9c25a
commit 2930581d25
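
The pattern applied throughout this commit: every Log call is wrapped in a CanBeLogged level check so that interpolated message strings are only built when the message will actually be emitted; some messages are also moved to more appropriate levels (Debug or Warning). Below is a minimal, self-contained sketch of the idea. SketchLogger and the sample file name are hypothetical stand-ins that mirror the ILogger/QueueLogger members added in the diff; only the before/after shape matters.

    using System;

    public enum LogLevel
    {
        Error,
        Warning,
        Info,
        Debug
    }

    // Hypothetical stand-in mirroring the project's LoggerFactory/QueueLogger pair.
    public class SketchLogger
    {
        private readonly LogLevel _threshold;

        public SketchLogger(LogLevel threshold) => _threshold = threshold;

        // Mirrors the CanBeLogged(LogLevel) member added to ILogger below.
        public bool CanBeLogged(LogLevel level) => level <= _threshold;

        public void Log(string message, LogLevel level)
        {
            if (CanBeLogged(level))
                Console.WriteLine($"[{level}] {message}");
        }
    }

    public static class Sketch
    {
        public static void Main()
        {
            var logger = new SketchLogger(LogLevel.Info);
            var fileName = "dump.json"; // hypothetical sample value

            // Before: the interpolated string is allocated even though Debug output is filtered out.
            logger.Log($"Processing file {fileName}...", LogLevel.Debug);

            // After: check the level first; the message string is only built when it will be emitted.
            if (logger.CanBeLogged(LogLevel.Debug))
                logger.Log($"Processing file {fileName}...", LogLevel.Debug);
        }
    }

The same guard shape repeats in every file touched by the diff that follows.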
@@ -4,5 +4,6 @@ public interface ILogger
 {
     void Setup();
     void Log(string message, LogLevel level);
+    bool CanBeLogged(LogLevel level);
     void Stop();
 }
@@ -72,6 +72,11 @@ public class QueueLogger : ILogger
         queuedMessages.Add(new LoggedMessage { Message = message, LogLevel = level });
     }
 
+    public bool CanBeLogged(LogLevel level)
+    {
+        return GetLogLevel(level) <= logLevel;
+    }
+
     // Wait for graceful termination of the logging thread
     public void Stop()
     {
@@ -26,11 +26,13 @@ public class MultithreadSteppedDumpProcessor : IDumpProcessor
 
     public Dictionary<OutputFileType, object> ProcessDumps(List<PartialData> dumps)
     {
-        LoggerFactory.GetInstance().Log("Starting final dump processing", LogLevel.Info);
+        if (LoggerFactory.GetInstance().CanBeLogged(LogLevel.Info))
+            LoggerFactory.GetInstance().Log("Starting final dump processing", LogLevel.Info);
         var output = new Dictionary<OutputFileType, object>();
 
         var dumpProcessData = GetDumpProcessData(dumps);
-        LoggerFactory.GetInstance().Log("Heavy processing done!", LogLevel.Info);
+        if (LoggerFactory.GetInstance().CanBeLogged(LogLevel.Info))
+            LoggerFactory.GetInstance().Log("Heavy processing done!", LogLevel.Info);
 
         var staticContainers = new Dictionary<string, MapStaticLoot>();
         var staticContainersLock = new object();
@@ -43,13 +45,15 @@ public class MultithreadSteppedDumpProcessor : IDumpProcessor
 
         Runners.Clear();
         // BSG changed the map data so static containers are now dynamic, so we need to scan all dumps for the static containers.
-        LoggerFactory.GetInstance().Log("Queuing dumps for static data processing", LogLevel.Info);
+        if (LoggerFactory.GetInstance().CanBeLogged(LogLevel.Info))
+            LoggerFactory.GetInstance().Log("Queuing dumps for static data processing", LogLevel.Info);
         foreach (var dumped in dumps)
         {
             Runners.Add(
                 Task.Factory.StartNew(() =>
                 {
-                    LoggerFactory.GetInstance().Log($"Processing static data for file {dumped.BasicInfo.FileName}", LogLevel.Info);
+                    if (LoggerFactory.GetInstance().CanBeLogged(LogLevel.Debug))
+                        LoggerFactory.GetInstance().Log($"Processing static data for file {dumped.BasicInfo.FileName}", LogLevel.Debug);
                     var data = _jsonSerializer.Deserialize<RootData>(File.ReadAllText(dumped.BasicInfo.FileName));
                     // the if statement below takes care of processing "forced" or real static data for each map, we only need
                     // to do this once per map, so we dont care about doing it again
@@ -57,7 +61,8 @@ public class MultithreadSteppedDumpProcessor : IDumpProcessor
                     {
                         if (!staticContainers.ContainsKey(data.Data.Name))
                         {
-                            LoggerFactory.GetInstance().Log($"Doing first time process for map {data.Data.Name} of real static data", LogLevel.Info);
+                            if (LoggerFactory.GetInstance().CanBeLogged(LogLevel.Info))
+                                LoggerFactory.GetInstance().Log($"Doing first time process for map {data.Data.Name} of real static data", LogLevel.Info);
                             var mapStaticLoot = StaticLootProcessor.CreateRealStaticContainers(data);
                             staticContainers[mapStaticLoot.Item1] = mapStaticLoot.Item2;
                         }
@@ -114,7 +119,8 @@ public class MultithreadSteppedDumpProcessor : IDumpProcessor
         }
 
         Task.WaitAll(Runners.ToArray());
-        LoggerFactory.GetInstance().Log("All static data processing threads finished", LogLevel.Info);
+        if (LoggerFactory.GetInstance().CanBeLogged(LogLevel.Info))
+            LoggerFactory.GetInstance().Log("All static data processing threads finished", LogLevel.Info);
         // Aggregate and calculate the probability of a static container
         mapStaticContainersAggregated.ToDictionary(
             kv => kv.Key,
@@ -129,35 +135,39 @@ public class MultithreadSteppedDumpProcessor : IDumpProcessor
 
         // Static containers
         output.Add(OutputFileType.StaticContainer, staticContainers);
 
-        LoggerFactory.GetInstance().Log("Processing ammo distribution", LogLevel.Info);
+        if (LoggerFactory.GetInstance().CanBeLogged(LogLevel.Info))
+            LoggerFactory.GetInstance().Log("Processing ammo distribution", LogLevel.Info);
         // Ammo distribution
         output.Add(
             OutputFileType.StaticAmmo,
             StaticLootProcessor.CreateAmmoDistribution(dumpProcessData.ContainerCounts)
         );
 
-        LoggerFactory.GetInstance().Log("Processing static loot distribution", LogLevel.Info);
+        if (LoggerFactory.GetInstance().CanBeLogged(LogLevel.Info))
+            LoggerFactory.GetInstance().Log("Processing static loot distribution", LogLevel.Info);
         // Static loot distribution
         output.Add(
             OutputFileType.StaticLoot,
             StaticLootProcessor.CreateStaticLootDistribution(dumpProcessData.ContainerCounts)
         );
 
-        LoggerFactory.GetInstance().Log("Processing loose loot distribution", LogLevel.Info);
+        if (LoggerFactory.GetInstance().CanBeLogged(LogLevel.Info))
+            LoggerFactory.GetInstance().Log("Processing loose loot distribution", LogLevel.Info);
         // Loose loot distribution
         var looseLootDistribution = LooseLootProcessor.CreateLooseLootDistribution(
             dumpProcessData.MapCounts,
             dumpProcessData.LooseLootCounts
         );
 
-        LoggerFactory.GetInstance().Log("Collecting loose loot distribution information", LogLevel.Info);
+        if (LoggerFactory.GetInstance().CanBeLogged(LogLevel.Info))
+            LoggerFactory.GetInstance().Log("Collecting loose loot distribution information", LogLevel.Info);
         var loot = dumpProcessData.MapCounts
             .Select(mapCount => mapCount.Key)
             .ToDictionary(mi => mi, mi => looseLootDistribution[mi]);
 
         output.Add(OutputFileType.LooseLoot, loot);
-        LoggerFactory.GetInstance().Log("Dump processing fully completed!", LogLevel.Info);
+        if (LoggerFactory.GetInstance().CanBeLogged(LogLevel.Info))
+            LoggerFactory.GetInstance().Log("Dump processing fully completed!", LogLevel.Info);
         return output;
     }
 
@@ -176,10 +186,11 @@ public class MultithreadSteppedDumpProcessor : IDumpProcessor
         {
             var mapi = tuple.Key;
             var g = tuple.ToList();
-            LoggerFactory.GetInstance().Log(
-                $"Processing map {mapi}, total dump data to process: {g.Count}",
-                LogLevel.Info
-            );
+            if (LoggerFactory.GetInstance().CanBeLogged(LogLevel.Info))
+                LoggerFactory.GetInstance().Log(
+                    $"Processing map {mapi}, total dump data to process: {g.Count}",
+                    LogLevel.Info
+                );
             dumpProcessData.MapCounts[mapi] = g.Count;
 
             var lockObjectContainerCounts = new object();
@@ -275,18 +286,16 @@ public class MultithreadSteppedDumpProcessor : IDumpProcessor
 
                         lock (lockObjectCounts)
                         {
-                            counts.MapSpawnpointCount.AddRange(new List<int>
-                            {
-                                dumpData.LooseLoot.MapSpawnpointCount
-                            });
+                            counts.MapSpawnpointCount.Add(dumpData.LooseLoot.MapSpawnpointCount);
                         }
                     }
                     catch (Exception e)
                    {
-                        LoggerFactory.GetInstance().Log(
-                            $"ERROR OCCURRED:{e.Message}\n{e.StackTrace}",
-                            LogLevel.Error
-                        );
+                        if (LoggerFactory.GetInstance().CanBeLogged(LogLevel.Error))
+                            LoggerFactory.GetInstance().Log(
+                                $"ERROR OCCURRED:{e.Message}\n{e.StackTrace}",
+                                LogLevel.Error
+                            );
                     }
                 }
             },
@@ -297,10 +306,11 @@ public class MultithreadSteppedDumpProcessor : IDumpProcessor
         // Wait until all runners are done processing
         while (!Runners.All(r => r.IsCompleted))
         {
-            LoggerFactory.GetInstance().Log(
-                $"One or more file processors are still processing files. Waiting {LootDumpProcessorContext.GetConfig().ThreadPoolingTimeoutMs}ms before checking again",
-                LogLevel.Info
-            );
+            if (LoggerFactory.GetInstance().CanBeLogged(LogLevel.Info))
+                LoggerFactory.GetInstance().Log(
+                    $"One or more file processors are still processing files. Waiting {LootDumpProcessorContext.GetConfig().ThreadPoolingTimeoutMs}ms before checking again",
+                    LogLevel.Info
+                );
             Thread.Sleep(
                 TimeSpan.FromMilliseconds(LootDumpProcessorContext.GetConfig().ThreadPoolingTimeoutMs));
         }
@@ -9,7 +9,8 @@ public class FileProcessor : IFileProcessor
 {
     public PartialData Process(BasicInfo parsedData)
     {
-        LoggerFactory.GetInstance().Log($"Processing file {parsedData.FileName}...", LogLevel.Info);
+        if (LoggerFactory.GetInstance().CanBeLogged(LogLevel.Debug))
+            LoggerFactory.GetInstance().Log($"Processing file {parsedData.FileName}...", LogLevel.Debug);
         List<Template> looseLoot = new List<Template>();
         List<Template> staticLoot = new List<Template>();
 
@@ -28,7 +29,7 @@ public class FileProcessor : IFileProcessor
             BasicInfo = parsedData
         };
 
-        PartialData data = new PartialData
+        var data = new PartialData
         {
             BasicInfo = parsedData,
             ParsedDumpKey = (AbstractKey)dumpData.GetKey()
@@ -36,16 +37,17 @@ public class FileProcessor : IFileProcessor
 
        if (!DataStorageFactory.GetInstance().Exists(dumpData.GetKey()))
        {
-            LoggerFactory.GetInstance().Log(
-                $"Cached not found for {string.Join("/", dumpData.GetKey().GetLookupIndex())} processing.",
-                LogLevel.Info
-            );
+            if (LoggerFactory.GetInstance().CanBeLogged(LogLevel.Debug))
+                LoggerFactory.GetInstance().Log(
+                    $"Cached not found for {string.Join("/", dumpData.GetKey().GetLookupIndex())} processing.",
+                    LogLevel.Debug
+                );
            dumpData.Containers = StaticLootProcessor.PreProcessStaticLoot(staticLoot);
            dumpData.LooseLoot = LooseLootProcessor.PreProcessLooseLoot(looseLoot);
            DataStorageFactory.GetInstance().Store(dumpData);
        }
 
-        LoggerFactory.GetInstance().Log($"File {parsedData.FileName} finished processing!", LogLevel.Info);
+        if (LoggerFactory.GetInstance().CanBeLogged(LogLevel.Debug))
+            LoggerFactory.GetInstance().Log($"File {parsedData.FileName} finished processing!", LogLevel.Debug);
         return data;
     }
 }
@@ -2,9 +2,12 @@
 
 public static class FileProcessorFactory
 {
+    private static IFileProcessor _fileProcessor;
     public static IFileProcessor GetInstance()
     {
-        // implement actual factory someday
-        return new FileProcessor();
+        // TODO: implement actual factory someday
+        if (_fileProcessor == null)
+            _fileProcessor = new FileProcessor();
+        return _fileProcessor;
     }
 }
@@ -164,10 +164,11 @@ public static class LooseLootProcessor
                     Template = template
                 };
                 looseLootDistribution[mapName].SpawnPointsForced.Add(spawnPointToAdd);
-                LoggerFactory.GetInstance().Log(
-                    $"Item: {template.Id} has > {LootDumpProcessorContext.GetConfig().ProcessorConfig.SpawnPointToleranceForForced}% spawn chance in spawn point: {spawnPointToAdd.LocationId} but isn't in forced loot, adding to forced",
-                    LogLevel.Warning
-                );
+                if (LoggerFactory.GetInstance().CanBeLogged(LogLevel.Warning))
+                    LoggerFactory.GetInstance().Log(
+                        $"Item: {template.Id} has > {LootDumpProcessorContext.GetConfig().ProcessorConfig.SpawnPointToleranceForForced}% spawn chance in spawn point: {spawnPointToAdd.LocationId} but isn't in forced loot, adding to forced",
+                        LogLevel.Warning
+                    );
             }
             else // Normal spawn point, add to non-forced spawnpoint array
             {
@@ -205,10 +206,11 @@ public static class LooseLootProcessor
                 }
                 else
                 {
-                    LoggerFactory.GetInstance().Log(
-                        $"Item template {distribution.ComposedKey?.FirstItem?.Tpl} was on loose loot distribution for spawn point {template.Id} but the spawn points didnt contain a template matching it.",
-                        LogLevel.Error
-                    );
+                    if (LoggerFactory.GetInstance().CanBeLogged(LogLevel.Error))
+                        LoggerFactory.GetInstance().Log(
+                            $"Item template {distribution.ComposedKey?.FirstItem?.Tpl} was on loose loot distribution for spawn point {template.Id} but the spawn points didnt contain a template matching it.",
+                            LogLevel.Error
+                        );
                 }
             }
 
@@ -249,10 +251,11 @@ public static class LooseLootProcessor
         {
             if (!forcedTplsFound.Contains(itemTpl))
             {
-                LoggerFactory.GetInstance().Log(
-                    $"Expected item: {itemTpl} defined in forced_loose.yaml config not found in forced loot",
-                    LogLevel.Error
-                );
+                if (LoggerFactory.GetInstance().CanBeLogged(LogLevel.Error))
+                    LoggerFactory.GetInstance().Log(
+                        $"Expected item: {itemTpl} defined in forced_loose.yaml config not found in forced loot",
+                        LogLevel.Error
+                    );
             }
         }
 
@@ -261,10 +264,11 @@ public static class LooseLootProcessor
         {
             if (!forcedTplsInConfig.Contains(itemTpl))
             {
-                LoggerFactory.GetInstance().Log(
-                    $"Map: {mapName} Item: {itemTpl} not defined in forced_loose.yaml config but was flagged as forced by code",
-                    LogLevel.Warning
-                );
+                if (LoggerFactory.GetInstance().CanBeLogged(LogLevel.Warning))
+                    LoggerFactory.GetInstance().Log(
+                        $"Map: {mapName} Item: {itemTpl} not defined in forced_loose.yaml config but was flagged as forced by code",
+                        LogLevel.Warning
+                    );
             }
         }
     }
@@ -41,7 +41,8 @@ public class QueuePipeline : IPipeline
         // We add 2 more threads to the total count to account for subprocesses and others
         int threads = LootDumpProcessorContext.GetConfig().Threads;
         ThreadPool.SetMaxThreads(threads + 2, threads + 2);
 
+        if (LoggerFactory.GetInstance().CanBeLogged(LogLevel.Info))
             LoggerFactory.GetInstance().Log("Gathering files to begin processing", LogLevel.Info);
         try
         {
             // Gather all files, sort them by date descending and then add them into the processing queue
@@ -51,10 +52,15 @@ public class QueuePipeline : IPipeline
                     return date;
                 }
             ).ToList().ForEach(f => _filesToProcess.Add(f));
 
+            if (LoggerFactory.GetInstance().CanBeLogged(LogLevel.Info))
                 LoggerFactory.GetInstance().Log("Files sorted and ready to begin pre-processing", LogLevel.Info);
 
             // We startup all the threads and collect them into a runners list
             for (int i = 0; i < threads; i++)
             {
+                if (LoggerFactory.GetInstance().CanBeLogged(LogLevel.Info))
                     LoggerFactory.GetInstance().Log("Creating pre-processing threads", LogLevel.Info);
                 Runners.Add(
                     Task.Factory.StartNew(
                         () =>
@@ -70,9 +76,10 @@ public class QueuePipeline : IPipeline
                             }
                             catch (Exception e)
                            {
-                                LoggerFactory.GetInstance().Log(
-                                    $"Error occurred while processing file {file}\n{e.Message}\n{e.StackTrace}",
-                                    LogLevel.Error);
+                                if (LoggerFactory.GetInstance().CanBeLogged(LogLevel.Error))
+                                    LoggerFactory.GetInstance().Log(
+                                        $"Error occurred while processing file {file}\n{e.Message}\n{e.StackTrace}",
+                                        LogLevel.Error);
                             }
                         }
                     },
@@ -83,12 +90,14 @@ public class QueuePipeline : IPipeline
         // Wait until all runners are done processing
         while (!Runners.All(r => r.IsCompleted))
        {
-            LoggerFactory.GetInstance().Log(
-                $"One or more file processors are still processing files. Waiting {LootDumpProcessorContext.GetConfig().ThreadPoolingTimeoutMs}ms before checking again",
-                LogLevel.Info);
+            if (LoggerFactory.GetInstance().CanBeLogged(LogLevel.Info))
+                LoggerFactory.GetInstance().Log(
+                    $"One or more file processors are still processing files. Waiting {LootDumpProcessorContext.GetConfig().ThreadPoolingTimeoutMs}ms before checking again",
+                    LogLevel.Info);
             Thread.Sleep(TimeSpan.FromMilliseconds(LootDumpProcessorContext.GetConfig().ThreadPoolingTimeoutMs));
         }
 
+        if (LoggerFactory.GetInstance().CanBeLogged(LogLevel.Info))
             LoggerFactory.GetInstance().Log("Pre-processing finished", LogLevel.Info);
         // Single writer instance to collect results
         var writer = WriterFactory.GetInstance();
         // Single collector instance to collect results
@@ -14,9 +14,10 @@ public class JsonDumpFileFilter : IFileFilter
         // Calculate parsed date from config threshold
         if (string.IsNullOrEmpty(LootDumpProcessorContext.GetConfig().ReaderConfig.ThresholdDate))
        {
-            LoggerFactory.GetInstance()
-                .Log($"ThresholdDate is null or empty in configs, defaulting to current day minus 30 days",
-                    LogLevel.Warning);
+            if (LoggerFactory.GetInstance().CanBeLogged(LogLevel.Warning))
+                LoggerFactory.GetInstance()
+                    .Log($"ThresholdDate is null or empty in configs, defaulting to current day minus 30 days",
+                        LogLevel.Warning);
             _parsedThresholdDate = (DateTime.Now - TimeSpan.FromDays(30));
         }
         else
@@ -2,15 +2,27 @@
 
 public static class IntakeReaderFactory
 {
+    private static readonly Dictionary<IntakeReaderTypes, IIntakeReader> Instances = new();
+    private static readonly object DictionaryLock = new();
     public static IIntakeReader GetInstance()
     {
-        return (LootDumpProcessorContext.GetConfig().ReaderConfig.IntakeReaderConfig?.IntakeReaderType ?? IntakeReaderTypes.Json) switch
+        var type = LootDumpProcessorContext.GetConfig().ReaderConfig.IntakeReaderConfig?.IntakeReaderType ??
+                   IntakeReaderTypes.Json;
+        lock (DictionaryLock)
         {
-            IntakeReaderTypes.Json => new JsonFileIntakeReader(),
-            _ => throw new ArgumentOutOfRangeException(
-                "IntakeReaderType",
-                "Value was not defined on IntakeReaderConfig"
-            )
-        };
+            if (!Instances.TryGetValue(type, out var intakeReader))
+            {
+                intakeReader = type switch
+                {
+                    IntakeReaderTypes.Json => new JsonFileIntakeReader(),
+                    _ => throw new ArgumentOutOfRangeException(
+                        "IntakeReaderType",
+                        "Value was not defined on IntakeReaderConfig"
+                    )
+                };
+                Instances.Add(type, intakeReader);
+            }
+            return intakeReader;
+        }
     }
 }
@@ -21,7 +21,10 @@ public class JsonFileIntakeReader : IIntakeReader
         var fileData = File.ReadAllText(file);
         // If the file format changes it may screw up this date parser
         if (!FileDateParser.TryParseFileDate(file, out var date))
-            LoggerFactory.GetInstance().Log($"Couldnt parse date from file: {file}", LogLevel.Error);
+        {
+            if (LoggerFactory.GetInstance().CanBeLogged(LogLevel.Error))
+                LoggerFactory.GetInstance().Log($"Couldnt parse date from file: {file}", LogLevel.Error);
+        }
 
         var fi = _jsonSerializer.Deserialize<RootData>(fileData);
         if (fi.Data?.Name != null && (!_ignoredLocations?.Contains(fi.Data.Name) ?? true))
@@ -43,16 +46,19 @@ public class JsonFileIntakeReader : IIntakeReader
                     FileName = file
                 };
                 _totalMapDumpsCounter[fi.Data.Name] += 1;
-                LoggerFactory.GetInstance().Log($"File {file} fully read, returning data", LogLevel.Info);
+                if (LoggerFactory.GetInstance().CanBeLogged(LogLevel.Debug))
+                    LoggerFactory.GetInstance().Log($"File {file} fully read, returning data", LogLevel.Debug);
                 return true;
             }
-            LoggerFactory.GetInstance().Log($"Ignoring file {file} as the file cap for map {fi.Data.Name} has been reached", LogLevel.Info);
+            if (LoggerFactory.GetInstance().CanBeLogged(LogLevel.Debug))
+                LoggerFactory.GetInstance().Log($"Ignoring file {file} as the file cap for map {fi.Data.Name} has been reached", LogLevel.Debug);
         }
 
-        LoggerFactory.GetInstance().Log(
-            $"File {file} was not eligible for dump data, it did not contain a location name or it was on ignored locations config",
-            LogLevel.Info
-        );
+        if (LoggerFactory.GetInstance().CanBeLogged(LogLevel.Warning))
+            LoggerFactory.GetInstance().Log(
+                $"File {file} was not eligible for dump data, it did not contain a location name or it was on ignored locations config",
+                LogLevel.Warning
+            );
         basicInfo = null;
         return false;
     }
@@ -12,11 +12,12 @@ public abstract class AbstractPreProcessReader : IPreProcessReader
         if (string.IsNullOrEmpty(tempFolder))
        {
             tempFolder = GetBaseDirectory();
-            LoggerFactory.GetInstance()
-                .Log(
-                    $"No temp folder was assigned preProcessorTempFolder in PreProcessorConfig, defaulting to {tempFolder}",
-                    LogLevel.Warning
-                );
+            if (LoggerFactory.GetInstance().CanBeLogged(LogLevel.Warning))
+                LoggerFactory.GetInstance()
+                    .Log(
+                        $"No temp folder was assigned preProcessorTempFolder in PreProcessorConfig, defaulting to {tempFolder}",
+                        LogLevel.Warning
+                    );
         }
 
         // Cleanup the temp directory before starting the process
@@ -18,17 +18,23 @@ public class SevenZipPreProcessReader : AbstractPreProcessReader
         var fileRaw = Path.GetFileNameWithoutExtension(file);
         // SevenZip library doesnt like forward slashes for some reason
         var outPath = $"{_tempFolder}\\{fileRaw}".Replace("/", "\\");
-        LoggerFactory.GetInstance().Log(
-            $"Unzipping {file} into temp path {outPath}, this may take a while...",
-            LogLevel.Info);
+        if (LoggerFactory.GetInstance().CanBeLogged(LogLevel.Info))
+            LoggerFactory.GetInstance().Log(
+                $"Unzipping {file} into temp path {outPath}, this may take a while...",
+                LogLevel.Info);
         var extractor = new SevenZipExtractor(file);
-        extractor.Extracting += (_, args) =>
+        // Only log process on debug mode
+        if (LoggerFactory.GetInstance().CanBeLogged(LogLevel.Debug))
         {
-            if (args.PercentDone % 10 == 0)
-                LoggerFactory.GetInstance().Log($"Unzip progress: {args.PercentDone}%", LogLevel.Info);
-        };
+            extractor.Extracting += (_, args) =>
+            {
+                if (args.PercentDone % 10 == 0)
+                    LoggerFactory.GetInstance().Log($"Unzip progress: {args.PercentDone}%", LogLevel.Debug);
+            };
+        }
         extractor.ExtractArchive(outPath);
-        LoggerFactory.GetInstance().Log($"Finished unzipping {file} into temp path {outPath}", LogLevel.Info);
+        if (LoggerFactory.GetInstance().CanBeLogged(LogLevel.Info))
+            LoggerFactory.GetInstance().Log($"Finished unzipping {file} into temp path {outPath}", LogLevel.Info);
 
         files = Directory.GetFiles(outPath).ToList();
         directories = Directory.GetDirectories(outPath).ToList();
@@ -16,7 +16,8 @@ public class TarkovItems(string items)
             throw new Exception("The server items couldnt be found or loaded. Check server config is pointing to the correct place");
         if (!_items.TryGetValue(tpl, out var item_template))
         {
-            LoggerFactory.GetInstance().Log($"[IsBaseClass] Item template '{tpl}' with base class id '{baseclass_id}' was not found on the server items!", LogLevel.Error);
+            if (LoggerFactory.GetInstance().CanBeLogged(LogLevel.Error))
+                LoggerFactory.GetInstance().Log($"[IsBaseClass] Item template '{tpl}' with base class id '{baseclass_id}' was not found on the server items!", LogLevel.Error);
             return false;
         }
 
@@ -32,7 +33,8 @@ public class TarkovItems(string items)
             throw new Exception("The server items couldnt be found or loaded. Check server config is pointing to the correct place");
         if (!_items.TryGetValue(tpl, out var item_template))
         {
-            LoggerFactory.GetInstance().Log($"[IsQuestItem] Item template '{tpl}' was not found on the server items!", LogLevel.Error);
+            if (LoggerFactory.GetInstance().CanBeLogged(LogLevel.Error))
+                LoggerFactory.GetInstance().Log($"[IsQuestItem] Item template '{tpl}' was not found on the server items!", LogLevel.Error);
             return false;
         }
         return item_template.Props.QuestItem;
@@ -44,7 +46,8 @@ public class TarkovItems(string items)
             throw new Exception("The server items couldnt be found or loaded. Check server config is pointing to the correct place");
         if (!_items.TryGetValue(tpl, out var item_template))
         {
-            LoggerFactory.GetInstance().Log($"[MaxDurability] Item template '{tpl}' was not found on the server items!", LogLevel.Error);
+            if (LoggerFactory.GetInstance().CanBeLogged(LogLevel.Error))
+                LoggerFactory.GetInstance().Log($"[MaxDurability] Item template '{tpl}' was not found on the server items!", LogLevel.Error);
             return null;
         }
         return item_template.Props.MaxDurability?.ToString() ?? "";
@@ -56,7 +59,8 @@ public class TarkovItems(string items)
             throw new Exception("The server items couldnt be found or loaded. Check server config is pointing to the correct place");
         if (!_items.TryGetValue(tpl, out var item_template))
         {
-            LoggerFactory.GetInstance().Log($"[AmmoCaliber] Item template '{tpl}' was not found on the server items!", LogLevel.Error);
+            if (LoggerFactory.GetInstance().CanBeLogged(LogLevel.Error))
+                LoggerFactory.GetInstance().Log($"[AmmoCaliber] Item template '{tpl}' was not found on the server items!", LogLevel.Error);
             return null;
         }
         return item_template.Props.Caliber;