diff --git a/Config/config.json b/Config/config.json
index d84759c..f318eba 100644
--- a/Config/config.json
+++ b/Config/config.json
@@ -60,5 +60,19 @@
"container_City_SW_04_DesignStuff_00002",
"Lootable_00063"
]
- }
+ },
+ "mapsToProcess": [
+ "bigmap",
+ "factory4_day",
+ "factory4_night",
+ "interchange",
+ "laboratory",
+ "lighthouse",
+ "rezervbase",
+ "sandbox",
+ "sandbox_high",
+ "shorline",
+ "tarkovstreets",
+ "woods"
+ ]
}
\ No newline at end of file
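
Note: mapsToProcess is the list that now drives the per-map pipeline loop. A minimal sketch of how QueuePipeline.Run consumes it, using the names introduced further down in this diff (the loop body is simplified):

// Minimal sketch, assuming GetConfig() deserializes Config/config.json
// into Model.Config.Config as shown later in this diff.
var mapNames = LootDumpProcessorContext.GetConfig().MapsToProcess;
foreach (var mapName in mapNames)
{
    // Each map is gathered, processed, written, then cleared in isolation,
    // which keeps peak memory bounded to a single map's dumps.
    ProcessFilesFromDumpsPerMap(threads, collector, mapName);
}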
diff --git a/Config/map_directory_mapping.yaml b/Config/map_directory_mapping.yaml
deleted file mode 100644
index 6ad716f..0000000
--- a/Config/map_directory_mapping.yaml
+++ /dev/null
@@ -1,35 +0,0 @@
----
-Customs:
- name:
- - bigmap
-Factory:
- name:
- - factory4_day
- - factory4_night
-Interchange:
- name:
- - interchange
-Laboratory:
- name:
- - laboratory
-Lighthouse:
- name:
- - lighthouse
-ReserveBase:
- name:
- - rezervbase
-Shoreline:
- name:
- - shoreline
-Woods:
- name:
- - woods
-Streets of Tarkov:
- name:
- - tarkovstreets
-Sandbox:
- name:
- - Sandbox
-SandboxHigh:
- name:
- - Sandbox_high
\ No newline at end of file
diff --git a/LootDumpProcessor.csproj b/LootDumpProcessor.csproj
index d5eb964..13a5a2c 100644
--- a/LootDumpProcessor.csproj
+++ b/LootDumpProcessor.csproj
@@ -26,9 +26,6 @@
<CopyToOutputDirectory>Always</CopyToOutputDirectory>
- <None Update="Config\map_directory_mapping.yaml">
- <CopyToOutputDirectory>Always</CopyToOutputDirectory>
- </None>
<CopyToOutputDirectory>Always</CopyToOutputDirectory>
diff --git a/LootDumpProcessorContext.cs b/LootDumpProcessorContext.cs
index ad4a98c..1f2d404 100644
--- a/LootDumpProcessorContext.cs
+++ b/LootDumpProcessorContext.cs
@@ -54,6 +54,10 @@ public static class LootDumpProcessorContext
return _forcedStatic;
}
+ /// <summary>
+ /// No longer used: map names now come from mapsToProcess in config.json.
+ /// </summary>
+ /// <returns>Map directory mappings (legacy; the backing YAML file has been removed)</returns>
public static Dictionary GetDirectoryMappings()
{
lock (_mapDirectoryMappingsLock)
diff --git a/Model/Config/Config.cs b/Model/Config/Config.cs
index 5448647..1fb1345 100644
--- a/Model/Config/Config.cs
+++ b/Model/Config/Config.cs
@@ -57,4 +57,8 @@ public class Config
[JsonProperty("containerIgnoreList")]
[JsonPropertyName("containerIgnoreList")]
public Dictionary<string, string[]> ContainerIgnoreList { get; set; }
+
+ [JsonProperty("mapsToProcess")]
+ [JsonPropertyName("mapsToProcess")]
+ public List<string> MapsToProcess { get; set; }
}
\ No newline at end of file
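
The new property carries both Newtonsoft's [JsonProperty] and System.Text.Json's [JsonPropertyName], matching the rest of Config so either serializer binds it. A standalone sketch of the binding (hypothetical MapsOnlyConfig stands in for the much larger real Config):

// Standalone sketch; MapsOnlyConfig is a trimmed stand-in for Model.Config.Config.
using System.Text.Json;
using System.Text.Json.Serialization;

var json = "{\"mapsToProcess\":[\"bigmap\",\"woods\"]}";
var cfg = JsonSerializer.Deserialize<MapsOnlyConfig>(json);
Console.WriteLine(string.Join(", ", cfg!.MapsToProcess)); // bigmap, woods

public class MapsOnlyConfig
{
    [JsonPropertyName("mapsToProcess")]
    public List<string> MapsToProcess { get; set; } = new();
}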
diff --git a/Process/Collector/DumpCollector.cs b/Process/Collector/DumpCollector.cs
index 7c825da..08e91bb 100644
--- a/Process/Collector/DumpCollector.cs
+++ b/Process/Collector/DumpCollector.cs
@@ -42,4 +42,16 @@ public class DumpCollector : ICollector
return processedDumps;
}
+
+ public void Clear()
+ {
+ lock (lockObject)
+ {
+ foreach (var file in Directory.GetFiles(DumpLocation))
+ {
+ File.Delete(file);
+ }
+ processedDumps.Clear();
+ }
+ }
}
\ No newline at end of file
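
DumpCollector holds processed dumps both in memory and spilled to disk under DumpLocation, so its Clear has to reset both under the same lock that guards Hold. A minimal runnable sketch of that pattern with stand-in names:

// Stand-ins: dumpDir for DumpLocation, held for processedDumps, sync for lockObject.
var dumpDir = Path.Combine(Path.GetTempPath(), "dumps");
Directory.CreateDirectory(dumpDir);
var held = new List<string>();
var sync = new object();

lock (sync) // no Hold() may interleave while the reset is in progress
{
    foreach (var file in Directory.GetFiles(dumpDir))
    {
        File.Delete(file);
    }
    held.Clear();
}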
diff --git a/Process/Collector/HashSetCollector.cs b/Process/Collector/HashSetCollector.cs
index be15f77..3d7ab10 100644
--- a/Process/Collector/HashSetCollector.cs
+++ b/Process/Collector/HashSetCollector.cs
@@ -23,4 +23,9 @@ public class HashSetCollector : ICollector
{
return processedDumps.ToList();
}
+
+ public void Clear()
+ {
+ processedDumps.Clear();
+ }
}
\ No newline at end of file
diff --git a/Process/Collector/ICollector.cs b/Process/Collector/ICollector.cs
index 98eec27..116d608 100644
--- a/Process/Collector/ICollector.cs
+++ b/Process/Collector/ICollector.cs
@@ -6,6 +6,7 @@ public interface ICollector
{
void Setup();
void Hold(PartialData parsedDump);
+ void Clear();
List<PartialData> Retrieve();
}
\ No newline at end of file
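
Clear() joins the interface so the pipeline can reuse one collector across maps. A hedged sketch of the per-map lifecycle it enables; CollectorFactory.GetInstance() is assumed from the project's factory conventions and may differ in name:

// Hypothetical per-map lifecycle built on the ICollector contract.
var mapNames = new List<string> { "bigmap", "woods" };
var collector = CollectorFactory.GetInstance(); // assumed factory
collector.Setup();
foreach (var mapName in mapNames)
{
    // Hold() would accumulate partial data for this map only (file reading elided),
    var results = collector.Retrieve(); // Retrieve() drains it for the writer,
    collector.Clear();                  // and Clear() resets state for the next map.
}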
diff --git a/Process/Processor/DumpProcessor/MultithreadSteppedDumpProcessor.cs b/Process/Processor/DumpProcessor/MultithreadSteppedDumpProcessor.cs
index 4b1c41b..d89e148 100644
--- a/Process/Processor/DumpProcessor/MultithreadSteppedDumpProcessor.cs
+++ b/Process/Processor/DumpProcessor/MultithreadSteppedDumpProcessor.cs
@@ -17,8 +17,6 @@ public class MultithreadSteppedDumpProcessor : IDumpProcessor
private static readonly List<Task> Runners = new();
- private static readonly BlockingCollection<PartialData> _partialDataToProcess = new();
-
// if we need to, this variable can be moved to use the factory, but since the factory
// needs a locking mechanism to prevent dictionary access exceptions, its better to keep
// a reference to use here
@@ -255,6 +253,9 @@ public class MultithreadSteppedDumpProcessor : IDumpProcessor
looseLootCounts.ItemProperties = actualDictionaryItemProperties.GetKey();
dumpProcessData.LooseLootCounts.Add(mapName, looseLootCounts.GetKey());
+
+ BlockingCollection<PartialData> _partialDataToProcess = new();
+
// add the items to the queue
foreach (var partialData in partialFileMetaData)
{
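
Moving _partialDataToProcess from a static field to a local is more than style: a BlockingCollection cannot accept items again once CompleteAdding has been called, and static state would also leak queued items across the new per-map runs. A short runnable sketch of the failure a fresh local avoids (assuming the unshown consumer code calls CompleteAdding, as is typical):

using System.Collections.Concurrent;

var queue = new BlockingCollection<int>();
queue.Add(1);
queue.CompleteAdding();
foreach (var item in queue.GetConsumingEnumerable())
{
    Console.WriteLine(item); // drains the single queued item
}
// queue.Add(2); // InvalidOperationException: the collection is marked complete.
// A fresh local instance per ProcessDumps call sidesteps this entirely.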
diff --git a/Process/QueuePipeline.cs b/Process/QueuePipeline.cs
index c825e3d..f30d0b7 100644
--- a/Process/QueuePipeline.cs
+++ b/Process/QueuePipeline.cs
@@ -1,5 +1,6 @@
using System.Collections.Concurrent;
using LootDumpProcessor.Logger;
+using LootDumpProcessor.Model.Input;
using LootDumpProcessor.Process.Collector;
using LootDumpProcessor.Process.Processor;
using LootDumpProcessor.Process.Processor.DumpProcessor;
@@ -9,14 +10,20 @@ using LootDumpProcessor.Process.Reader.Filters;
using LootDumpProcessor.Process.Reader.Intake;
using LootDumpProcessor.Process.Reader.PreProcess;
using LootDumpProcessor.Process.Writer;
+using LootDumpProcessor.Serializers.Json;
+using LootDumpProcessor.Storage;
using LootDumpProcessor.Utils;
namespace LootDumpProcessor.Process;
public class QueuePipeline : IPipeline
{
+ private static readonly BlockingCollection<string> _filesToRename = new();
private static readonly BlockingCollection<string> _filesToProcess = new();
private static readonly List<Task> Runners = new();
+ private static readonly List<Task> Renamers = new();
+
+ private readonly List<string> _mapNames = LootDumpProcessorContext.GetConfig().MapsToProcess;
private static readonly Dictionary<string, IPreProcessReader> _preProcessReaders;
@@ -41,68 +48,19 @@ public class QueuePipeline : IPipeline
// We add 2 more threads to the total count to account for subprocesses and others
int threads = LootDumpProcessorContext.GetConfig().Threads;
ThreadPool.SetMaxThreads(threads + 2, threads + 2);
+
if (LoggerFactory.GetInstance().CanBeLogged(LogLevel.Info))
+ {
LoggerFactory.GetInstance().Log("Gathering files to begin processing", LogLevel.Info);
+ }
+
try
{
- // Gather all files, sort them by date descending and then add them into the processing queue
- GatherFiles().OrderByDescending(f =>
- {
- FileDateParser.TryParseFileDate(f, out var date);
- return date;
- }
- ).ToList().ForEach(f => _filesToProcess.Add(f));
-
- if (LoggerFactory.GetInstance().CanBeLogged(LogLevel.Info))
- LoggerFactory.GetInstance().Log("Files sorted and ready to begin pre-processing", LogLevel.Info);
-
- // We startup all the threads and collect them into a runners list
- for (int i = 0; i < threads; i++)
+ FixFilesFromDumps(threads);
+ foreach (var mapName in _mapNames)
{
- if (LoggerFactory.GetInstance().CanBeLogged(LogLevel.Info))
- LoggerFactory.GetInstance().Log("Creating pre-processing threads", LogLevel.Info);
- Runners.Add(
- Task.Factory.StartNew(
- () =>
- {
- while (_filesToProcess.TryTake(out var file, TimeSpan.FromMilliseconds(5000)))
- {
- try
- {
- var reader = IntakeReaderFactory.GetInstance();
- var processor = FileProcessorFactory.GetInstance();
- if (reader.Read(file, out var basicInfo))
- collector.Hold(processor.Process(basicInfo));
- }
- catch (Exception e)
- {
- if (LoggerFactory.GetInstance().CanBeLogged(LogLevel.Error))
- LoggerFactory.GetInstance().Log(
- $"Error occurred while processing file {file}\n{e.Message}\n{e.StackTrace}",
- LogLevel.Error);
- }
- }
- },
- TaskCreationOptions.LongRunning)
- );
+ ProcessFilesFromDumpsPerMap(threads, collector, mapName);
}
-
- // Wait until all runners are done processing
- while (!Runners.All(r => r.IsCompleted))
- {
- if (LoggerFactory.GetInstance().CanBeLogged(LogLevel.Info))
- LoggerFactory.GetInstance().Log(
- $"One or more file processors are still processing files. Waiting {LootDumpProcessorContext.GetConfig().ThreadPoolingTimeoutMs}ms before checking again",
- LogLevel.Info);
- Thread.Sleep(TimeSpan.FromMilliseconds(LootDumpProcessorContext.GetConfig().ThreadPoolingTimeoutMs));
- }
- if (LoggerFactory.GetInstance().CanBeLogged(LogLevel.Info))
- LoggerFactory.GetInstance().Log("Pre-processing finished", LogLevel.Info);
- // Single writer instance to collect results
- var writer = WriterFactory.GetInstance();
- // Single collector instance to collect results
- var dumpProcessor = DumpProcessorFactory.GetInstance();
- writer.WriteAll(dumpProcessor.ProcessDumps(collector.Retrieve()));
}
finally
{
@@ -120,7 +78,9 @@ public class QueuePipeline : IPipeline
var inputPath = LootDumpProcessorContext.GetConfig().ReaderConfig.DumpFilesLocation;
if (inputPath == null || inputPath.Count == 0)
+ {
throw new Exception("Reader dumpFilesLocations must be set to a valid value");
+ }
// We gather up all files into a queue
var queuedFilesToProcess = GetFileQueue(inputPath);
@@ -133,7 +93,9 @@ public class QueuePipeline : IPipeline
var gatheredFiles = new List<string>();
if (queuedFilesToProcess.Count == 0)
+ {
throw new Exception("No files matched accepted extension types in configs");
+ }
var fileFilters = GetFileFilters() ?? new Dictionary<string, IFileFilter>();
@@ -147,8 +109,7 @@ public class QueuePipeline : IPipeline
if (preProcessor.TryPreProcess(file, out var outputFiles, out var outputDirectories))
{
// all new directories need to be processed as well
- GetFileQueue(outputDirectories).ToList()
- .ForEach(queuedFilesToProcess.Enqueue);
+ GetFileQueue(outputDirectories).ToList().ForEach(queuedFilesToProcess.Enqueue);
// all output files need to be queued as well
outputFiles.ForEach(queuedFilesToProcess.Enqueue);
}
@@ -159,7 +120,9 @@ public class QueuePipeline : IPipeline
if (fileFilters.TryGetValue(extension, out var filter))
{
if (filter.Accept(file))
+ {
gatheredFiles.Add(file);
+ }
}
else
{
@@ -188,17 +151,28 @@ public class QueuePipeline : IPipeline
{
// Check the input file to be sure its going to have data on it.
if (!Directory.Exists(path))
+ {
throw new Exception($"Input directory \"{inputPath}\" could not be found");
+ }
+
// If we should process subfolder, queue them up as well
if (LootDumpProcessorContext.GetConfig().ReaderConfig.ProcessSubFolders)
+ {
foreach (var directory in Directory.GetDirectories(path))
+ {
queuedPathsToProcess.Enqueue(directory);
+ }
+ }
var files = Directory.GetFiles(path);
foreach (var file in files)
+ {
if (acceptedFileExtension.Contains(Path.GetExtension(file)[1..].ToLower()))
+ {
queuedFilesToProcess.Enqueue(file);
+ }
+ }
}
return queuedFilesToProcess;
@@ -214,4 +188,240 @@ public class QueuePipeline : IPipeline
FileFilterFactory.GetInstance
);
}
+
+ /// <summary>
+ /// Gathers all dump files and adds them to the processing queue in one pass.
+ /// Superseded by the per-map version, ProcessFilesFromDumpsPerMap.
+ /// </summary>
+ /// <param name="threads">Number of threads to use</param>
+ /// <param name="collector">Collector that holds the processed partial data</param>
+ private void ProcessFilesFromDumps(int threads, ICollector collector)
+ {
+ // Gather all files, sort them by date descending and then add them into the processing queue
+ GatherFiles().OrderByDescending(f =>
+ {
+ FileDateParser.TryParseFileDate(f, out var date);
+ return date;
+ }
+ ).ToList().ForEach(f => _filesToProcess.Add(f));
+
+ if (LoggerFactory.GetInstance().CanBeLogged(LogLevel.Info))
+ {
+ LoggerFactory.GetInstance().Log("Files sorted and ready to begin pre-processing", LogLevel.Info);
+ }
+
+ // We startup all the threads and collect them into a runners list
+ for (int i = 0; i < threads; i++)
+ {
+ if (LoggerFactory.GetInstance().CanBeLogged(LogLevel.Info))
+ {
+ LoggerFactory.GetInstance().Log("Creating pre-processing threads", LogLevel.Info);
+ }
+
+ Runners.Add(
+ Task.Factory.StartNew(
+ () =>
+ {
+ while (_filesToProcess.TryTake(out var file, TimeSpan.FromMilliseconds(5000)))
+ {
+ try
+ {
+ var reader = IntakeReaderFactory.GetInstance();
+ var processor = FileProcessorFactory.GetInstance();
+ if (reader.Read(file, out var basicInfo))
+ {
+ collector.Hold(processor.Process(basicInfo));
+ }
+ }
+ catch (Exception e)
+ {
+ if (LoggerFactory.GetInstance().CanBeLogged(LogLevel.Error))
+ {
+ LoggerFactory.GetInstance().Log(
+ $"Error occurred while processing file {file}\n{e.Message}\n{e.StackTrace}",
+ LogLevel.Error);
+ }
+ }
+ }
+ },
+ TaskCreationOptions.LongRunning)
+ );
+ }
+
+ // Wait until all runners are done processing
+ while (!Runners.All(r => r.IsCompleted))
+ {
+ if (LoggerFactory.GetInstance().CanBeLogged(LogLevel.Info))
+ {
+ LoggerFactory.GetInstance().Log(
+ $"One or more file processors are still processing files. Waiting {LootDumpProcessorContext.GetConfig().ThreadPoolingTimeoutMs}ms before checking again",
+ LogLevel.Info);
+ }
+
+ Thread.Sleep(TimeSpan.FromMilliseconds(LootDumpProcessorContext.GetConfig().ThreadPoolingTimeoutMs));
+ }
+
+ if (LoggerFactory.GetInstance().CanBeLogged(LogLevel.Info))
+ {
+ LoggerFactory.GetInstance().Log("Pre-processing finished", LogLevel.Info);
+ }
+
+ // Single writer instance to collect results
+ var writer = WriterFactory.GetInstance();
+ // Single collector instance to collect results
+ var dumpProcessor = DumpProcessorFactory.GetInstance();
+ writer.WriteAll(dumpProcessor.ProcessDumps(collector.Retrieve()));
+ }
+
+ private void ProcessFilesFromDumpsPerMap(int threads, ICollector collector, string mapName)
+ {
+ // Gather all files, sort them by date descending and then add them into the processing queue
+ GatherFiles().FindAll(f => f.ToLower().Contains($"{mapName}--")).OrderByDescending(f =>
+ {
+ FileDateParser.TryParseFileDate(f, out var date);
+ return date;
+ }
+ ).ToList().ForEach(f => _filesToProcess.Add(f));
+
+ if (LoggerFactory.GetInstance().CanBeLogged(LogLevel.Info))
+ {
+ LoggerFactory.GetInstance().Log("Files sorted and ready to begin pre-processing", LogLevel.Info);
+ }
+
+ // We startup all the threads and collect them into a runners list
+ for (int i = 0; i < threads; i++)
+ {
+ if (LoggerFactory.GetInstance().CanBeLogged(LogLevel.Info))
+ {
+ LoggerFactory.GetInstance().Log("Creating pre-processing threads", LogLevel.Info);
+ }
+
+ Runners.Add(
+ Task.Factory.StartNew(
+ () =>
+ {
+ while (_filesToProcess.TryTake(out var file, TimeSpan.FromMilliseconds(5000)))
+ {
+ try
+ {
+ var reader = IntakeReaderFactory.GetInstance();
+ var processor = FileProcessorFactory.GetInstance();
+ if (reader.Read(file, out var basicInfo))
+ {
+ collector.Hold(processor.Process(basicInfo));
+ }
+ }
+ catch (Exception e)
+ {
+ if (LoggerFactory.GetInstance().CanBeLogged(LogLevel.Error))
+ {
+ LoggerFactory.GetInstance().Log(
+ $"Error occurred while processing file {file}\n{e.Message}\n{e.StackTrace}",
+ LogLevel.Error);
+ }
+ }
+ }
+ },
+ TaskCreationOptions.LongRunning)
+ );
+ }
+
+ // Wait until all runners are done processing
+ while (!Runners.All(r => r.IsCompleted))
+ {
+ if (LoggerFactory.GetInstance().CanBeLogged(LogLevel.Info))
+ {
+ LoggerFactory.GetInstance().Log(
+ $"One or more file processors are still processing files. Waiting {LootDumpProcessorContext.GetConfig().ThreadPoolingTimeoutMs}ms before checking again",
+ LogLevel.Info);
+ }
+
+ Thread.Sleep(TimeSpan.FromMilliseconds(LootDumpProcessorContext.GetConfig().ThreadPoolingTimeoutMs));
+ }
+
+ if (LoggerFactory.GetInstance().CanBeLogged(LogLevel.Info))
+ {
+ LoggerFactory.GetInstance().Log("Pre-processing finished", LogLevel.Info);
+ }
+
+ // Single writer instance to collect results
+ var writer = WriterFactory.GetInstance();
+ // Single collector instance to collect results
+ var dumpProcessor = DumpProcessorFactory.GetInstance();
+ writer.WriteAll(dumpProcessor.ProcessDumps(collector.Retrieve()));
+
+ // Clear the collector and the data storage between maps, since each map is now processed and written on its own
+ collector.Clear();
+ DataStorageFactory.GetInstance().Clear();
+ }
+
+ /// <summary>
+ /// Renames dump files to include their map name if they do not have it already.
+ /// </summary>
+ /// <param name="threads">Number of threads to use</param>
+ private void FixFilesFromDumps(int threads)
+ {
+ var inputPath = LootDumpProcessorContext.GetConfig().ReaderConfig.DumpFilesLocation;
+
+ if (inputPath == null || inputPath.Count == 0)
+ {
+ throw new Exception("Reader dumpFilesLocations must be set to a valid value");
+ }
+
+ GetFileQueue(inputPath).ToList().ForEach(f => _filesToRename.Add(f));
+
+ var jsonUtil = JsonSerializerFactory.GetInstance(JsonSerializerTypes.DotNet);
+
+ for (var i = 0; i < threads; i++)
+ {
+ if (LoggerFactory.GetInstance().CanBeLogged(LogLevel.Info))
+ {
+ LoggerFactory.GetInstance().Log("Creating file-renaming threads", LogLevel.Info);
+ }
+
+ Renamers.Add(Task.Factory.StartNew(() =>
+ {
+ while (_filesToRename.TryTake(out var file, TimeSpan.FromMilliseconds(5000)))
+ {
+ if (_mapNames.Any(x => file.Contains(x)))
+ {
+ continue;
+ }
+
+ try
+ {
+ var data = File.ReadAllText(file);
+ var fileData = jsonUtil.Deserialize<RootData>(data);
+ var newPath = file.Replace("resp", $"{fileData.Data.LocationLoot.Id.ToLower()}--resp");
+ File.Move(file, newPath);
+ }
+ catch (Exception e)
+ {
+ if (LoggerFactory.GetInstance().CanBeLogged(LogLevel.Error))
+ {
+ LoggerFactory.GetInstance().Log(
+ $"Error occurred while processing file {file}\n{e.Message}\n{e.StackTrace}",
+ LogLevel.Error);
+ }
+ }
+ }
+ }, TaskCreationOptions.LongRunning));
+ }
+
+ while (!Renamers.All(r => r.IsCompleted))
+ {
+ if (LoggerFactory.GetInstance().CanBeLogged(LogLevel.Info))
+ {
+ LoggerFactory.GetInstance()
+ .Log($"One or more file renamers are still running. Waiting {LootDumpProcessorContext.GetConfig().ThreadPoolingTimeoutMs}ms before checking again", LogLevel.Info);
+ }
+
+ Thread.Sleep(TimeSpan.FromMilliseconds(LootDumpProcessorContext.GetConfig().ThreadPoolingTimeoutMs));
+ }
+
+ if (LoggerFactory.GetInstance().CanBeLogged(LogLevel.Info))
+ {
+ LoggerFactory.GetInstance().Log("File renaming finished", LogLevel.Info);
+ }
+ }
}
\ No newline at end of file
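
The rename pass and the per-map filter are two halves of one convention: FixFilesFromDumps prefixes each dump with its map id, and ProcessFilesFromDumpsPerMap selects on that prefix. An illustrative trace with a hypothetical file name:

// Hypothetical dump name, before: "dumps/resp.2024-01-01_12-00-00.json".
// FixFilesFromDumps reads the map id from the dump (e.g. "bigmap") and moves it.
var file = "dumps/resp.2024-01-01_12-00-00.json";
var mapId = "bigmap"; // stand-in for fileData.Data.LocationLoot.Id
var newPath = file.Replace("resp", $"{mapId.ToLower()}--resp");
Console.WriteLine(newPath); // dumps/bigmap--resp.2024-01-01_12-00-00.json

// which is exactly what the per-map gather then matches:
var isForMap = newPath.ToLower().Contains($"{mapId}--"); // true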
diff --git a/Storage/IDataStorage.cs b/Storage/IDataStorage.cs
index b94386e..a49cad5 100644
--- a/Storage/IDataStorage.cs
+++ b/Storage/IDataStorage.cs
@@ -7,4 +7,5 @@ public interface IDataStorage
bool Exists(IKey t);
T GetItem(IKey key) where T : IKeyable;
List GetAll();
+ void Clear();
}
\ No newline at end of file
diff --git a/Storage/Implementations/File/FileDataStorage.cs b/Storage/Implementations/File/FileDataStorage.cs
index 1d55a10..f5f7d84 100644
--- a/Storage/Implementations/File/FileDataStorage.cs
+++ b/Storage/Implementations/File/FileDataStorage.cs
@@ -25,4 +25,9 @@ public class FileDataStorage : IDataStorage
{
throw new NotImplementedException();
}
+
+ public void Clear()
+ {
+ // Intentionally left empty so the file-based storage can still be used if needed
+ }
}
\ No newline at end of file
diff --git a/Storage/Implementations/Memory/MemoryDataStorage.cs b/Storage/Implementations/Memory/MemoryDataStorage.cs
index bc69c43..40fc5f2 100644
--- a/Storage/Implementations/Memory/MemoryDataStorage.cs
+++ b/Storage/Implementations/Memory/MemoryDataStorage.cs
@@ -47,4 +47,12 @@ public class MemoryDataStorage : IDataStorage
{
return string.Join("-", key.GetLookupIndex());
}
+
+ public void Clear()
+ {
+ lock (_cacheObjectLock)
+ {
+ CachedObjects.Clear();
+ }
+ }
}
\ No newline at end of file
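
Clear takes _cacheObjectLock for the same reason the other MemoryDataStorage members do: Dictionary is not safe for concurrent mutation, and worker threads may still be storing items when the pipeline clears between maps. A runnable sketch of the race the lock prevents (the concurrent-store scenario is hypothetical):

// Hypothetical concurrent use; without the lock, mixed writes corrupt the dictionary.
var cache = new Dictionary<string, object>();
var gate = new object(); // stands in for _cacheObjectLock

Parallel.For(0, 1000, i =>
{
    if (i % 100 == 0)
    {
        lock (gate) { cache.Clear(); }      // pipeline clearing between maps
    }
    else
    {
        lock (gate) { cache[$"k{i}"] = i; } // worker storing an item
    }
});
Console.WriteLine("no corruption, count = " + cache.Count);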