using System.Collections.Concurrent; using LootDumpProcessor.Logger; using LootDumpProcessor.Model; using LootDumpProcessor.Model.Input; using LootDumpProcessor.Model.Output.StaticContainer; using LootDumpProcessor.Model.Processing; using LootDumpProcessor.Serializers.Json; using LootDumpProcessor.Storage; using LootDumpProcessor.Storage.Collections; using LootDumpProcessor.Utils;

namespace LootDumpProcessor.Process.Processor.DumpProcessor;

// NOTE(review): generic type arguments appear to have been stripped from this file by an
// extraction/transfer step (e.g. "List Runners", "Dictionary ProcessDumps(List dumps)",
// "new Dictionary>()"). The code below preserves the tokens exactly as found; restore the
// original <...> arguments from version control before building.
//
// Multi-threaded dump processor: fans dump files out to worker tasks to collect static
// container data, then aggregates static/ammo/loose-loot distributions into one output map.
public class MultithreadSteppedDumpProcessor : IDumpProcessor
{
    private static IJsonSerializer _jsonSerializer = JsonSerializerFactory.GetInstance();

    // Shared task list reused across processing phases; cleared at the start of ProcessDumps.
    // NOTE(review): static mutable state — presumably only one ProcessDumps runs at a time; confirm.
    private static readonly List Runners = new();

    // Work queue consumed by the per-map worker tasks spawned in GetDumpProcessData.
    private static readonly BlockingCollection _partialDataToProcess = new();

    // if we need to, this variable can be moved to use the factory, but since the factory
    // needs a locking mechanism to prevent dictionary access exceptions, its better to keep
    // a reference to use here
    private static readonly IDataStorage _dataStorage = DataStorageFactory.GetInstance();

    /// <summary>
    /// Entry point of the final dump-processing pass. Scans every dump once on worker tasks to
    /// build static-container data (guarded by per-structure locks), then assembles the output
    /// dictionary keyed by OutputFileType: static containers, static ammo, static loot and
    /// loose loot distributions.
    /// </summary>
    /// <param name="dumps">Partial dump descriptors produced by the earlier processing stages.</param>
    /// <returns>Map of output file type to the generated distribution data.</returns>
    public Dictionary ProcessDumps(List dumps)
    {
        if (LoggerFactory.GetInstance().CanBeLogged(LogLevel.Info))
            LoggerFactory.GetInstance().Log("Starting final dump processing", LogLevel.Info);
        var output = new Dictionary();

        // Heavy per-map aggregation (spawns its own worker tasks; see GetDumpProcessData).
        var dumpProcessData = GetDumpProcessData(dumps);
        if (LoggerFactory.GetInstance().CanBeLogged(LogLevel.Info))
            LoggerFactory.GetInstance().Log("Heavy processing done!", LogLevel.Info);

        // Results and their dedicated locks; each dictionary below is written concurrently
        // by the tasks queued further down, always under its matching lock object.
        var staticContainers = new Dictionary();
        var staticContainersLock = new object();
        // We need to count how many dumps we have for each map
        var mapDumpCounter = new Dictionary();
        var mapDumpCounterLock = new object();
        // dictionary of maps, that has a dictionary of template and hit count
        var mapStaticContainersAggregated = new Dictionary>();
        var mapStaticContainersAggregatedLock = new object();

        Runners.Clear();
        // BSG changed the map data so static containers are now dynamic, so we need to scan all dumps for the static containers.
        if (LoggerFactory.GetInstance().CanBeLogged(LogLevel.Info))
            LoggerFactory.GetInstance().Log("Queuing dumps for static data processing", LogLevel.Info);
        foreach (var dumped in dumps)
        {
            Runners.Add(
                Task.Factory.StartNew(() =>
                {
                    if (LoggerFactory.GetInstance().CanBeLogged(LogLevel.Debug))
                        LoggerFactory.GetInstance().Log($"Processing static data for file {dumped.BasicInfo.FileName}", LogLevel.Debug);

                    // Each task re-reads and deserializes its dump file from disk.
                    var data = _jsonSerializer.Deserialize(File.ReadAllText(dumped.BasicInfo.FileName));
                    var mapName = data.Data.Name;

                    // the if statement below takes care of processing "forced" or real static data for each map, only need
                    // to do this once per map, we dont care about doing it again
                    lock (staticContainersLock)
                    {
                        if (!staticContainers.ContainsKey(mapName))
                        {
                            if (LoggerFactory.GetInstance().CanBeLogged(LogLevel.Info))
                                LoggerFactory.GetInstance().Log($"Doing first time process for map {mapName} of real static data", LogLevel.Info);
                            var mapStaticContainers = StaticLootProcessor.CreateStaticWeaponsAndStaticForcedContainers(data);
                            // .Item1 = map name
                            staticContainers[mapStaticContainers.Item1] = mapStaticContainers.Item2;
                        }
                    }

                    // Takes care of finding how many "dynamic static containers" we have on the map
                    Dictionary mapAggregatedDataDict;
                    lock (mapStaticContainersAggregatedLock)
                    {
                        // Init dict if map key doesnt exist
                        if (!mapStaticContainersAggregated.TryGetValue(mapName, out mapAggregatedDataDict))
                        {
                            mapAggregatedDataDict = new Dictionary();
                            mapStaticContainersAggregated.Add(mapName, mapAggregatedDataDict);
                        }
                    }

                    // Only process the dump file if the date is higher (after) the configuration date
                    if (DumpWasMadeAfterConfigThresholdDate(dumped))
                    {
                        // Keep track of how many dumps we have for each map
                        lock (mapDumpCounterLock)
                        {
                            IncrementMapCounterDictionaryValue(mapDumpCounter, mapName);
                        }

                        var containerIgnoreListExists = LootDumpProcessorContext.GetConfig().ContainerIgnoreList.TryGetValue(data.Data.Id.ToLower(), out string[]? ignoreListForMap);
                        foreach (var dynamicStaticContainer in StaticLootProcessor.CreateDynamicStaticContainers(data))
                        {
                            lock (mapStaticContainersAggregatedLock)
                            {
                                // Skip adding containers to aggredated data if container id is in ignore list
                                if (containerIgnoreListExists && ignoreListForMap.Contains(dynamicStaticContainer.Id))
                                {
                                    continue;
                                }
                                // Increment count by 1
                                if (!mapAggregatedDataDict.TryAdd(dynamicStaticContainer, 1))
                                    mapAggregatedDataDict[dynamicStaticContainer] += 1;
                            }
                        }
                    }
                    // NOTE(review): forcing a GC per task is unusual; presumably to cap memory
                    // while many large dumps are in flight — confirm it is still needed.
                    GCHandler.Collect();
                })
            );
        }
        // Block until every static-data task above has finished before aggregating.
        Task.WaitAll(Runners.ToArray());

        if (LoggerFactory.GetInstance().CanBeLogged(LogLevel.Info))
            LoggerFactory.GetInstance().Log("All static data processing threads finished", LogLevel.Info);

        // Aggregate and calculate the probability of a static container
        mapStaticContainersAggregated.ToDictionary(
            kv => kv.Key,
            kv => kv.Value.Select(
                td => new StaticDataPoint
                {
                    Template = td.Key,
                    // probability = hits for this template / dumps counted for this map
                    Probability = GetStaticContainerProbability(kv.Key, td, mapDumpCounter)
                }
            ).ToList()
        ).ToList().ForEach(kv => staticContainers[kv.Key].StaticContainers = kv.Value);

        // Static containers
        output.Add(OutputFileType.StaticContainer, staticContainers);

        if (LoggerFactory.GetInstance().CanBeLogged(LogLevel.Info))
            LoggerFactory.GetInstance().Log("Processing ammo distribution", LogLevel.Info);
        // Ammo distribution
        output.Add(
            OutputFileType.StaticAmmo,
            StaticLootProcessor.CreateAmmoDistribution(dumpProcessData.ContainerCounts)
        );

        if (LoggerFactory.GetInstance().CanBeLogged(LogLevel.Info))
            LoggerFactory.GetInstance().Log("Processing static loot distribution", LogLevel.Info);
        // Static loot distribution
        output.Add(
            OutputFileType.StaticLoot,
            StaticLootProcessor.CreateStaticLootDistribution(dumpProcessData.ContainerCounts, staticContainers)
        );

        if (LoggerFactory.GetInstance().CanBeLogged(LogLevel.Info))
            LoggerFactory.GetInstance().Log("Processing loose loot distribution", LogLevel.Info);
        // Loose loot distribution
        var looseLootDistribution = LooseLootProcessor.CreateLooseLootDistribution(
            dumpProcessData.MapCounts,
            dumpProcessData.LooseLootCounts
        );

        if (LoggerFactory.GetInstance().CanBeLogged(LogLevel.Info))
            LoggerFactory.GetInstance().Log("Collecting loose loot distribution information", LogLevel.Info);
        // Re-key the loose loot result by map id in map-count order.
        var loot = dumpProcessData.MapCounts
            .Select(mapCount => mapCount.Key)
            .ToDictionary(mi => mi, mi => looseLootDistribution[mi]);
        output.Add(OutputFileType.LooseLoot, loot);

        if (LoggerFactory.GetInstance().CanBeLogged(LogLevel.Info))
            LoggerFactory.GetInstance().Log("Dump processing fully completed!", LogLevel.Info);
        return output;
    }

    /// <summary>
    /// True when the dump file's date (parsed from its file name) is strictly after the
    /// configured SpawnContainerChanceIncludeAfterDate threshold; false when the date is
    /// missing, unparseable, or not after the threshold.
    /// </summary>
    private static bool DumpWasMadeAfterConfigThresholdDate(PartialData dataDump)
    {
        return FileDateParser.TryParseFileDate(dataDump.BasicInfo.FileName, out var fileDate) &&
               fileDate.HasValue &&
               fileDate.Value > LootDumpProcessorContext.GetConfig().DumpProcessorConfig.SpawnContainerChanceIncludeAfterDate;
    }

    /// <summary>
    /// Increments the per-map dump counter, inserting the key with a count of 1 on first sight.
    /// Caller is responsible for locking (see mapDumpCounterLock in ProcessDumps).
    /// </summary>
    private static void IncrementMapCounterDictionaryValue(Dictionary mapDumpCounter, string mapName)
    {
        if (!mapDumpCounter.TryAdd(mapName, 1))
        {
            // Dict has map, increment count by 1
            mapDumpCounter[mapName] += 1;
        }
    }

    /// <summary>
    /// Probability of a static container: template hit count divided by the number of dumps
    /// counted for the map, computed in decimal then rounded to 2 places.
    /// NOTE(review): throws KeyNotFoundException if the map never passed the threshold-date
    /// check (no entry in mapDumpCounter) — confirm that cannot happen upstream.
    /// </summary>
    private static double GetStaticContainerProbability(string mapName, KeyValuePair td, Dictionary mapDumpCounter)
    {
        return Math.Round((double)((decimal)td.Value / (decimal)mapDumpCounter[mapName]), 2);
    }

    /// <summary>
    /// Groups dumps by map, records per-map counts, and queues partial data for consumption by
    /// worker tasks (continues beyond this excerpt).
    /// </summary>
    private static DumpProcessData GetDumpProcessData(List dumps)
    {
        var dumpProcessData = new DumpProcessData();
        dumps.GroupBy(dump => dump.BasicInfo.Map)
            .ToList()
            .ForEach(tuple =>
            {
                var mapi = tuple.Key;
                var g = tuple.ToList();
                if (LoggerFactory.GetInstance().CanBeLogged(LogLevel.Info))
                    LoggerFactory.GetInstance().Log(
                        $"Processing map {mapi}, total dump data to process: {g.Count}",
                        LogLevel.Info
                    );
                dumpProcessData.MapCounts[mapi] = g.Count;

                // Per-map accumulation state and its locks, captured by the worker closures below.
                var lockObjectContainerCounts = new object();
                var lockObjectCounts = new object();
                var counts = new LooseLootCounts();
                var lockObjectDictionaryCounts = new object();
                var dictionaryCounts = new FlatKeyableDictionary();
                counts.Counts = dictionaryCounts.GetKey();
                /* var
dictionaryItemCounts = new FlatKeyableDictionary>(); counts.Items = dictionaryItemCounts.GetKey(); */ var lockObjectDictionaryItemProperties = new object(); var dictionaryItemProperties = new FlatKeyableDictionary>(); var actualDictionaryItemProperties = new FlatKeyableDictionary(); counts.ItemProperties = actualDictionaryItemProperties.GetKey(); dumpProcessData.LooseLootCounts.Add(mapi, counts.GetKey()); // add the items to the queue foreach (var gi in g) { _partialDataToProcess.Add(gi); } // Call GC before running threads g = null; tuple = null; GCHandler.Collect(); // The data storage factory has a lock, we dont want the locks to occur when multithreading for (int i = 0; i < LootDumpProcessorContext.GetConfig().Threads; i++) { Runners.Add( Task.Factory.StartNew( () => { while (_partialDataToProcess.TryTake(out var partialData, TimeSpan.FromMilliseconds(5000))) { try { var dumpData = _dataStorage.GetItem(partialData.ParsedDumpKey); lock (lockObjectContainerCounts) { dumpProcessData.ContainerCounts.AddRange(dumpData.Containers); } // loose loot into ids on files var loadedDictionary = _dataStorage .GetItem>>( dumpData.LooseLoot.ItemProperties ); foreach (var (k, v) in loadedDictionary) { var count = dumpData.LooseLoot.Counts[k]; lock (lockObjectDictionaryCounts) { if (dictionaryCounts.ContainsKey(k)) dictionaryCounts[k] += count; else dictionaryCounts[k] = count; } /* var itemList = dumpData.LooseLoot.Items[k]; if (!dictionaryItemCounts.TryGetValue(k, out var itemCounts)) { itemCounts = new List(); dictionaryItemCounts.Add(k, itemCounts); } itemCounts.AddRange(itemList); */ lock (lockObjectDictionaryItemProperties) { if (!dictionaryItemProperties.TryGetValue(k, out var values)) { values = new FlatKeyableList