Merge branch 'PerMapStaticLoot'
commit f55ab3cdc0
@@ -1,5 +1,5 @@
---
Customs:
bigmap:
- 5938188786f77474f723e87f # Case 0031
- 5c12301c86f77419522ba7e4 # Flash drive with fake info
- 593965cf86f774087a77e1b6 # Case 0048
@@ -13,9 +13,10 @@ Customs:
- 5939a00786f7742fe8132936 # Golden Zibbo lighter
- 5939e5a786f77461f11c0098 # Secure Folder 0013
- 64e74a3d4d49d23b2c39d319 # item_quest_clock_07 (Out of Time)
- 6614230055afee107f05e998 # The Unheard's phone
#- 64bd1abff3a668f08805ce4f # Secure Flash drive V4 REMOVED BY BSG

Woods:
woods:
- 5938878586f7741b797c562f # Case 0052
- 5d3ec50586f774183a607442 # Jaeger's message Underneath the wooden lookout post.
- 5af04e0a86f7743a532b79e2 # Single-axis Fiber Optic Gyroscope: item_barter_electr_gyroscope
@@ -24,7 +25,7 @@ Woods:
#- 64bde2248f3a947a990aa4a5 # Sliderkey Secure Flash drive #1 REMOVED BY BSG
#- 64bde265807321a9b905f076 # Sliderkey Secure Flash drive #2 REMOVED BY BSG

Shoreline:
shoreline:
- 5a294d7c86f7740651337cf9 # Drone 1 SAS disk
- 5a294d8486f774068638cd93 # Drone 2 SAS disk: ambiguous with itemTpl 5a294f1686f774340c7b7e4a
- 5efdafc1e70b5e33f86de058 # Sanitar's Surgery kit marked with a blue symbol
@@ -41,8 +42,12 @@ Shoreline:
- 5b43237186f7742f3a4ab252 # Chemical container: item_quest_chem_container
- 5a29284f86f77463ef3db363 # Toughbook reinforced laptop
- 64e74a534d49d23b2c39d31b # item_quest_clock_10 (Out of Time)
- 6614238e0d240a5f5d0f679d # Skier and Peacekeeper correspondence
- 661421c7c1f2f548c50ee649 # The Unheard's laptop
- 6614217b6d9d5abcad0ff098 # The Unheard's phone
- 661423200d240a5f5d0f679b # The Unheard's laptop

Interchange:
interchange:
- 5ae9a18586f7746e381e16a3 # OLI cargo manifests
- 5ae9a0dd86f7742e5f454a05 # Goshan cargo manifests
- 5ae9a1b886f77404c8537c62 # Idea cargo manifests
@@ -53,11 +58,15 @@ Interchange:
- 5b4c81bd86f77418a75ae159 # Chemical container item_quest_chem_container3
- 64e74a5ac2b4f829615ec336 # item_quest_clock_11 (Out of Time)

Factory:
factory4_day:
- 591093bb86f7747caa7bb2ee # On the neck of the dead scav in the bunker (Postman Pat Part 2)
- 593a87af86f774122f54a951 # Syringe with a chemical

Lighthouse:
factory4_night:
- 591093bb86f7747caa7bb2ee # On the neck of the dead scav in the bunker (Postman Pat Part 2)
- 593a87af86f774122f54a951 # Syringe with a chemical

lighthouse:
- 61904c9df62c89219a56e034 # The message is tucked under the bottom of the door to the cabin.
- 619268ad78f4fa33f173dbe5 # Water pump operation data On the desk between other documents in the upper office.
- 619268de2be33f2604340159 # Pumping Station Operation Data In the upper floor office on the shelf.
@@ -69,9 +78,10 @@ Lighthouse:
- 6399f54b0a36db13c823ad21 # Radio transmitter body (Key to the Tower)
- 64e74a64aac4cd0a7264ecdf # item_quest_clock_12 (Out of Time)
- 578f87a3245977356274f2cb #
- 661666458c2aa9cb1602503b # Hard drive
# - 64b91627dd13d43b9d01d6d1 # Toughbook reinforced laptop (Event quest) REMOVED BY BSG

ReserveBase:
rezervbase:
- 60915994c49cf53e4772cc38 # Military documents 1 on the table inside bunker control room (Documents)
- 60a3b6359c427533db36cf84 # Military documents 2 On the bottom shelf of the cupboard near the corner.
- 60a3b65c27adf161da7b6e14 # Military documents 3 Inside the cupboard next to the 4x4 Weapon Box.
@@ -82,15 +92,16 @@ ReserveBase:
- 6398a072e301557ae24cec92 # Original Lightkeeper Intelligence (Snatch)
- 64e74a4baac4cd0a7264ecdd # item_quest_clock_09 (Out of Time)

Laboratory:
laboratory:
- 5eff135be0d3331e9d282b7b # Flash drive marked with blue tape (TerraGroup employee)
- 6398a4cfb5992f573c6562b3 # Secured tape
- 64e74a44c2b4f829615ec334 # Picture 8
#- 64e74a44c2b4f829615ec334 # item_quest_clock_08 (Out of Time) #1 REMOVED BY BSG
#- 64bdcfed8f3a947a990aa49a # Hermetic container for storing various chemicals #1 REMOVED BY BSG
#- 64bdd008b0bf3baa6702f35f # Hermetic container for storing various chemicals #2 REMOVED BY BSG
#- 64bdd014f3a668f08805ce64 # Hermetic container for storing various chemicals #3 REMOVED BY BSG

Streets of Tarkov:
tarkovstreets:
- 63a943cead5cc12f22161ff7 # Accountant's notes (Audit)
- 638cbc68a63f1b49be6a3010 # Registered letter (Youve Got Mail)
- 638df4cc7b560b03794a18d2 # AG guitar pick (Audiophile)
@@ -114,6 +125,10 @@ Streets of Tarkov:
- 64f5b4f71a5f313cb144c06c # Secret component (Beyond the Red Meat - Part 2)
- 657acb2ac900be5902191ac9 # Cadastral registry records

Sandbox:
sandbox:
- 6582bd252b50c61c565828e2 # Bottle of Le Jean wine
- 6575a6ca8778e96ded05a802 # TerraGroup scientist's hard drive

sandbox_high:
- 6582bd252b50c61c565828e2 # Bottle of Le Jean wine
- 6575a6ca8778e96ded05a802 # TerraGroup scientist's hard drive
@@ -5,17 +5,17 @@ static_weapon_ids:
- 5cdeb229d7f00c000e7ce174

forced_items:
Customs:
bigmap:
# unknown key
- containerId: custom_multiScene_00058
itemTpl: 593962ca86f774068014d9af

Streets of Tarkov:
tarkovstreets:
# Backup hideout key
- containerId: container_City_SE_02_DesignStuff_00025
- containerId: container_City_SE_02_Primorskiy_51_indoor_00001
itemTpl: 6398fd8ad3de3849057f5128

Sandbox:
sandbox:
- containerId: container_Test_for_export_00002 # Lab technician body
itemTpl: 658199aa38c79576a2569e13 # TerraGroup science office key
- containerId: container_custom_DesignStuff_00029 # dead body of scav near exit
@@ -29,4 +29,7 @@ Streets of Tarkov:
- tarkovstreets
Sandbox:
name:
- Sandbox
- Sandbox
SandboxHigh:
name:
- Sandbox_high
@@ -5,6 +5,6 @@ namespace LootDumpProcessor.Model.Processing;
public class DumpProcessData
{
public Dictionary<string, IKey> LooseLootCounts { get; set; } = new();
public List<PreProcessedStaticLoot> ContainerCounts { get; set; } = new();
public Dictionary<string, List<PreProcessedStaticLoot>> ContainerCounts { get; set; } = new();
public Dictionary<string, int> MapCounts { get; set; } = new();
}
@@ -1,5 +1,4 @@
using System.Text.Json.Serialization;
using LootDumpProcessor.Process.Processor;
using LootDumpProcessor.Utils;
using Newtonsoft.Json;

@@ -9,7 +8,7 @@ namespace LootDumpProcessor.Model
{
[JsonProperty("StackObjectsCount", NullValueHandling = NullValueHandling.Ignore)]
[JsonPropertyName("StackObjectsCount")]
public int? StackObjectsCount { get; set; }
public object? StackObjectsCount { get; set; }

[JsonProperty("FireMode", NullValueHandling = NullValueHandling.Ignore)]
[JsonPropertyName("FireMode")]
@@ -52,22 +52,23 @@ public class MultithreadSteppedDumpProcessor : IDumpProcessor
Runners.Add(
Task.Factory.StartNew(() =>
{

if (LoggerFactory.GetInstance().CanBeLogged(LogLevel.Debug))
LoggerFactory.GetInstance().Log($"Processing static data for file {dumped.BasicInfo.FileName}", LogLevel.Debug);
var data = _jsonSerializer.Deserialize<RootData>(File.ReadAllText(dumped.BasicInfo.FileName));
var mapName = data.Data.Name;
var dataDump = _jsonSerializer.Deserialize<RootData>(File.ReadAllText(dumped.BasicInfo.FileName));
//var mapName = dataDump.Data.Name;
var mapId = dataDump.Data.Id.ToLower();

// the if statement below takes care of processing "forced" or real static data for each map, only need
// to do this once per map, we dont care about doing it again
lock (staticContainersLock)
{
if (!staticContainers.ContainsKey(mapName))
if (!staticContainers.ContainsKey(mapId))
{
if (LoggerFactory.GetInstance().CanBeLogged(LogLevel.Info))
LoggerFactory.GetInstance().Log($"Doing first time process for map {mapName} of real static data", LogLevel.Info);
var mapStaticContainers = StaticLootProcessor.CreateStaticWeaponsAndStaticForcedContainers(data);
LoggerFactory.GetInstance().Log($"Doing first time process for map {mapId} of real static data", LogLevel.Info);
var mapStaticContainers = StaticLootProcessor.CreateStaticWeaponsAndStaticForcedContainers(dataDump);
// .Item1 = map name
// .Item2 = force/weapon static arrays
staticContainers[mapStaticContainers.Item1] = mapStaticContainers.Item2;
}
}
@@ -77,36 +78,40 @@ public class MultithreadSteppedDumpProcessor : IDumpProcessor
lock (mapStaticContainersAggregatedLock)
{
// Init dict if map key doesnt exist
if (!mapStaticContainersAggregated.TryGetValue(mapName, out mapAggregatedDataDict))
if (!mapStaticContainersAggregated.TryGetValue(mapId, out mapAggregatedDataDict))
{
mapAggregatedDataDict = new Dictionary<Template, int>();
mapStaticContainersAggregated.Add(mapName, mapAggregatedDataDict);
mapStaticContainersAggregated.Add(mapId, mapAggregatedDataDict);
}
}

// Only process the dump file if the date is higher (after) the configuration date
if (DumpWasMadeAfterConfigThresholdDate(dumped))
if (!DumpWasMadeAfterConfigThresholdDate(dumped))
{
// Keep track of how many dumps we have for each map
lock (mapDumpCounterLock)
{
IncrementMapCounterDictionaryValue(mapDumpCounter, mapName);
}
return;
}

var containerIgnoreListExists = LootDumpProcessorContext.GetConfig().ContainerIgnoreList.TryGetValue(data.Data.Id.ToLower(), out string[]? ignoreListForMap);
foreach (var dynamicStaticContainer in StaticLootProcessor.CreateDynamicStaticContainers(data))
// Keep track of how many dumps we have for each map
lock (mapDumpCounterLock)
{
IncrementMapCounterDictionaryValue(mapDumpCounter, mapId);
}

var containerIgnoreListExists = LootDumpProcessorContext.GetConfig().ContainerIgnoreList.TryGetValue(mapId, out string[]? ignoreListForMap);
foreach (var dynamicStaticContainer in StaticLootProcessor.CreateDynamicStaticContainers(dataDump))
{
lock (mapStaticContainersAggregatedLock)
{
lock (mapStaticContainersAggregatedLock)
if (containerIgnoreListExists && ignoreListForMap.Contains(dynamicStaticContainer.Id))
{
// Skip adding containers to aggredated data if container id is in ignore list
if (containerIgnoreListExists && ignoreListForMap.Contains(dynamicStaticContainer.Id))
{
continue;
}
// Skip adding containers to aggregated data if container id is in ignore list
continue;
}

// Increment count by 1
if (!mapAggregatedDataDict.TryAdd(dynamicStaticContainer, 1))
mapAggregatedDataDict[dynamicStaticContainer] += 1;
// Increment times container seen in dump by 1
if (!mapAggregatedDataDict.TryAdd(dynamicStaticContainer, 1))
{
mapAggregatedDataDict[dynamicStaticContainer] += 1;
}
}
}
@@ -126,10 +131,10 @@ public class MultithreadSteppedDumpProcessor : IDumpProcessor
td => new StaticDataPoint
{
Template = td.Key,
Probability = GetStaticContainerProbability(kv.Key, td, mapDumpCounter)
Probability = GetStaticContainerProbability(kv.Key, td, mapDumpCounter) // kv.Key = map name
}
).ToList()
).ToList().ForEach(kv => staticContainers[kv.Key].StaticContainers = kv.Value);
).ToList().ForEach(kv => staticContainers[kv.Key].StaticContainers = kv.Value); // Hydrate staticContainers.StaticContainers

// Static containers
output.Add(OutputFileType.StaticContainer, staticContainers);
@@ -146,7 +151,7 @@ public class MultithreadSteppedDumpProcessor : IDumpProcessor
// Static loot distribution
output.Add(
OutputFileType.StaticLoot,
StaticLootProcessor.CreateStaticLootDistribution(dumpProcessData.ContainerCounts)
StaticLootProcessor.CreateStaticLootDistribution(dumpProcessData.ContainerCounts, staticContainers)
);

if (LoggerFactory.GetInstance().CanBeLogged(LogLevel.Info))
@@ -199,23 +204,23 @@ public class MultithreadSteppedDumpProcessor : IDumpProcessor
.ToList()
.ForEach(tuple =>
{
var mapi = tuple.Key;
var g = tuple.ToList();
var mapName = tuple.Key;
var partialFileMetaData = tuple.ToList();
if (LoggerFactory.GetInstance().CanBeLogged(LogLevel.Info))
LoggerFactory.GetInstance().Log(
$"Processing map {mapi}, total dump data to process: {g.Count}",
$"Processing map {mapName}, total dump data to process: {partialFileMetaData.Count}",
LogLevel.Info
);
dumpProcessData.MapCounts[mapi] = g.Count;
dumpProcessData.MapCounts[mapName] = partialFileMetaData.Count;

var lockObjectContainerCounts = new object();

var lockObjectCounts = new object();
var counts = new LooseLootCounts();
var looseLootCounts = new LooseLootCounts();

var lockObjectDictionaryCounts = new object();
var dictionaryCounts = new FlatKeyableDictionary<string, int>();
counts.Counts = dictionaryCounts.GetKey();
looseLootCounts.Counts = dictionaryCounts.GetKey();

/*
var dictionaryItemCounts = new FlatKeyableDictionary<string, List<string>>();
@@ -226,22 +231,21 @@ public class MultithreadSteppedDumpProcessor : IDumpProcessor
var dictionaryItemProperties = new FlatKeyableDictionary<string, FlatKeyableList<Template>>();

var actualDictionaryItemProperties = new FlatKeyableDictionary<string, IKey>();
counts.ItemProperties = actualDictionaryItemProperties.GetKey();
looseLootCounts.ItemProperties = actualDictionaryItemProperties.GetKey();

dumpProcessData.LooseLootCounts.Add(mapi, counts.GetKey());
dumpProcessData.LooseLootCounts.Add(mapName, looseLootCounts.GetKey());
// add the items to the queue
foreach (var gi in g)
foreach (var partialData in partialFileMetaData)
{
_partialDataToProcess.Add(gi);
_partialDataToProcess.Add(partialData);
}

// Call GC before running threads
g = null;
partialFileMetaData = null;
tuple = null;
GCHandler.Collect();

// The data storage factory has a lock, we dont want the locks to occur when multithreading

for (int i = 0; i < LootDumpProcessorContext.GetConfig().Threads; i++)
{
Runners.Add(
@@ -254,26 +258,35 @@ public class MultithreadSteppedDumpProcessor : IDumpProcessor
try
{
var dumpData = _dataStorage.GetItem<ParsedDump>(partialData.ParsedDumpKey);

// Static containers
lock (lockObjectContainerCounts)
{
dumpProcessData.ContainerCounts.AddRange(dumpData.Containers);
if (!dumpProcessData.ContainerCounts.ContainsKey(mapName))
{
dumpProcessData.ContainerCounts.Add(mapName, dumpData.Containers);
}
else
{
dumpProcessData.ContainerCounts[mapName].AddRange(dumpData.Containers);
}
}

// loose loot into ids on files
// Loose loot into ids on files
var loadedDictionary =
_dataStorage
.GetItem<SubdivisionedKeyableDictionary<string, List<Template>>>(
dumpData.LooseLoot.ItemProperties
);
foreach (var (k, v) in loadedDictionary)
foreach (var (uniqueKey, containerTemplate) in loadedDictionary)
{
var count = dumpData.LooseLoot.Counts[k];
var count = dumpData.LooseLoot.Counts[uniqueKey];
lock (lockObjectDictionaryCounts)
{
if (dictionaryCounts.ContainsKey(k))
dictionaryCounts[k] += count;
if (dictionaryCounts.ContainsKey(uniqueKey))
dictionaryCounts[uniqueKey] += count;
else
dictionaryCounts[k] = count;
dictionaryCounts[uniqueKey] = count;
}

/*
@@ -288,20 +301,20 @@ public class MultithreadSteppedDumpProcessor : IDumpProcessor

lock (lockObjectDictionaryItemProperties)
{
if (!dictionaryItemProperties.TryGetValue(k, out var values))
if (!dictionaryItemProperties.TryGetValue(uniqueKey, out var values))
{
values = new FlatKeyableList<Template>();
dictionaryItemProperties.Add(k, values);
actualDictionaryItemProperties.Add(k, values.GetKey());
dictionaryItemProperties.Add(uniqueKey, values);
actualDictionaryItemProperties.Add(uniqueKey, values.GetKey());
}

values.AddRange(v);
values.AddRange(containerTemplate);
}
}

lock (lockObjectCounts)
{
counts.MapSpawnpointCount.Add(dumpData.LooseLoot.MapSpawnpointCount);
looseLootCounts.MapSpawnpointCount.Add(dumpData.LooseLoot.MapSpawnpointCount);
}
}
catch (Exception e)
@@ -346,8 +359,8 @@ public class MultithreadSteppedDumpProcessor : IDumpProcessor
_dataStorage.Store(actualDictionaryItemProperties);
actualDictionaryItemProperties = null;
GCHandler.Collect();
_dataStorage.Store(counts);
counts = null;
_dataStorage.Store(looseLootCounts);
looseLootCounts = null;
GCHandler.Collect();
});
return dumpProcessData;
@@ -85,7 +85,8 @@ public static class LooseLootProcessor
// we want to cleanup the data, so we calculate the mean for the values we get raw
// For whatever reason, we sometimes get dumps that have A LOT more loose loot point than
// the average
var initialMean = np.mean(np.array(looseLootCounts.MapSpawnpointCount)).ToArray<double>().First();
var values = looseLootCounts.MapSpawnpointCount.Select(Convert.ToDouble);
var initialMean = np.mean(np.array(values)).ToArray<double>().First();
var looseLootCountTolerancePercentage = LootDumpProcessorContext.GetConfig().ProcessorConfig.LooseLootCountTolerancePercentage / 100;
// We calculate here a high point to check, anything above this value will be ignored
// The data that was inside those loose loot points still counts for them though!
@@ -4,6 +4,7 @@ using LootDumpProcessor.Model.Output;
using LootDumpProcessor.Model.Output.StaticContainer;
using LootDumpProcessor.Model.Processing;
using LootDumpProcessor.Utils;
using System.Collections.Generic;

namespace LootDumpProcessor.Process.Processor;

@@ -33,6 +34,7 @@ public static class StaticLootProcessor
public static Tuple<string, MapStaticLoot> CreateStaticWeaponsAndStaticForcedContainers(RootData rawMapDump)
{
var mapName = rawMapDump.Data.Name;
var mapId = rawMapDump.Data.Id.ToLower();
var staticLootPositions = (from li in rawMapDump.Data.Loot
where li.IsContainer ?? false
select li).ToList();
@@ -46,8 +48,8 @@ public static class StaticLootProcessor
}
}

var forcedStaticItems = LootDumpProcessorContext.GetForcedItems().ContainsKey(mapName)
? LootDumpProcessorContext.GetForcedItems()[mapName]
var forcedStaticItems = LootDumpProcessorContext.GetForcedItems().ContainsKey(mapId)
? LootDumpProcessorContext.GetForcedItems()[mapId]
: new List<StaticForced>();

var mapStaticData = new MapStaticLoot
@@ -55,7 +57,7 @@ public static class StaticLootProcessor
StaticWeapons = staticWeapons,
StaticForced = forcedStaticItems
};
return Tuple.Create(mapName, mapStaticData);
return Tuple.Create(mapId, mapStaticData);
}

public static List<Template> CreateDynamicStaticContainers(RootData rawMapDump)
@@ -73,94 +75,207 @@ public static class StaticLootProcessor
return data;
}

public static Dictionary<string, List<AmmoDistribution>> CreateAmmoDistribution(
List<PreProcessedStaticLoot> container_counts
/// <summary>
///
/// </summary>
/// <param name="container_counts"></param>
/// <returns>key = mapid / </returns>
public static Dictionary<string, Dictionary<string, List<AmmoDistribution>>> CreateAmmoDistribution(
Dictionary<string, List<PreProcessedStaticLoot>> container_counts
)
{
var ammo = new List<string>();
foreach (var ci in container_counts)
var allMapsAmmoDistro = new Dictionary<string, Dictionary<string, List<AmmoDistribution>>>();
foreach (var mapAndContainers in container_counts)
{
ammo.AddRange(from item in ci.Items
where LootDumpProcessorContext.GetTarkovItems().IsBaseClass(item.Tpl, BaseClasses.Ammo)
select item.Tpl);
var mapid = mapAndContainers.Key;
var containers = mapAndContainers.Value;


var ammo = new List<string>();
foreach (var ci in containers)
{
ammo.AddRange(from item in ci.Items
where LootDumpProcessorContext.GetTarkovItems().IsBaseClass(item.Tpl, BaseClasses.Ammo)
select item.Tpl);
}

var ammo_counts = new List<CaliberTemplateCount>();
ammo_counts.AddRange(
ammo.GroupBy(a => a)
.Select(g => new CaliberTemplateCount
{
Caliber = LootDumpProcessorContext.GetTarkovItems().AmmoCaliber(g.Key),
Template = g.Key,
Count = g.Count()
})
);
ammo_counts = ammo_counts.OrderBy(x => x.Caliber).ToList();
var ammo_distribution = new Dictionary<string, List<AmmoDistribution>>();
foreach (var _tup_3 in ammo_counts.GroupBy(x => x.Caliber))
{
var k = _tup_3.Key;
var g = _tup_3.ToList();
ammo_distribution[k] = (from gi in g
select new AmmoDistribution
{
Tpl = gi.Template,
RelativeProbability = gi.Count
}).ToList();
}

allMapsAmmoDistro.TryAdd(mapid, ammo_distribution);
}

var ammo_counts = new List<CaliberTemplateCount>();
ammo_counts.AddRange(
ammo.GroupBy(a => a)
.Select(g => new CaliberTemplateCount
{
Caliber = LootDumpProcessorContext.GetTarkovItems().AmmoCaliber(g.Key),
Template = g.Key,
Count = g.Count()
})
);
ammo_counts = ammo_counts.OrderBy(x => x.Caliber).ToList();
var ammo_distribution = new Dictionary<string, List<AmmoDistribution>>();
foreach (var _tup_3 in ammo_counts.GroupBy(x => x.Caliber))
{
var k = _tup_3.Key;
var g = _tup_3.ToList();
ammo_distribution[k] = (from gi in g
select new AmmoDistribution
{
Tpl = gi.Template,
RelativeProbability = gi.Count
}).ToList();
}
return allMapsAmmoDistro;

return ammo_distribution;
//var ammo = new List<string>();
//foreach (var ci in container_counts)
//{
// ammo.AddRange(from item in ci.Items
// where LootDumpProcessorContext.GetTarkovItems().IsBaseClass(item.Tpl, BaseClasses.Ammo)
// select item.Tpl);
//}

//var ammo_counts = new List<CaliberTemplateCount>();
//ammo_counts.AddRange(
// ammo.GroupBy(a => a)
// .Select(g => new CaliberTemplateCount
// {
// Caliber = LootDumpProcessorContext.GetTarkovItems().AmmoCaliber(g.Key),
// Template = g.Key,
// Count = g.Count()
// })
//);
//ammo_counts = ammo_counts.OrderBy(x => x.Caliber).ToList();
//var ammo_distribution = new Dictionary<string, List<AmmoDistribution>>();
//foreach (var _tup_3 in ammo_counts.GroupBy(x => x.Caliber))
//{
// var k = _tup_3.Key;
// var g = _tup_3.ToList();
// ammo_distribution[k] = (from gi in g
// select new AmmoDistribution
// {
// Tpl = gi.Template,
// RelativeProbability = gi.Count
// }).ToList();
//}

//return ammo_distribution;
}

public static Dictionary<string, StaticItemDistribution> CreateStaticLootDistribution(
List<PreProcessedStaticLoot> container_counts
)
/// <summary>
/// Dict key = map,
/// value = sub dit:
/// key = container Ids
/// value = items + counts
/// </summary>
public static Dictionary<string, Dictionary<string, StaticItemDistribution>> CreateStaticLootDistribution(
Dictionary<string, List<PreProcessedStaticLoot>> container_counts,
Dictionary<string, MapStaticLoot> staticContainers)
{
var static_loot_distribution = new Dictionary<string, StaticItemDistribution>();
var types = Enumerable.Distinct((from ci in container_counts
select ci.Type).ToList());

foreach (var typei in types)
var allMapsStaticLootDisto = new Dictionary< string, Dictionary<string, StaticItemDistribution>>();
// Iterate over each map we have containers for
foreach (var mapContainersKvp in container_counts)
{
var container_counts_selected = (from ci in container_counts
where ci.Type == typei
select ci).ToList();
var itemscounts = new List<int>();
foreach (var ci in container_counts_selected)
{
itemscounts.Add((from cii in ci.Items
where cii.ParentId == ci.ContainerId
select cii).ToList().Count);
}
var mapName = mapContainersKvp.Key;
var containers = mapContainersKvp.Value;

static_loot_distribution[typei] = new StaticItemDistribution();
static_loot_distribution[typei].ItemCountDistribution = itemscounts.GroupBy(i => i)
.Select(g => new ItemCountDistribution
{
Count = g.Key,
RelativeProbability = g.Count()
}).ToList();
// TODO: Change for different algo that splits items per parent once parentid = containerid, then compose
// TODO: key and finally create distribution based on composed Id instead
var itemsHitCounts = new Dictionary<string, int>();
foreach (var ci in container_counts_selected)
{
foreach (var cii in ci.Items.Where(cii => cii.ParentId == ci.ContainerId))
{
if (itemsHitCounts.ContainsKey(cii.Tpl))
itemsHitCounts[cii.Tpl] += 1;
else
itemsHitCounts[cii.Tpl] = 1;
}
}
var static_loot_distribution = new Dictionary<string, StaticItemDistribution>();
var uniqueContainerTypeIds = Enumerable.Distinct((from ci in containers
select ci.Type).ToList());

static_loot_distribution[typei].ItemDistribution = itemsHitCounts.Select(v => new StaticDistribution
foreach (var typeId in uniqueContainerTypeIds)
{
Tpl = v.Key,
RelativeProbability = v.Value
}).ToList();
var container_counts_selected = (from ci in containers
where ci.Type == typeId
select ci).ToList();

// Get array of all times a count of items was found in container
List<int> itemCountsInContainer = GetCountOfItemsInContainer(container_counts_selected);

// Create structure to hold item count + weight that it will be picked
// Group same counts together
static_loot_distribution[typeId] = new StaticItemDistribution();
static_loot_distribution[typeId].ItemCountDistribution = itemCountsInContainer.GroupBy(i => i)
.Select(g => new ItemCountDistribution
{
Count = g.Key,
RelativeProbability = g.Count()
}).ToList();

static_loot_distribution[typeId].ItemDistribution = CreateItemDistribution(container_counts_selected);
}
// Key = containers tpl, value = items + count weights
allMapsStaticLootDisto.TryAdd(mapName, static_loot_distribution);

}

return static_loot_distribution;
return allMapsStaticLootDisto;

//var static_loot_distribution = new Dictionary<string, StaticItemDistribution>();
//var uniqueContainerTypeIds = Enumerable.Distinct((from ci in container_counts
// select ci.Type).ToList());

//foreach (var typeId in uniqueContainerTypeIds)
//{
// var container_counts_selected = (from ci in container_counts
// where ci.Type == typeId
// select ci).ToList();

// // Get array of all times a count of items was found in container
// List<int> itemCountsInContainer = GetCountOfItemsInContainer(container_counts_selected);

// // Create structure to hold item count + weight that it will be picked
// // Group same counts together
// static_loot_distribution[typeId] = new StaticItemDistribution();
// static_loot_distribution[typeId].ItemCountDistribution = itemCountsInContainer.GroupBy(i => i)
// .Select(g => new ItemCountDistribution
// {
// Count = g.Key,
// RelativeProbability = g.Count()
// }).ToList();

// static_loot_distribution[typeId].ItemDistribution = CreateItemDistribution(container_counts_selected);
//}
//// Key = containers tpl, value = items + count weights
//return static_loot_distribution;
}

private static List<StaticDistribution> CreateItemDistribution(List<PreProcessedStaticLoot> container_counts_selected)
{
// TODO: Change for different algo that splits items per parent once parentid = containerid, then compose
// TODO: key and finally create distribution based on composed Id instead
var itemsHitCounts = new Dictionary<string, int>();
foreach (var ci in container_counts_selected)
{
foreach (var cii in ci.Items.Where(cii => cii.ParentId == ci.ContainerId))
{
if (itemsHitCounts.ContainsKey(cii.Tpl))
itemsHitCounts[cii.Tpl] += 1;
else
itemsHitCounts[cii.Tpl] = 1;
}
}

// WIll create array of objects that have a tpl + relative probability weight value
return itemsHitCounts.Select(v => new StaticDistribution
{
Tpl = v.Key,
RelativeProbability = v.Value
}).ToList();
}

private static List<int> GetCountOfItemsInContainer(List<PreProcessedStaticLoot> container_counts_selected)
{
var itemCountsInContainer = new List<int>();
foreach (var containerWithItems in container_counts_selected)
{
// Only count item if its parent is the container, only root items are counted (not mod/attachment items)
itemCountsInContainer.Add((from cii in containerWithItems.Items
where cii.ParentId == containerWithItems.ContainerId
select cii).ToList().Count);
}

return itemCountsInContainer;
}
}
@@ -39,7 +39,7 @@ public class JsonFileIntakeReader : IIntakeReader
{
basicInfo = new BasicInfo
{
Map = fi.Data.Name,
Map = fi.Data.Id.ToLower(),
FileHash = ProcessorUtil.HashFile(fileData),
Data = fi,
Date = date.Value,
@@ -48,10 +48,16 @@ public class JsonFileIntakeReader : IIntakeReader
_totalMapDumpsCounter[fi.Data.Name] += 1;
if (LoggerFactory.GetInstance().CanBeLogged(LogLevel.Debug))
LoggerFactory.GetInstance().Log($"File {file} fully read, returning data", LogLevel.Debug);

return true;
}

// Map dump limit reached, exit
if (LoggerFactory.GetInstance().CanBeLogged(LogLevel.Debug))
LoggerFactory.GetInstance().Log($"Ignoring file {file} as the file cap for map {fi.Data.Name} has been reached", LogLevel.Debug);
LoggerFactory.GetInstance().Log($"Ignoring file {file} as the file cap for map {fi.Data.Id} has been reached", LogLevel.Debug);
basicInfo = null;

return false;
}

if (LoggerFactory.GetInstance().CanBeLogged(LogLevel.Warning))
@@ -1,7 +1,9 @@
using LootDumpProcessor.Model.Output;
using LootDumpProcessor.Model.Output.LooseLoot;
using LootDumpProcessor.Model.Output.StaticContainer;
using LootDumpProcessor.Model.Processing;
using LootDumpProcessor.Serializers.Json;
using System.Collections.Generic;

namespace LootDumpProcessor.Process.Writer;

@@ -45,30 +47,45 @@ public class FileWriter : IWriter
var looseLootData = (Dictionary<string, LooseLootRoot>)data;
foreach (var (key, value) in looseLootData)
{
foreach (var s in LootDumpProcessorContext.GetDirectoryMappings()[key].Name)
{
if (!Directory.Exists($@"{_outputPath}\locations\{s}"))
Directory.CreateDirectory($@"{_outputPath}\locations\{s}");
File.WriteAllText($@"{_outputPath}\locations\{s}\looseLoot.json",
_jsonSerializer.Serialize(value));
}
if (!Directory.Exists($@"{_outputPath}\locations\{key}"))
Directory.CreateDirectory($@"{_outputPath}\locations\{key}");
File.WriteAllText($@"{_outputPath}\locations\{key}\looseLoot.json",
_jsonSerializer.Serialize(value));
}

break;
case OutputFileType.StaticContainer:
var staticContainer = (Dictionary<string, MapStaticLoot>)data;
File.WriteAllText($@"{_outputPath}\loot\staticContainers.json",
_jsonSerializer.Serialize(staticContainer));
foreach (var (key, value) in staticContainer)
{
if (!Directory.Exists($@"{_outputPath}\locations\{key}"))
Directory.CreateDirectory($@"{_outputPath}\locations\{key}");
File.WriteAllText($@"{_outputPath}\locations\{key}\staticContainers.json",
_jsonSerializer.Serialize(value));
}

break;
case OutputFileType.StaticLoot:
var staticLoot = (Dictionary<string, StaticItemDistribution>)data;
File.WriteAllText($@"{_outputPath}\loot\staticLoot.json",
_jsonSerializer.Serialize(staticLoot));
var staticLootData = (Dictionary<string, Dictionary<string, StaticItemDistribution>>)data;
foreach (var (key, value) in staticLootData)
{
if (!Directory.Exists($@"{_outputPath}\locations\{key}"))
Directory.CreateDirectory($@"{_outputPath}\locations\{key}");
File.WriteAllText($@"{_outputPath}\locations\{key}\staticLoot.json",
_jsonSerializer.Serialize(value));
}

break;
case OutputFileType.StaticAmmo:
var staticAmmo = (Dictionary<string, List<AmmoDistribution>>)data;
File.WriteAllText($@"{_outputPath}\loot\staticAmmo.json",
_jsonSerializer.Serialize(staticAmmo));
var staticAmmo = (Dictionary<string, Dictionary<string, List<AmmoDistribution>>>)data;
foreach (var (key, value) in staticAmmo)
{
if (!Directory.Exists($@"{_outputPath}\locations\{key}"))
Directory.CreateDirectory($@"{_outputPath}\locations\{key}");
File.WriteAllText($@"{_outputPath}\locations\{key}\staticAmmo.json",
_jsonSerializer.Serialize(value));
}

break;
default:
throw new ArgumentOutOfRangeException(nameof(type), type, null);