"""Aggregate raw loot dumps into static and loose loot distributions for the Tarkov server database."""
import json
import yaml
import time
import py7zr
import hashlib
import datetime
from tqdm import tqdm
from pathlib import Path
from concurrent import futures
from collections import defaultdict
from itertools import groupby

from src.tarkov_items import TarkovItems
from src.static_loot import preprocess_staticloot, StaticLootProcessor
from src.loose_loot import preprocess_looseloot, LooseLootProcessor

# Load the main configuration and the forced static/loose item lists.
with open('config/config.yaml', 'r') as fin:
    config = yaml.load(fin, Loader=yaml.FullLoader)
with open(f'config/{config["config"]["static"]["forced_yaml"]}', 'r') as fin:
    forced_static_yaml = yaml.load(fin, Loader=yaml.FullLoader)
STATIC_WEAPON_IDS = forced_static_yaml['static_weapon_ids']
FORCED_STATIC = forced_static_yaml['forced_items']
with open(f'config/{config["config"]["loose"]["forced_yaml"]}', 'r') as fin:
    FORCED_LOOSE = yaml.load(fin, Loader=yaml.FullLoader)
with open(f'config/{config["server"]["map_directory_mapping_yaml"]}', 'r') as fin:
    directory_mapping = yaml.load(fin, Loader=yaml.FullLoader)

tarkov_server_dir = Path(config["server"]["location"])
loot_dump_archive = Path(config["archives"]["target_folder"]) / config["archives"][
    "loot_filename"]


def hash_file(text):
    """Return the SHA-256 hex digest of a dump's raw bytes, used to detect duplicate dumps."""
    sha256 = hashlib.sha256()
    sha256.update(text)
    return sha256.hexdigest()


def parse_dumps(input):
    """Parse a single dump: skip blacklisted maps and dumps older than the map's threshold date,
    then split the loot into preprocessed loose loot and static containers."""
    blacklist = ["Hideout"]
    fname = input[0]
    bio = input[1]
    text = bio.read()
    fi = json.loads(text)
    datestr = fname.split(".getLocalloot_")[-1].split(".")[0]
    date = datetime.datetime.strptime(datestr, "%Y-%m-%d_%H-%M-%S")
    if (
            fi["data"] is not None
            and fi["data"]["Name"] not in blacklist
            and date > datetime.datetime.strptime(
                directory_mapping[fi["data"]["Name"]]["threshold_date"], "%Y-%m-%d"
            )
    ):
        basic_info = {
            "map": fi["data"]["Name"],
            "filehash": hash_file(text),
            "date": date,
            "fname": fname
        }
        looseloot = [li for li in fi["data"]["Loot"] if not li["IsStatic"]]
        staticloot = [li for li in fi["data"]["Loot"] if li["IsStatic"]]
        looseloot_processed = preprocess_looseloot(looseloot)
        containers = preprocess_staticloot(staticloot, STATIC_WEAPON_IDS)
        return {
            "basic_info": basic_info,
            "looseloot": looseloot_processed,
            "containers": containers
        }
    else:
        return None


def main():
    tarkov_items = TarkovItems(
        items=tarkov_server_dir / "project/assets/database/templates/items.json",
        handbook=tarkov_server_dir / "project/assets/database/templates/handbook.json",
        locales=tarkov_server_dir / "project/assets/database/locales/global/en.json"
    )
    loose_loot_dir = tarkov_server_dir / "project/assets/database/locations"
    static_loot_dir = tarkov_server_dir / "project/assets/database/loot"

    print("Open dump archive", end="; ")
    map_files = {}
    gather_loot_results = []
    time_start = time.time()
    with py7zr.SevenZipFile(loot_dump_archive, 'r') as archive:
        archive_files = set(archive.getnames())
        with futures.ProcessPoolExecutor() as executor:
            print("Gathering dumps")
            for result in list(
                    tqdm(executor.map(parse_dumps, archive.read(archive_files).items()),
                         total=len(archive_files))):
                if result is not None:
                    gather_loot_results.append(result)

                    # get the newest dump per map
                    mapi = result["basic_info"]["map"]
                    if mapi not in map_files:
                        map_files[mapi] = (
                            result["basic_info"]["date"],
                            result["basic_info"]["fname"]
                        )
                    else:
                        if result["basic_info"]["date"] > map_files[mapi][0]:
                            map_files[mapi] = (
                                result["basic_info"]["date"],
                                result["basic_info"]["fname"]
                            )
    print(f"Reading dumps took {time.time() - time_start} seconds.")
    dump_count = len(gather_loot_results)

    # remove duplicate dumps (same file hash)
    time_start = time.time()
    gather_loot_results = sorted(gather_loot_results, key=lambda x: x["basic_info"]["filehash"])
    gather_loot_results_unique = []
    for _, g in groupby(gather_loot_results, key=lambda x: x["basic_info"]["filehash"]):
        g = list(g)
        if len(g) > 1:
            # print(f"Duplicate dumps: {', '.join([gi['filename'] for gi in g])}")
            pass
        gather_loot_results_unique.append(g[0])
    del gather_loot_results
    dump_count_unique = len(gather_loot_results_unique)
    print(f"Removing duplicates took {time.time() - time_start} seconds: "
          f"{dump_count - dump_count_unique} / {dump_count}")

    # Map Reduce: aggregate per-map loose loot counts and collect all container contents
    print("Map reducing dumps", end="; ")
    time_start = time.time()
    gather_loot_results_unique = sorted(gather_loot_results_unique,
                                        key=lambda x: x["basic_info"]["map"])
    looseloot_counts = {}
    container_counts = []
    map_counts = {}
    for mapi, g in groupby(gather_loot_results_unique, key=lambda x: x["basic_info"]["map"]):
        g = list(g)
        map_counts[mapi] = len(g)
        looseloot_counts[mapi] = {}
        looseloot_counts[mapi]["counts"] = defaultdict(int)
        looseloot_counts[mapi]["items"] = defaultdict(list)
        looseloot_counts[mapi]["itemproperties"] = defaultdict(list)
        looseloot_counts[mapi]["map_spawnpoint_count"] = []
        for gi in g:
            container_counts += gi["containers"]
            for k, v in gi["looseloot"]["counts"].items():
                looseloot_counts[mapi]["counts"][k] += v
            for k, v in gi["looseloot"]["items"].items():
                looseloot_counts[mapi]["items"][k] += v
            for k, v in gi["looseloot"]["itemproperties"].items():
                looseloot_counts[mapi]["itemproperties"][k] += v
            looseloot_counts[mapi]["map_spawnpoint_count"] += [
                gi["looseloot"]["map_spawnpoint_count"]]
    del gather_loot_results_unique
    print(f"took {time.time() - time_start} seconds.")

    static_loot_processor = StaticLootProcessor(
        tarkov_items=tarkov_items,
        static_weapon_ids=STATIC_WEAPON_IDS,
        forced_static=FORCED_STATIC
    )

    # create static containers (containers per map, forced loot in map, static weapons in map)
    print("Create \"static containers\"", end='; ')
    time_start = time.time()
    static_containers = {}
    with py7zr.SevenZipFile(loot_dump_archive, 'r') as archive:
        targets = [datename_tuple[1] for _, datename_tuple in map_files.items()]
        targets = sorted(targets)
        for fname, bio in archive.read(targets).items():
            mapi, static_containers_mi = static_loot_processor.create_static_containers(bio)
            static_containers[mapi] = static_containers_mi
    print(f"took {time.time() - time_start} seconds.")
    with open(static_loot_dir / "staticContainers.json", "w") as fout:
        json.dump(static_containers, fout, indent=1)

    # Ammo distribution
    time_start = time.time()
    print(f"Creating \"ammo\" distribution", end="; ")
    ammo_distribution = static_loot_processor.create_ammo_distribution(container_counts)
    print(f"took {time.time() - time_start} seconds.")
    with open(static_loot_dir / "staticAmmo.json", "w") as fout:
        json.dump(ammo_distribution, fout, indent=1)

    # Static loot distribution
    time_start = time.time()
    print(f"Creating \"static container\"", end='; ')
    static_loot_distribution = static_loot_processor.create_static_loot_distribution(
        container_counts)
    print(f"took {time.time() - time_start} seconds.")
    with open(static_loot_dir / "staticLoot.json", 'w') as fout:
        json.dump(static_loot_distribution, fout, indent=1)

    # Loose loot distribution
    loose_loot_processor = LooseLootProcessor(
        tarkov_items=tarkov_items,
        FORCED_LOOSE=FORCED_LOOSE
    )
    time_start = time.time()
    print(f"Calculating \"loose loot\" distribution", end='; ')
    loose_loot_distribution = loose_loot_processor.create_loose_loot_distribution(
        map_counts, looseloot_counts)
    print(f"took {time.time() - time_start} seconds")
    # write one looseLoot.json per map directory listed in the directory mapping
    for mi, cnt in map_counts.items():
        for mapdir in directory_mapping[mi]["name"]:
            with open(loose_loot_dir / mapdir / "looseLoot.json", "w") as fout:
                json.dump(loose_loot_distribution[mi], fout, indent=1)


if __name__ == '__main__':
    main()