introduce a configuration option to exclude dumps older than a map-specific threshold date, defined per map in map_directory_mapping.yaml
parent 5e7f7d5b8c
commit 0b58147250
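In essence, each map entry in map_directory_mapping.yaml now carries a threshold_date, and any dump whose filename timestamp is older than that date is skipped. The snippet below is a minimal standalone sketch of that filter, reusing the key names and the filename parsing seen in the diff; the function name, the hard-coded config/map_directory_mapping.yaml path, and the example filename are assumptions for illustration only (the real script resolves the path through its config file inside parse_dumps()).

import datetime
import yaml

def dump_is_recent_enough(map_name, dump_fname,
                          mapping_path="config/map_directory_mapping.yaml"):
    # Load the per-map configuration (standalone sketch; the script itself
    # resolves this path via config["server"]["map_directory_mapping_yaml"]).
    with open(mapping_path, "r") as fin:
        directory_mapping = yaml.load(fin, Loader=yaml.FullLoader)

    # Map-specific cut-off date, e.g. "2022-06-27" for Customs.
    threshold_date = datetime.datetime.strptime(
        directory_mapping[map_name]["threshold_date"], "%Y-%m-%d"
    )

    # The dump timestamp is encoded in the filename after ".getLocalloot_".
    datestr = dump_fname.split(".getLocalloot_")[-1].split(".")[0]
    date = datetime.datetime.strptime(datestr, "%Y-%m-%d_%H-%M-%S")

    return date > threshold_date

# Example call with a hypothetical dump filename:
# dump_is_recent_enough("Customs", "resp.getLocalloot_2022-07-01_12-00-00.json")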
@@ -31,7 +31,7 @@ with open(f'config/{config["config"]["loose"]["forced_yaml"]}', 'r') as fin:
     FORCED_LOOSE = yaml.load(fin, Loader=yaml.FullLoader)
 
 with open(f'config/{config["server"]["map_directory_mapping_yaml"]}', 'r') as fin:
-    loose_loot_dir_map = yaml.load(fin, Loader=yaml.FullLoader)
+    directory_mapping = yaml.load(fin, Loader=yaml.FullLoader)
 
 tarkov_server_dir = Path(config["server"]["location"])
 loot_dump_archive = Path(config["archives"]["target_folder"]) / config["archives"]["loot_filename"]
@@ -51,11 +51,19 @@ def parse_dumps(input):
         text = bio.read()
 
         fi = json.loads(text)
+        threshold_date = datetime.datetime.strptime(
+            directory_mapping[fi["data"]["Name"]]["threshold_date"],
+            "%Y-%m-%d"
+        )
 
         datestr = fname.split(".getLocalloot_")[-1].split(".")[0]
         date = datetime.datetime.strptime(datestr, "%Y-%m-%d_%H-%M-%S")
 
-        if fi["data"] is not None and fi["data"]["Name"] not in blacklist:
+        if (
+            fi["data"] is not None
+            and fi["data"]["Name"] not in blacklist
+            and date > threshold_date
+        ):
             basic_info = {
                 "map": fi["data"]["Name"],
                 "filehash": hash_file(text),
@@ -213,7 +221,7 @@ def main():
     print(f"took {time.time() - time_start} seconds")
 
     for mi, cnt in map_counts.items():
-        for mapdir in loose_loot_dir_map[mi]:
+        for mapdir in directory_mapping[mi]["name"]:
             with open(loose_loot_dir / mapdir / "looseLoot.json", "w") as fout:
                 json.dump(loose_loot_distribution[mi], fout, indent=1)
 
@@ -1,18 +1,34 @@
 ---
 Customs:
+  name:
   - bigmap
+  threshold_date: "2022-06-27"
 Factory:
+  name:
   - factory4_day
   - factory4_night
+  threshold_date: "2022-06-27"
 Interchange:
+  name:
   - interchange
+  threshold_date: "2022-06-27"
 Laboratory:
+  name:
   - laboratory
+  threshold_date: "2020-01-01"
 Lighthouse:
+  name:
   - lighthouse
+  threshold_date: "2022-06-27"
 ReserveBase:
+  name:
   - rezervbase
+  threshold_date: "2022-06-27"
 Shoreline:
+  name:
   - shoreline
+  threshold_date: "2022-06-27"
 Woods:
+  name:
   - woods
+  threshold_date: "2022-06-27"
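Since every map entry must now provide both a name list and a parseable threshold_date, a quick sanity check of the edited file can be run with a few lines of Python. This is only a sketch, and it assumes the file is located at config/map_directory_mapping.yaml (the path the script derives from its own configuration):

import datetime
import yaml

with open("config/map_directory_mapping.yaml", "r") as fin:
    mapping = yaml.load(fin, Loader=yaml.FullLoader)

for map_name, entry in mapping.items():
    # Each map maps to a list of server directory names ...
    assert isinstance(entry["name"], list) and entry["name"]
    # ... and a threshold date in YYYY-MM-DD format.
    datetime.datetime.strptime(entry["threshold_date"], "%Y-%m-%d")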