Compare commits
1 commit

Author | SHA1 | Date
---|---|---
 | 3b883daedb | 
.gitignore (vendored): 3 changed lines
```diff
@@ -12,6 +12,5 @@ item_cache.json
 /bkp/
-
 # Ignore build files
 /dist/
 /build/
-app.spec
+pyoxidizer.bzl
```
app.py: 38 changed lines
```diff
@@ -5,11 +5,18 @@ import logging
 import os
 import traceback
 import time
+import sys
 
 app = Flask(__name__)
 
+# Determine logging level
+if '--debug' in sys.argv:
+    logging_level = logging.DEBUG
+else:
+    logging_level = logging.INFO
+
 # Setup logging to file
-logging.basicConfig(filename='log.log', level=logging.DEBUG, format='%(asctime)s %(levelname)s:%(message)s')
+logging.basicConfig(filename='log.log', level=logging_level, format='%(asctime)s %(levelname)s:%(message)s')
 
 # File paths
 ASSORT_FILE_PATH = 'assort.json'
```
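Review note: the new level selection collapses to a one-line conditional. A minimal standalone sketch of the behavior this hunk introduces, reusing the file name and format string from the diff:

```python
import logging
import sys

# DEBUG when the process is started with --debug, INFO otherwise.
logging_level = logging.DEBUG if '--debug' in sys.argv else logging.INFO
logging.basicConfig(
    filename='log.log',
    level=logging_level,
    format='%(asctime)s %(levelname)s:%(message)s',
)
logging.debug("written to log.log only when --debug is passed")
logging.info("written to log.log at either level")
```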
```diff
@@ -21,7 +28,7 @@ RUBLE_TPL_ID = '5449016a4bdc2d6f028b456f'
 try:
     with open(ASSORT_FILE_PATH) as f:
         assort_data = json.load(f)
-    logging.debug("Assort data loaded successfully")
+    logging.info("Assort data loaded successfully")
 except Exception as e:
     logging.error(f"Error loading assort data: {e}")
     logging.error(traceback.format_exc())
```
```diff
@@ -29,7 +36,7 @@ except Exception as e:
 try:
     with open(QUEST_ASSORT_FILE_PATH) as f:
         quest_assort_data = json.load(f)
-    logging.debug("Quest assort data loaded successfully")
+    logging.info("Quest assort data loaded successfully")
 except json.JSONDecodeError as e:
     logging.error(f"Error loading quest assort data (malformed JSON): {e}")
     logging.error(traceback.format_exc())
```
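Review note: each loader here logs the error and then the traceback as two separate calls; `logging.exception` does both in one. A sketch of the equivalent, assuming a placeholder value for the path constant since the diff only shows its name:

```python
import json
import logging

QUEST_ASSORT_FILE_PATH = 'quest_assort.json'  # placeholder value; only the constant name appears in the diff

try:
    with open(QUEST_ASSORT_FILE_PATH) as f:
        quest_assort_data = json.load(f)
    logging.info("Quest assort data loaded successfully")
except json.JSONDecodeError:
    # One call logs the message at ERROR level and appends the traceback,
    # replacing the error(...) + error(traceback.format_exc()) pair.
    logging.exception("Error loading quest assort data (malformed JSON)")
```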
```diff
@@ -46,14 +53,14 @@ def load_item_cache():
         try:
             with open(CACHE_FILE_PATH) as f:
                 item_cache = json.load(f)
-            logging.debug("Item cache loaded successfully")
+            logging.info("Item cache loaded successfully")
         except Exception as e:
             logging.error(f"Error loading item cache: {e}")
             logging.error(traceback.format_exc())
             item_cache = {}
     else:
         item_cache = {}
-        logging.debug("Initialized empty item cache")
+        logging.info("Initialized empty item cache")
 
 load_item_cache()
 
```
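The cache loader pairs `json.load` with an empty-dict fallback. A self-contained sketch of the same load-or-initialize pattern; the `os.path.exists` guard and the return-based shape are assumptions, since the hunk shows only the body and the original mutates a global `item_cache`:

```python
import json
import logging
import os

CACHE_FILE_PATH = 'item_cache.json'  # name taken from the .gitignore context above

def load_item_cache():
    # Load the JSON cache when the file exists; fall back to an empty
    # dict on a parse error or when there is nothing to load yet.
    if os.path.exists(CACHE_FILE_PATH):
        try:
            with open(CACHE_FILE_PATH) as f:
                cache = json.load(f)
            logging.info("Item cache loaded successfully")
            return cache
        except Exception as e:
            logging.error(f"Error loading item cache: {e}")
            return {}
    logging.info("Initialized empty item cache")
    return {}
```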
```diff
@@ -88,15 +95,15 @@ def get_main_items_with_details(assort_data):
         item_copy['quest_requirement'] = get_quest_requirement(item_copy['_id'])
         item_copy['offer_name'] = item_copy['_id']  # Add offer name
         items.append(item_copy)
-    logging.debug(f"Main items with details: {items}")
+    logging.info(f"Main items with details: {items}")
     return items
 
 def get_item_details_cached(tpl):
     if tpl in item_cache:
-        logging.debug(f"Cache hit for tpl {tpl}")
+        logging.info(f"Cache hit for tpl {tpl}")
         return item_cache[tpl]
     else:
-        logging.debug(f"Cache miss for tpl {tpl}, fetching from API")
+        logging.info(f"Cache miss for tpl {tpl}, fetching from API")
         item_details = get_items_details([tpl])
         if tpl in item_details:
             item_cache[tpl] = item_details[tpl]
```
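`get_item_details_cached` is a cache-aside lookup. A sketch of the pattern in isolation; `fetch_many` is a hypothetical stand-in for the repo's `get_items_details`:

```python
import logging

item_cache = {}

def get_item_details_cached(tpl, fetch_many):
    # Return the stored entry on a hit; on a miss, fetch, store, return.
    if tpl in item_cache:
        logging.info(f"Cache hit for tpl {tpl}")
        return item_cache[tpl]
    logging.info(f"Cache miss for tpl {tpl}, fetching from API")
    details = fetch_many([tpl])
    if tpl in details:
        item_cache[tpl] = details[tpl]
    return item_cache.get(tpl)
```

Called as `get_item_details_cached(tpl, get_items_details)`, this keeps the hot path to a single dict lookup.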
```diff
@@ -112,7 +119,7 @@ def get_items_details(tpls):
     try:
         response = requests.post(TARKOV_API_URL, json={'query': query})
         data = response.json()
-        logging.debug(f"Item details for tpls {tpls}: {data}")
+        logging.info(f"Item details for tpls {tpls}: {data}")
         return {tpl: data['data'][f'item{index}'] for index, tpl in enumerate(tpls)}
     except Exception as e:
         logging.error(f"Error fetching item details for tpls {tpls}: {e}")
```
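The comprehension reads `data['data'][f'item{index}']`, so the GraphQL query must alias each item as `item0`, `item1`, and so on. A sketch of how such a query could be assembled; the endpoint URL and the `name`/`shortName` field selection are assumptions, only the alias scheme is implied by the diff:

```python
import logging
import requests

TARKOV_API_URL = 'https://api.tarkov.dev/graphql'  # assumed endpoint; only the constant name is visible

def get_items_details(tpls):
    # One request with per-item aliases item0, item1, ... so the response
    # keys line up with the dict comprehension in the hunk above.
    body = ' '.join(
        f'item{i}: item(id: "{tpl}") {{ name shortName }}'
        for i, tpl in enumerate(tpls)
    )
    try:
        response = requests.post(TARKOV_API_URL, json={'query': f'{{ {body} }}'})
        data = response.json()
        return {tpl: data['data'][f'item{i}'] for i, tpl in enumerate(tpls)}
    except Exception as e:
        logging.error(f"Error fetching item details for tpls {tpls}: {e}")
        return {}
```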
```diff
@@ -131,7 +138,7 @@ def get_item_parts_with_details(parent_id, assort_data):
                 sub_parts.append(part_copy)
         return sub_parts
     parts = fetch_parts(parent_id)
-    logging.debug(f"Parts for parent_id {parent_id}: {parts}")
+    logging.info(f"Parts for parent_id {parent_id}: {parts}")
     return parts
 
 def get_barter_scheme_with_details(item_id, assort_data):
```
```diff
@@ -145,7 +152,7 @@ def get_barter_scheme_with_details(item_id, assort_data):
             req_copy['details'] = req_details
             scheme_details.append(req_copy)
         barter_scheme.append(scheme_details)
-    logging.debug(f"Barter scheme for item_id {item_id}: {barter_scheme}")
+    logging.info(f"Barter scheme for item_id {item_id}: {barter_scheme}")
     return barter_scheme
 
 def get_quest_requirement(item_id):
```
```diff
@@ -254,11 +261,11 @@ def save_assort_data():
         backup_file_path = os.path.join('./bkp', f"assort_backup_{int(time.time())}.json")
         if os.path.exists(ASSORT_FILE_PATH):
             os.rename(ASSORT_FILE_PATH, backup_file_path)
-            logging.debug(f"Backup of assort data created at {backup_file_path}")
+            logging.info(f"Backup of assort data created at {backup_file_path}")
 
         with open(ASSORT_FILE_PATH, 'w') as f:
             json.dump(cleaned_assort_data, f, indent=2)  # Use 2 spaces for indentation
-        logging.debug("Assort data saved successfully")
+        logging.info("Assort data saved successfully")
     except Exception as e:
         logging.error(f"Error saving assort data: {e}")
         logging.error(traceback.format_exc())
```
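The save path renames the current file into `./bkp` under a timestamped name before writing. A generalized sketch of that backup-then-write pattern; the `os.makedirs` call is an addition here, since the diff assumes `./bkp` already exists:

```python
import json
import os
import time

def save_with_backup(path, data, backup_dir='./bkp'):
    # Move any existing file aside under a timestamped name, then write
    # the fresh copy. A crash mid-write still leaves the backup intact.
    os.makedirs(backup_dir, exist_ok=True)
    if os.path.exists(path):
        backup = os.path.join(backup_dir, f"assort_backup_{int(time.time())}.json")
        os.rename(path, backup)
    with open(path, 'w') as f:
        json.dump(data, f, indent=2)  # 2-space indentation, as in the diff
```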
```diff
@@ -267,7 +274,7 @@ def save_item_cache():
     try:
         with open(CACHE_FILE_PATH, 'w') as f:
             json.dump(item_cache, f, indent=2)  # Use 2 spaces for indentation
-        logging.debug("Item cache saved successfully")
+        logging.info("Item cache saved successfully")
     except Exception as e:
         logging.error(f"Error saving item cache: {e}")
         logging.error(traceback.format_exc())
```
```diff
@@ -316,4 +323,5 @@ def start_build_cache():
         return jsonify({"status": "error"}), 500
 
 if __name__ == '__main__':
-    app.run(debug=True)
+    print("Webapp is hosted at http://127.0.0.1:5000")
+    app.run(debug='--debug' in sys.argv)
```
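Usage note: after this change the Flask debugger and DEBUG-level file logging hang off the same flag, so the app is presumably started as `python app.py --debug` for development and plain `python app.py` otherwise. One caveat worth flagging: the hosted-at message prints unconditionally before `app.run`, even if binding the port fails.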