diff --git a/myscripts/working_tool.py b/myscripts/working_tool.py
index 039f580..2472cbb 100644
--- a/myscripts/working_tool.py
+++ b/myscripts/working_tool.py
@@ -6,8 +6,15 @@ import requests
 import tempfile
 import json
 
-# 存储上次上传的信息
-LAST_UPLOAD_FILE = '.last_upload.json'
+# 缓存目录和文件配置
+CACHE_DIR = '.working_cache'
+UPLOAD_CACHE_FILE = os.path.join(CACHE_DIR, 'upload.json')
+DOWNLOAD_CACHE_FILE = os.path.join(CACHE_DIR, 'download.json')
+
+# 确保缓存目录存在
+def ensure_cache_dir():
+    """确保缓存目录存在"""
+    os.makedirs(CACHE_DIR, exist_ok=True)
 
 
 def get_file_hash(file_path):
@@ -69,8 +76,8 @@ def check_files_modified(working_files):
         current_hashes[file_path] = get_file_hash(file_path)
 
     # 读取上次的哈希值
-    if os.path.exists(LAST_UPLOAD_FILE):
-        with open(LAST_UPLOAD_FILE, 'r') as f:
+    if os.path.exists(UPLOAD_CACHE_FILE):
+        with open(UPLOAD_CACHE_FILE, 'r') as f:
             last_data = json.load(f)
 
     # 比较哈希值
@@ -129,9 +136,9 @@ def download_and_extract(download_url, extract_dir='.', password=None):
     os.makedirs(extract_dir, exist_ok=True)
 
     # 检查是否与最后一次下载的URL相同
-    last_upload_file = LAST_UPLOAD_FILE
-    if os.path.exists(last_upload_file):
-        with open(last_upload_file, 'r') as f:
+    ensure_cache_dir()
+    if os.path.exists(DOWNLOAD_CACHE_FILE):
+        with open(DOWNLOAD_CACHE_FILE, 'r') as f:
             last_data = json.load(f)
 
         if last_data.get('download_url') == download_url:
@@ -196,6 +203,11 @@ def download_and_extract(download_url, extract_dir='.', password=None):
         with open(download_info_file, 'w') as f:
             json.dump(download_info, f, indent=2)
 
+        # 保存下载信息到全局缓存
+        ensure_cache_dir()
+        with open(DOWNLOAD_CACHE_FILE, 'w') as f:
+            json.dump(download_info, f, indent=2)
+
         print(f"Download information saved to {download_info_file}")
         return True
     except (requests.exceptions.RequestException, zipfile.BadZipFile, IOError, Exception) as e:
@@ -276,7 +288,8 @@ def upload_working_files(password=None):
         current_hashes[file_path] = get_file_hash(file_path)
 
     # 保存本次上传信息
-    with open(LAST_UPLOAD_FILE, 'w') as f:
+    ensure_cache_dir()
+    with open(UPLOAD_CACHE_FILE, 'w') as f:
         json.dump({
             'hashes': current_hashes,
             'download_url': download_url,