Make optimize.py detect deleted folders and files
parent 26fd32615c
commit dce2c8254b
optimize.py
@@ -4,58 +4,70 @@ from PIL import Image
 import shutil

 def compute_hash(file_path):
-    # Compute the hash of the file content
     with open(file_path, 'rb') as f:
         content = f.read()
     hash_value = hashlib.md5(content).hexdigest()
     return hash_value

 def compress_and_save(source_dir, target_dir, hash_dir):
-    # Ensure the target directory exists
     if not os.path.exists(target_dir):
         os.makedirs(target_dir)

-    # Ensure the hash directory exists
     if not os.path.exists(hash_dir):
         os.makedirs(hash_dir)

+    processed_files = set()
+
     for root, dirs, files in os.walk(source_dir):
         for file_name in files:
-            if file_name.lower().endswith('.png'):
-                source_path = os.path.join(root, file_name)
-                relative_path = os.path.relpath(source_path, source_dir)
-                target_path = os.path.join(target_dir, relative_path)
+            source_path = os.path.join(root, file_name)
+            relative_path = os.path.relpath(source_path, source_dir)
+            target_path = os.path.join(target_dir, relative_path)
+            hash_file_path = os.path.join(hash_dir, relative_path + '.hash')

-                # Compute the hash of the original file
-                original_hash = compute_hash(source_path)
-                # Create the corresponding hash file path
-                hash_file_path = os.path.join(hash_dir, relative_path + '.hash')
-                # Check if the hash has changed
-                if not os.path.exists(hash_file_path) or open(hash_file_path, 'r').read() != original_hash:
-                    # Create the directory structure in the target directory if it doesn't exist
-                    os.makedirs(os.path.dirname(target_path), exist_ok=True)
+            processed_files.add(relative_path)
+
+            original_hash = compute_hash(source_path)
+
+            if not os.path.exists(hash_file_path) or open(hash_file_path, 'r').read() != original_hash:
+                os.makedirs(os.path.dirname(target_path), exist_ok=True)
+                if file_name.lower().endswith('.png'):
                     with Image.open(source_path) as img:
                         img = img.convert('RGBA')
                         img.save(target_path, format='PNG', optimize=True)
-                    os.makedirs(os.path.dirname(hash_file_path), exist_ok=True)
-                    with open(hash_file_path, 'w') as hash_file:
-                        hash_file.write(original_hash)
-            else:
-                # Copy non-PNG files to the target directory
-                source_path = os.path.join(root, file_name)
-                relative_path = os.path.relpath(source_path, source_dir)
-                target_path = os.path.join(target_dir, relative_path)
-                os.makedirs(os.path.dirname(target_path), exist_ok=True)
-                shutil.copyfile(source_path, target_path)
+                else:
+                    shutil.copyfile(source_path, target_path)
+                os.makedirs(os.path.dirname(hash_file_path), exist_ok=True)
+                with open(hash_file_path, 'w') as hash_file:
+                    hash_file.write(original_hash)
+
+    for root, dirs, files in os.walk(target_dir, topdown=False):
+        for file_name in files:
+            file_path = os.path.join(root, file_name)
+            relative_path = os.path.relpath(file_path, target_dir)
+            if relative_path not in processed_files:
+                os.remove(file_path)
+                hash_file_path = os.path.join(hash_dir, relative_path + '.hash')
+                if os.path.exists(hash_file_path):
+                    os.remove(hash_file_path)
+
+        for dir_name in dirs:
+            dir_path = os.path.join(root, dir_name)
+            if not os.listdir(dir_path):
+                os.rmdir(dir_path)
+
+    for root, dirs, files in os.walk(hash_dir, topdown=False):
+        for dir_name in dirs:
+            dir_path = os.path.join(root, dir_name)
+            if not os.listdir(dir_path):
+                os.rmdir(dir_path)

-# Replace 'source_directory_path' with the path to your directory containing PNG files
 source_directory_path = 'resources'

-# Replace 'target_directory_path' with the path to the directory where compressed images will be saved
 target_directory_path = 'optimizedResources'

-# Replace 'hash_directory_path' with the path to the directory where hashes will be stored
 hash_directory_path = 'hashes'

 print("Optimizing images...")
 compress_and_save(source_directory_path, target_directory_path, hash_directory_path)
+print("Optimization complete.")
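
The new cleanup behaviour can be exercised with a short, hypothetical smoke test. Nothing below is part of the commit: it is a sketch that assumes it runs from a throwaway copy of the project, next to optimize.py, with Pillow installed; the resources/icons/dot.png path is invented for the demonstration, while 'resources', 'optimizedResources', and 'hashes' are the directories hard-coded in the script.

    # Hypothetical smoke test for the deletion handling added in this commit.
    # Assumes Pillow is installed and that this file sits next to optimize.py.
    import os
    import subprocess
    import sys

    from PIL import Image

    # Create a small source tree with one PNG (path invented for the demo).
    os.makedirs('resources/icons', exist_ok=True)
    Image.new('RGBA', (8, 8), (255, 0, 0, 255)).save('resources/icons/dot.png')

    # First run: the image is optimized and its content hash is recorded.
    subprocess.run([sys.executable, 'optimize.py'], check=True)
    assert os.path.exists('optimizedResources/icons/dot.png')
    assert os.path.exists('hashes/icons/dot.png.hash')

    # Delete the source file and its folder, then run the script again.
    os.remove('resources/icons/dot.png')
    os.rmdir('resources/icons')
    subprocess.run([sys.executable, 'optimize.py'], check=True)

    # The optimized copy, its hash file, and the now-empty folders are cleaned up.
    assert not os.path.exists('optimizedResources/icons/dot.png')
    assert not os.path.exists('hashes/icons/dot.png.hash')
    assert not os.path.exists('optimizedResources/icons')

The second run passes because compress_and_save now records every relative path it sees in processed_files, then walks optimizedResources and hashes bottom-up (topdown=False), deleting any file whose path was not recorded and removing any directory left empty.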