Wrapper script for importing, copying, and renaming photos, and creating gallery.
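A typical workflow runs the stages below one at a time (the script's filename isn't shown in this gist, so `photo.py` is a placeholder):

    photo.py --import-sd   # copy JPG/ARW/MP4 files from the SD card into import/
    photo.py --copy        # move developed files from converted/ into hires/YYYY/MM
    photo.py --rename      # rename JPEGs and MP4s in the current directory by date and tags
    photo.py --gallery     # rebuild the sigal gallery under gallery/
    photo.py --trash       # trash import/ and converted/ contents and remove SD card clips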
#!/usr/bin/env python3
"""Wrapper script for importing, copying, and renaming photos, and creating gallery."""

__author__ = "Joseph Reagle"
__copyright__ = "Copyright (C) 2024 Joseph Reagle"
__license__ = "GPLv3"
__version__ = "1.0"
import logging as log
import re
import shutil
import subprocess
import tempfile
import time
from datetime import datetime
from pathlib import Path

import exiftool
from send2trash import send2trash
YEAR_MONTH = datetime.now().strftime("%Y/%m")
HOME = Path.home()
PHOTO_DIR = HOME / "f" / "photo"
CONVERTED_DIR = PHOTO_DIR / "converted"
GALLERY_DIR = PHOTO_DIR / "gallery"
HIRES_DIR = PHOTO_DIR / "hires"
MONTH_DIR = HIRES_DIR / YEAR_MONTH
IMPORT_DIR = PHOTO_DIR / "import"
SDCARD_PATH = Path("/Volumes/Untitled/")
SDCARD_IMG_PATH = SDCARD_PATH / "DCIM"
SDCARD_VID_PATH = SDCARD_PATH / "PRIVATE/M4ROOT/CLIP/"
def import_from_sd() -> None:
    """Import photos and videos from SD card to IMPORT_DIR."""
    file_pattern = re.compile(r".*\.(jpg|arw|mp4)$", re.IGNORECASE)
    exclude_pattern = re.compile(r".*(Trashes|THMBNL).*")
    for file_path in SDCARD_PATH.rglob("*"):
        if (
            file_path.is_file()
            and file_pattern.match(file_path.name)
            and not exclude_pattern.match(str(file_path))
        ):
            shutil.copy2(file_path, IMPORT_DIR / file_path.name)
            log.info(f"copied {file_path.name=}")
    # Give imports unique, sortable names: day-HHMMSS from mtime plus an index.
    for index, file_path in enumerate(IMPORT_DIR.iterdir()):
        log.info(f"{file_path.name=}")
        mtime = datetime.fromtimestamp(file_path.stat().st_mtime)
        new_name = f"{mtime:%d-%H%M%S}_{index}{file_path.suffix.lower()}"
        file_path.rename(IMPORT_DIR / new_name)
    # LR_PATH = Path("/Applications/Adobe Lightroom Classic/Adobe Lightroom Classic.app")
    # subprocess.run(["open", "-W", LR_PATH])
def copy_to_hires() -> None:
    """Move files in CONVERTED_DIR into the monthly hires directory."""
    MONTH_DIR.mkdir(parents=True, exist_ok=True)
    for file_path in CONVERTED_DIR.glob("*.*"):
        file_path.rename(MONTH_DIR / file_path.name)
    DK_PATH = Path("/Applications/digiKam.org/digikam.app")
    subprocess.run(["open", "-W", DK_PATH])
def safe_rename(file_path: Path, new_file_path: Path) -> Path:
    """Safely rename a file, bumping a numeric suffix on name collisions."""
    # Park the file under a temporary name first, then search for a free target name.
    tmp_file = tempfile.NamedTemporaryFile(
        prefix=file_path.name, dir=file_path.parent, delete=False
    )
    tmp_path = Path(tmp_file.name)
    tmp_file.close()
    log.info(f"renaming {file_path} to {tmp_path}")
    file_path.rename(tmp_path)
    while True:
        if new_file_path.exists():
            stem = new_file_path.stem
            suffix = new_file_path.suffix
            if "_" in stem:
                name, right_chunk = stem.rsplit("_", 1)
                if right_chunk.isdigit():
                    new_number = int(right_chunk) + 1
                    new_file_path = new_file_path.with_name(
                        f"{name}_{new_number}{suffix}"
                    )
                else:
                    new_file_path = new_file_path.with_name(f"{stem}_1{suffix}")
            else:
                new_file_path = new_file_path.with_name(f"{stem}_1{suffix}")
        else:
            break
    log.info(f"renaming {tmp_path} to {new_file_path}")
    tmp_path.rename(new_file_path)
    return new_file_path
def ensure_list(value: str | list[str] | None) -> list[str]:
    """Convert None, a single string, or a list of strings to a list of strings.

    Necessary because of inconsistent exiftool returns.
    """
    match value:
        case None:
            return []
        case str():
            return [value]
        case list():
            return value
        case _:
            raise ValueError(f"Unexpected type: {type(value)}")
def rename_media(files: list[Path]):
    """Rename images and videos using ExifTool."""
    DATE_FORMAT = "%d" if args.suppress_time else "%d-%H%M"
    with exiftool.ExifToolHelper() as et:
        for file_path in files:
            log.info(f"trying get_metadata for {file_path}")
            metadata = et.get_metadata(str(file_path))[0]
            # Try to get the creation date
            date_time = metadata.get("EXIF:DateTimeOriginal") or metadata.get(
                "QuickTime:CreateDate"
            )
            if not date_time:
                print(f"Skipping, no creation date found for {file_path.name}")
                continue
            # Format the date
            formatted_date = time.strftime(
                DATE_FORMAT, time.strptime(date_time, "%Y:%m:%d %H:%M:%S")
            )
            # Get keywords
            keywords = ensure_list(
                metadata.get("IPTC:Keywords")
                or metadata.get("XMP:TagsList")
                or metadata.get("QuickTime:Category")
            )
            if keywords:
                keyword_string = "-" + "-".join(keywords)
            else:
                keyword_string = ""
                print(f"Warning, no tags found; review `metadata` in {file_path.name}")
            new_fn = f"{formatted_date}{keyword_string}{file_path.suffix.lower()}"
            new_file_path = safe_rename(file_path, file_path.with_name(new_fn))
            new_file_path.chmod(0o644)
def create_gallery():
    """Run sigal to build the static gallery."""
    subprocess.run(
        ["sigal", "build", HIRES_DIR, GALLERY_DIR], check=True, cwd=PHOTO_DIR
    )
def cleanup_files():
    """Trash files in import and converted; remove the SD card image and video dirs."""
    for directory in [IMPORT_DIR, CONVERTED_DIR]:
        for item in directory.glob("*"):
            send2trash(item)
    for directory in [SDCARD_IMG_PATH, SDCARD_VID_PATH]:
        if directory.exists():
            shutil.rmtree(directory, ignore_errors=False)
if __name__ == "__main__":
    import argparse

    arg_parser = argparse.ArgumentParser(
        description="Wrapper script for importing, copying, and renaming photos, and creating gallery."
    )
    arg_parser.add_argument("files", nargs="*", metavar="FILE")
    arg_parser.add_argument(
        "-i",
        "--import-sd",
        action="store_true",
        default=False,
        help="import files from SD card for development",
    )
    arg_parser.add_argument(
        "-c",
        "--copy",
        action="store_true",
        default=False,
        help="copy developed files to hires folder",
    )
    arg_parser.add_argument(
        "-r",
        "--rename",
        action="store_true",
        default=False,
        help="rename JPEG and MP4 files based on timestamp",
    )
    arg_parser.add_argument(
        "-s",
        "--suppress-time",
        action="store_true",
        default=False,
        help="when renaming, use day only, suppressing time",
    )
    arg_parser.add_argument(
        "-g",
        "--gallery",
        action="store_true",
        default=False,
        help="run sigal to generate gallery",
    )
    arg_parser.add_argument(
        "-t",
        "--trash",
        action="store_true",
        default=False,
        help="trash files on SD card and in import",
    )
    arg_parser.add_argument(
        "-L",
        "--log-to-file",
        action="store_true",
        default=False,
        help="log to file %(prog)s.log",
    )
    arg_parser.add_argument(
        "-V",
        "--verbose",
        action="count",
        default=0,
        help="increase verbosity (specify multiple times for more)",
    )
    arg_parser.add_argument("--version", action="version", version="1.0")
    args = arg_parser.parse_args()
    log_level = log.CRITICAL - (args.verbose * 10)
    LOG_FORMAT = "%(levelname).4s %(funcName).10s:%(lineno)-4d| %(message)s"
    if args.log_to_file:
        log.basicConfig(
            # Log to <script name>.log, as promised by the --log-to-file help text.
            filename=f"{Path(__file__).stem}.log",
            filemode="w",
            level=log_level,
            format=LOG_FORMAT,
        )
    else:
        log.basicConfig(level=log_level, format=LOG_FORMAT)
    if not args.files:
        args.files = [
            file
            # eschew mkv because exiftool/digikam doesn't write metadata
            for pattern in ("*.jp*g", "*.mp4")
            for file in Path().glob(pattern, case_sensitive=False)
        ]

    if args.import_sd:
        import_from_sd()
    if args.copy:
        copy_to_hires()
    if args.rename:
        rename_media(args.files)
    if args.gallery:
        create_gallery()
    if args.trash:
        cleanup_files()