from pathlib import Path
import json as _json
import re as _re

JSON_EXTENSION = '.json'

def find_json_for_file(file: Path):
    """Locate the Takeout sidecar JSON for a media file and return it as a dict."""
    try:
        if file.with_name(file.name + JSON_EXTENSION).is_file():
            # file.jpg -> file.jpg.json
            the_json_path = file.with_name(file.name + JSON_EXTENSION)
        elif file.with_name(file.name.replace(file.suffix, JSON_EXTENSION)).is_file():
            # file.jpg -> file.json
            the_json_path = file.with_name(file.name.replace(file.suffix, JSON_EXTENSION))
        elif len(file.name) >= 47:
            # Takeout truncates long names, keeping only the first 46 characters:
            # fileee...eee.jpg -> fileee...eee.json
            the_json_path = file.with_name(file.name[0:46] + JSON_EXTENSION)
        elif (weird_search := _re.search(r'^(.+)(\(\d+\))(\..+)$', file.name)):
            if file.with_name(weird_search.group(1) + JSON_EXTENSION).is_file():
                # file(1).jpg -> file.json
                the_json_path = file.with_name(weird_search.group(1) + JSON_EXTENSION)
            else:
                # file(1).jpg -> file.jpg(1).json
                the_json_path = file.with_name(weird_search.group(1) + weird_search.group(3) + weird_search.group(2) + JSON_EXTENSION)
        # print('Using ' + the_json_path.name + ' for ' + file.name)
        with open(the_json_path, 'r') as f:
            return _json.load(f)
    except Exception:
        raise FileNotFoundError(f"Couldn't find json for file: {file}")
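As a quick illustration (not part of the original gist), calling the function on a single media file could look like this; the path is just a hypothetical placeholder:

photo = Path('Takeout/Google Photos/2020/IMG_1234.jpg')  # hypothetical example path

try:
    metadata = find_json_for_file(photo)
    # Takeout sidecars typically include the capture time under photoTakenTime.
    print(metadata.get('photoTakenTime', {}).get('timestamp'))
except FileNotFoundError as err:
    print(err)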
If you want this for each file of a folder, just walk through the folder calling the function. If you want all the JSON files in a folder, just walk with glob through *.json.
I know it's a silly answer, but I don't think I understood the question.
Sorry for the confusion.
What syntax would be used for
def find_json_for_file(file: Path):
if I want it to go through an entire folder, instead of running the script for each individual file?
Will this work for all file types (HEIC, GIF, MOV, MP4, AVI) that come with a Photos Takeout, or does something need to be changed first?
This is only used to determine the JSON file for each media file, regardless of the extension.
To walk through a folder, I guess something like this should work:
for file in list(Path('the directory').rglob('*')):
    if not file.is_dir():
        the_json = find_json_for_file(file)
        # Do whatever you want with the json...
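For reference, a slightly fuller version of that loop (my own sketch, not part of the gist) that also skips the .json sidecars themselves and media files with no match might look like this:

takeout_root = Path('the directory')  # placeholder path; point this at your Takeout folder

for file in takeout_root.rglob('*'):
    # Skip sub-folders and the metadata files themselves.
    if file.is_dir() or file.suffix.lower() == JSON_EXTENSION:
        continue
    try:
        the_json = find_json_for_file(file)
    except FileNotFoundError:
        continue  # media file with no matching sidecar JSON
    # Do whatever you want with the json...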
I am confused, can you help?
Sure.
Where can I contact you: Discord, Telegram, or something else? Please mention your username. Also, thank you so much.
This gist is good enough. What's the problem?
Ah yes, it's good, but I am not comfortable here. Please provide a contact.
I have no idea how to use this. I have downloaded Python so far and tried running the code, but I don't know where to go from there. Can anyone help with a step-by-step tutorial? I'd be very thankful. I'm trying to merge all the .json files and photos together from a Takeout folder.
@kyro-h did you get those step-by-step instructions? I'm in the same situation you were in a year ago. I would really appreciate any help, anyone. @tetebueno
@HJ1q I ended up using a different Python script. (https://pastebin.com/SkYwF0Jy)
It should be run as something like: nameofprogram "folder to be scanned".
It should rename all your JSON files to match the names of the image files.
Then I used exiftool (https://exiftool.org/) to apply the JSON metadata to the images.
(https://pastebin.com/5Wr7yzkh)
That's all, if I remember correctly from a year ago.
Also, do not run this on your only copy of your Takeout. Make a backup and try it on a smaller set of images, so you don't waste time on something that didn't do what you thought it would do.
Please check this:
https://github.com/TheLastGimbus/GooglePhotosTakeoutHelper
Is there a way to get this to work for a folder of images/files (HEIC, gif, mov, mp4, avi) and not just pointing it to one jpeg file at a time?