- Go to your Shopify admin/settings/files page
- Open your browser's dev tools and go to the console
- Paste the content of the console_download_list.js file and press Enter. Your browser will automatically fetch each page and download a list with the links to all the files on the CDN.
- Using your preferred code editor, edit the downloaded HTML file by wrapping each link in an img tag (a sketch follows these steps).
- Open the HTML file you just edited in a browser, then right-click and choose "Save as...". It will download the HTML file again along with all the images to the location you specify.
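The img-tag step can also be scripted instead of done by hand. Below is a minimal sketch; the urls variable is a hypothetical placeholder for the newline-separated list the script downloads, and the output is the HTML you would otherwise type yourself.

// Sketch: turn a newline-separated list of CDN URLs into an HTML page of
// <img> tags. `urls` is a hypothetical stand-in for the downloaded list.
var urls = "https://cdn.shopify.com/a.png\nhttps://cdn.shopify.com/b.png";
var html = "<html><body>\n" +
  urls.split("\n").map(function (u) {
    return '<img src="' + u + '">';
  }).join("\n") +
  "\n</body></html>";
console.log(html); // paste this into the HTML file before saving it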
Forked from ridem/Download-Shopify-CDN-Assets.md
Download all Shopify CDN assets from a store
// Collects the URL of every file on the current Files page, then loads the
// next page and repeats until an empty page is reached.
function fetchPageAssets() {
  var assets = $("#assets-table .next-input--readonly")
  assets.each(function (index, input) {
    files.push(input.value)
    // Once the last row on this page has been collected, fetch the next page.
    if (index + 1 == assets.length) {
      // The numeric id of the last row serves as the pagination cursor.
      var lastItem = $(input).parents("tr[bind-class]").attr('bind-class').replace(/\D/g, '')
      $.ajax({
        url: "/admin/settings/files?direction=next&last_id=" + lastItem + "&last_value=" + lastItem + "&limit=100&order=id+desc",
      }).done(function (data) {
        // Watch the rewritten document: when the next page has rendered,
        // either download the list (empty page) or keep paginating.
        var mutationObserver = new MutationObserver(function (mutations, observer) {
          mutations.some(function (mutation) {
            if (mutation.target.id &&
              mutation.target.id == "assets-area" &&
              mutation.addedNodes[0].nextElementSibling &&
              mutation.addedNodes[0].nextElementSibling.innerHTML.indexOf("empty") > -1
            ) {
              // No more files: save everything collected so far.
              downloadListFile()
              observer.disconnect()
              return true;
            } else if (mutation.target.id &&
              mutation.target.id == "assets-area" &&
              mutation.previousSibling &&
              mutation.previousSibling.className == "ui-layout ui-layout--full-width"
            ) {
              // Another page of files rendered: collect it too.
              fetchPageAssets()
              observer.disconnect()
              return true;
            }
          })
        });
        mutationObserver.observe(document, {
          childList: true,
          subtree: true
        });
        // Replace the current document with the next page's markup. The script
        // keeps running in the same JS context, so `files` survives the swap.
        var newDoc = document.open("text/html", "replace");
        newDoc.write(data);
        newDoc.close();
      })
    }
  })
}
// Builds a data: URI from the collected URLs (one per line) and triggers
// a download through a temporary anchor element.
function downloadListFile() {
  var downloader = $("<a id='download-file' href='' download='shopify-files.html'></a>")
  $(".ui-title-bar").append(downloader)
  var data = 'data:application/octet-stream;base64,' + window.btoa(files.join("\r\n"));
  $('#download-file').attr('href', data);
  $('#download-file')[0].click();
}
var files = []
fetchPageAssets()
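The script above assumes jQuery ($) is available on the Files page. If a newer Shopify admin no longer ships it, the collection step can be reproduced with plain DOM APIs. This is a minimal sketch, assuming the same #assets-table markup the original script targets; it only gathers the URLs on the current page and logs them, leaving pagination to you.

// jQuery-free sketch: gather the file URLs from the current page only.
// Assumes the "#assets-table .next-input--readonly" selector still matches.
var files = Array.from(
  document.querySelectorAll("#assets-table .next-input--readonly")
).map(function (input) { return input.value; });
console.log(files.join("\n"));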
Thanks @wasalwayshere, this worked perfectly and was a huge help!
Go to: admin/settings/files?limit=250 in Shopify
- I pasted the following into the console (Chrome) and pressed Enter to output the list of URLs:
const arrOfImgsSources = [];
for (var i = 0; i < document.images.length; i++) {
  let str1 = "_60x60"; // thumbnail suffix to look for
  let img_src = document.images[i].src;
  const isSubstring = img_src.includes(str1);
  if (isSubstring) {
    // Strip the _60x60 suffix to get the full-size image URL.
    var clean = img_src.replace(/_60x60/g, '');
    arrOfImgsSources.push(clean);
    var filename = clean.replace(/^.*[\\\/]/, '');
    var clean_filename = filename.split('?')[0];
    //console.log(clean_filename)
    console.log(clean);
  }
};
//console.log(arrOfImgsSources);
I copied and pasted the output to a file named urls.txt, then in the macOS terminal I ran:
wget --content-disposition --trust-server-names -i urls.txt
Then to clean up the filenames:
find . -type f -name "*\?*" -print0 | while IFS= read -r -d '' file; do mv -f "$file" "`echo $file | cut -d? -f1`"; done
Thanks a bunch, this worked flawlessly
Bump; this code has saved me for 6+ months, thank you @wasalwayshere!
The filename clean-up part doesn't work on macOS; the unquoted cut -d? errors out as below, because zsh treats the bare ? as a glob pattern:
zsh: no matches found: -d?
For macOS, quote the delimiter and use the version below:
find . -type f -name "*\?*" -print0 |
while IFS= read -r -d '' file;
do
mv -f "$file" "`echo $file | cut -d '?' -f1`";
done
That works! Thanks @wasalwayshere