SS3 > SS4.1 protected folder migration script
<?php

/*
 * Generates a FileHash and moves each legacy file into the .protected folder.
 * On the second run it regenerates CMS thumbnails and publishes the files.
 */

use SilverStripe\Dev\BuildTask;
use SilverStripe\Control\Director;
use SilverStripe\Assets\File;
use SilverStripe\AssetAdmin\Controller\AssetAdmin;
use SilverStripe\ORM\DB;

class PublishAllFiles extends BuildTask
{
    protected $title = 'Publish All Files';

    public function run($request)
    {
        // 10-minute execution limit
        set_time_limit(600);

        $admin = AssetAdmin::singleton();
        $files = File::get();

        foreach ($files as $file) {
            $name = $file->getFilename();
            $originalDir = BASE_PATH . '/' . Director::publicDir() . '/assets/';

            if (!$file->getField('FileHash') && file_exists($originalDir . $name) && !is_dir($originalDir . $name)) {
                // First pass: hash the legacy file and move it into the .protected store
                $hash = sha1_file($originalDir . $name);
                DB::prepared_query(
                    'UPDATE "File" SET "FileHash" = ? WHERE "ID" = ?',
                    [$hash, $file->ID]
                );

                $targetDir = str_replace(
                    './',
                    '',
                    BASE_PATH . '/' . Director::publicDir() . '/assets/.protected/'
                        . dirname($name) . '/' . substr($hash, 0, 10) . '/'
                );

                if (!file_exists($targetDir)) {
                    mkdir($targetDir, 0755, true);
                }

                rename($originalDir . $name, $targetDir . basename($name));
                echo '<b style="color:red">' . $originalDir . $name . ' > ' . $targetDir . basename($name) . '</b><br>';
            } else {
                // Second pass: regenerate thumbnails and publish the file
                $admin->generateThumbnails($file);
                $file->copyVersionToStage('Stage', 'Live');
                echo '<b style="color:green">Published: ' . $name . '</b><br>';
            }
        }

        exit('Done!');
    }
}
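Assuming no $segment config is set on the class, the task should be reachable in a browser at /dev/tasks/PublishAllFiles (or via CLI with sake, e.g. vendor/bin/sake dev/tasks/PublishAllFiles). Run it once to hash and move the legacy files into .protected, then run it a second time to regenerate thumbnails and publish.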
I recommend resetting the time limit inside the loop; 10 minutes isn't enough when dealing with thousands of files. Also, with a huge number of files it's better to first run a simple DB query to fetch the IDs instead of loading the File DataObjects up front, and to destroy $file at the end of each iteration. That way only one file is in memory at a time and it's released at the end of its loop. A rough sketch of those suggestions follows the link below.
https://gist.github.com/thezenmonkey/1b7846a01255e94c02906da6d121385a
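A minimal sketch of those suggestions, assuming the same SilverStripe APIs used in the task above (the linked gist may differ in detail): fetch only the IDs up front, reset the time limit on every iteration, and release each record before moving on so only one File is in memory at a time.

// Sketch only: replace the loop body in run() with something like this
$ids = DB::query('SELECT "ID" FROM "File"')->column();

foreach ($ids as $id) {
    set_time_limit(600); // reset the limit per file, not once for the whole run

    $file = File::get()->byID($id);
    if (!$file) {
        continue;
    }

    // ... same hash/move or thumbnail/publish logic as in the task above ...

    $file->destroy();
    unset($file); // drop the reference so memory stays flat
}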