Skip to content

Instantly share code, notes, and snippets.

@davidlu1001
Last active August 23, 2024 05:30
Show Gist options
  • Select an option

  • Save davidlu1001/0c272b690f04cc430ad13a778f30fa11 to your computer and use it in GitHub Desktop.

Select an option

Save davidlu1001/0c272b690f04cc430ad13a778f30fa11 to your computer and use it in GitHub Desktop.
File Mover
[CmdletBinding(SupportsShouldProcess=$true)]
Param(
# Optional JSON configuration file; supplies any parameter not given on the command line.
[Parameter(Mandatory=$false)]
[string]$ConfigFile,
# Folder to move files out of; must already exist as a directory.
[Parameter(Mandatory=$false)]
[ValidateScript({Test-Path $_ -PathType Container})]
[string]$SourceFolder,
# Base destination path; per-file subfolders are created beneath it.
[Parameter(Mandatory=$false)]
[string]$TargetFolderBase,
# Fixed subfolder name; when empty, the subfolder is derived from each file's timestamp year.
[Parameter(Mandatory=$false)]
[string]$TargetSubFolder,
# Files whose selected timestamp (see CutoffTimeType) is earlier than this are moved.
[Parameter(Mandatory=$false)]
[DateTime]$CutoffTime,
# Which file timestamp the cutoff comparison uses.
[Parameter(Mandatory=$false)]
[ValidateSet("CreateTime", "ModifyTime", "AccessTime")]
[string]$CutoffTimeType = "ModifyTime",
# Log file path; defaults to a timestamped name in the current directory.
[Parameter(Mandatory=$false)]
[string]$LogFile = "FileMove_$(Get-Date -Format 'yyyyMMdd_HHmmss').log",
# CSV report path; defaults to a timestamped name in the current directory.
[Parameter(Mandatory=$false)]
[string]$CsvFile = "FileMoveReport_$(Get-Date -Format 'yyyyMMdd_HHmmss').csv",
# Optional whitelist of extensions (including the dot, e.g. ".txt").
[Parameter(Mandatory=$false)]
[string[]]$FileExtensions,
# Optional size bounds in bytes; a value of 0 disables the bound.
[Parameter(Mandatory=$false)]
[long]$MinSize,
[Parameter(Mandatory=$false)]
[long]$MaxSize,
# What to do when a file of the same name already exists at the target.
[Parameter(Mandatory=$false)]
[ValidateSet("Rename", "Skip", "Overwrite")]
[string]$DuplicateHandling = "Rename",
# When set, only files modified within the last 24 hours are considered.
[Parameter(Mandatory=$false)]
[switch]$IncrementalMode,
# Retry budget per file move, with a growing sleep between attempts.
[Parameter(Mandatory=$false)]
[int]$MaxRetries = 3,
# Upper bound on concurrently running background jobs.
[Parameter(Mandatory=$false)]
[int]$MaxParallelJobs = 5,
# Optional credentials, intended for network-share access.
[Parameter(Mandatory=$false)]
[System.Management.Automation.PSCredential]$Credential,
# Simulate only: nothing is created or moved; report shows "ToBeMoved".
[Parameter(Mandatory=$false)]
[switch]$Dryrun
)
# Error action preference
# Stop on any error so failures surface through the try/catch blocks below.
$ErrorActionPreference = "Stop"
# Function to write log messages
function Write-Log {
    # Appends a timestamped, level-tagged line to $LogFile and echoes the
    # message to the matching PowerShell stream (verbose/warning/error).
    param (
        # Message text to record.
        [string]$Message,
        # Path of the log file to append to.
        [string]$LogFile,
        # Severity of the entry; controls which stream echoes it.
        [ValidateSet("INFO", "WARNING", "ERROR")]
        [string]$Level = "INFO"
    )
    $timestamp = Get-Date -Format "yyyy-MM-dd HH:mm:ss"
    $logMessage = "$timestamp - [$Level] - $Message"
    $logMessage | Out-File -Append -FilePath $LogFile
    switch ($Level) {
        "INFO" { Write-Verbose $Message }
        "WARNING" { Write-Warning $Message }
        # Fix: the script sets $ErrorActionPreference = 'Stop', under which a
        # bare Write-Error becomes a TERMINATING error — logging an ERROR would
        # abort here before the caller's own 'throw' runs, masking the original
        # exception. Force Continue so logging never terminates by itself.
        "ERROR" { Write-Error $Message -ErrorAction Continue }
    }
}
# Import configuration if provided
# Import configuration if provided
function Import-Configuration {
    # Loads optional JSON configuration from $ConfigFile into script-scope
    # variables. Values explicitly supplied on the command line take
    # precedence over the config file. Throws if $ConfigFile is set but the
    # file does not exist, or if the JSON cannot be parsed.
    if ($ConfigFile -and (Test-Path $ConfigFile)) {
        try {
            $config = Get-Content $ConfigFile -Raw | ConvertFrom-Json
            # Fix: $PSBoundParameters inside this function refers to THIS
            # function's (empty) parameter table, not the script's — so the
            # original check was always true and config values silently
            # overrode explicit command-line arguments. Fetch the caller's
            # (script scope's) bound parameters instead.
            $callerBound = (Get-Variable -Name PSBoundParameters -Scope 1 -ErrorAction SilentlyContinue).Value
            foreach ($prop in $config.PSObject.Properties) {
                if ($null -eq $callerBound -or -not $callerBound.ContainsKey($prop.Name)) {
                    if ($prop.Name -eq "CutoffTime") {
                        # Fix: ParseExact("yyyy-MM-dd HH:mm:ss") rejected the
                        # ISO 8601 form ("2023-08-01T00:00:00") shown in the
                        # documented config example. Culture-invariant Parse
                        # accepts both layouts.
                        Set-Variable -Name $prop.Name -Value ([DateTime]::Parse($prop.Value, [System.Globalization.CultureInfo]::InvariantCulture)) -Scope Script
                    } else {
                        Set-Variable -Name $prop.Name -Value $prop.Value -Scope Script
                    }
                }
            }
            Write-Log "Configuration imported successfully from $ConfigFile" $LogFile
        }
        catch {
            Write-Log "Error importing configuration: $_" $LogFile "ERROR"
            throw
        }
    }
    elseif ($ConfigFile) {
        throw "Config file not found: $ConfigFile"
    }
}
# Validate required parameters
# Validate required parameters
function Validate-Parameters {
    # Confirms the three mandatory inputs are present (whether they came from
    # the command line or the config file) before any file work begins.
    $hasAllRequired = ($SourceFolder -and $TargetFolderBase -and $CutoffTime)
    if (-not $hasAllRequired) {
        throw "SourceFolder, TargetFolderBase, and CutoffTime are required. Provide them as parameters or in the config file."
    }
    Write-Log "Parameters validated successfully" $LogFile
}
# Function to get files based on criteria
function Get-FilesToMove {
# Enumerates the source folder (non-recursive, files only) and returns the
# FileInfo objects that satisfy every active filter: cutoff time, extension
# whitelist, size bounds, and incremental mode. Returns an empty array when
# nothing matches. Reads script-scope parameters directly.
try {
$getChildItemParams = @{
Path = $SourceFolder
File = $true
ErrorAction = 'Stop'
}
# NOTE(review): the FileSystem provider does not support -Credential on
# Get-ChildItem; for network shares this may throw — confirm, or map the
# share with New-PSDrive instead.
if ($null -ne $Credential) {
$getChildItemParams['Credential'] = $Credential
}
Write-Log "Attempting to get files from source folder: $SourceFolder" $LogFile
$allFiles = Get-ChildItem @getChildItemParams
if ($null -eq $allFiles -or $allFiles.Count -eq 0) {
Write-Log "No files found in the source folder." $LogFile
return @()
}
Write-Log "Total files found before filtering: $($allFiles.Count)" $LogFile
# Emit each file that passes ALL filters; a 'return' inside ForEach-Object
# skips only the current file, not the whole function.
$files = $allFiles | ForEach-Object {
$file = $_
$include = $true
if ($null -eq $file) {
Write-Log "Encountered a null file object, skipping." $LogFile
return
}
try {
# Pick the timestamp the cutoff comparison uses.
$fileTime = switch ($CutoffTimeType) {
"CreateTime" { $file.CreationTime }
"ModifyTime" { $file.LastWriteTime }
"AccessTime" { $file.LastAccessTime }
}
if ($null -eq $fileTime) {
Write-Log "Unable to get $CutoffTimeType for file $($file.Name), skipping. File details: $($file | Format-List | Out-String)" $LogFile
return
}
# Use CompareTo method for more reliable datetime comparison
# Files at or after the cutoff are excluded; only strictly-older files move.
if ($fileTime.CompareTo($CutoffTime) -ge 0) {
Write-Verbose "File $($file.Name) excluded due to $CutoffTimeType : $($fileTime.ToString('yyyy-MM-dd HH:mm:ss')) (CutoffTime: $($CutoffTime.ToString('yyyy-MM-dd HH:mm:ss')))"
$include = $false
} else {
Write-Verbose "File $($file.Name) included due to $CutoffTimeType : $($fileTime.ToString('yyyy-MM-dd HH:mm:ss')) (CutoffTime: $($CutoffTime.ToString('yyyy-MM-dd HH:mm:ss')))"
}
# Extension whitelist (only applied when -FileExtensions was given).
if ($FileExtensions -and $file.Extension -notin $FileExtensions) {
Write-Verbose "File $($file.Name) excluded due to extension: $($file.Extension)"
$include = $false
}
# Size bounds; a bound of 0 is falsy and therefore disabled.
if ($MinSize -and $file.Length -lt $MinSize) {
Write-Verbose "File $($file.Name) excluded due to size (too small): $($file.Length)"
$include = $false
}
if ($MaxSize -and $file.Length -gt $MaxSize) {
Write-Verbose "File $($file.Name) excluded due to size (too large): $($file.Length)"
$include = $false
}
# Incremental mode keeps only files modified within the last 24 hours.
if ($IncrementalMode -and $file.LastWriteTime -le (Get-Date).AddDays(-1)) {
Write-Verbose "File $($file.Name) excluded due to incremental mode: $($file.LastWriteTime.ToString('yyyy-MM-dd HH:mm:ss'))"
$include = $false
}
if ($include) {
$file
}
}
catch {
# A failure on one file is logged but does not stop the enumeration.
Write-Log "Error processing file $($file.Name): $_" $LogFile "ERROR"
Write-Log "File details: $($file | Format-List | Out-String)" $LogFile "ERROR"
}
}
# @() guards against PowerShell unwrapping a single result to a scalar.
$filteredFileCount = if ($null -eq $files) { 0 } else { @($files).Count }
Write-Log "Files matching criteria: $filteredFileCount" $LogFile
return $files
}
catch {
Write-Log "Error in Get-FilesToMove: $_" $LogFile "ERROR"
Write-Log "Error details: $($_.Exception.Message)" $LogFile "ERROR"
Write-Log "Stack trace: $($_.ScriptStackTrace)" $LogFile "ERROR"
throw
}
}
# Function to process files in parallel
function Process-FilesInParallel {
# Moves each file in $Files to its computed target folder using a throttled
# pool of background jobs (Start-Job), and returns one PSCustomObject per
# file (Name, sizes, timestamps, TargetFolder, MoveStatus) for the CSV
# report. MoveStatus is "Success", "ToBeMoved" (Dryrun), "Skipped: ..." or
# "Failed: ...".
param (
[Parameter(Mandatory=$true)]
[System.IO.FileInfo[]]$Files,
[Parameter(Mandatory=$true)]
[string]$TargetFolderBase,
[Parameter(Mandatory=$false)]
[string]$TargetSubFolder,
[Parameter(Mandatory=$true)]
[string]$CutoffTimeType,
[Parameter(Mandatory=$true)]
[string]$DuplicateHandling,
[Parameter(Mandatory=$true)]
[int]$MaxRetries,
[Parameter(Mandatory=$true)]
[int]$MaxParallelJobs,
[Parameter(Mandatory=$false)]
[System.Management.Automation.PSCredential]$Credential,
[Parameter(Mandatory=$false)]
[switch]$Dryrun
)
$jobs = @()
$results = @()
$processedCount = 0
$totalFiles = $Files.Count
# This scriptblock runs in a separate PowerShell process (Start-Job), so it
# cannot see this function's scope; every value it needs — including the
# file's metadata as individual primitives rather than a FileInfo object —
# is passed explicitly through -ArgumentList.
$scriptBlock = {
param(
$Name,
$FullName,
$DirectoryName,
$Length,
$CreationTime,
$LastWriteTime,
$LastAccessTime,
$TargetFolderBase,
$TargetSubFolder,
$CutoffTimeType,
$DuplicateHandling,
$MaxRetries,
$Credential,
$Dryrun
)
# Attempts the actual move with retries and duplicate handling; returns a
# status string rather than throwing, so the job always yields a result row.
function Move-FileWithStatus {
param (
[Parameter(Mandatory=$true)]
[string]$SourcePath,
[Parameter(Mandatory=$true)]
[string]$TargetPath,
[Parameter(Mandatory=$true)]
[string]$DuplicateHandling,
[Parameter(Mandatory=$true)]
[int]$MaxRetries,
[Parameter(Mandatory=$false)]
[System.Management.Automation.PSCredential]$Credential,
[Parameter(Mandatory=$false)]
[switch]$Dryrun
)
# Dryrun short-circuits before any filesystem change.
if ($Dryrun) {
return "ToBeMoved"
}
$retryCount = 0
do {
try {
$moveItemParams = @{
Path = $SourcePath
Destination = $TargetPath
Force = $true
ErrorAction = 'Stop'
}
# NOTE(review): the FileSystem provider does not support -Credential on
# Move-Item; this may throw for network paths — confirm.
if ($null -ne $Credential) {
$moveItemParams['Credential'] = $Credential
}
# "Rename": probe Name_1, Name_2, ... until an unused name is found.
if ($DuplicateHandling -eq "Rename" -and (Test-Path $TargetPath)) {
$i = 1
$fileInfo = [System.IO.FileInfo]::new($TargetPath)
do {
$newName = "{0}_{1}{2}" -f $fileInfo.BaseName, $i, $fileInfo.Extension
$TargetPath = Join-Path $fileInfo.DirectoryName $newName
$i++
} while (Test-Path $TargetPath)
$moveItemParams['Destination'] = $TargetPath
}
elseif ($DuplicateHandling -eq "Skip" -and (Test-Path $TargetPath)) {
return "Skipped: File already exists"
}
# "Overwrite" is handled implicitly by -Force with the original path.
Move-Item @moveItemParams
return "Success"
}
catch {
# Linearly growing backoff (2s, 4s, ...) between attempts.
$retryCount++
if ($retryCount -ge $MaxRetries) {
return "Failed: $_"
}
Start-Sleep -Seconds (2 * $retryCount)
}
} while ($retryCount -lt $MaxRetries)
}
# Choose the timestamp that drives the year-based subfolder name.
$fileTime = switch ($CutoffTimeType) {
"CreateTime" { $CreationTime }
"ModifyTime" { $LastWriteTime }
"AccessTime" { $LastAccessTime }
}
# Explicit subfolder wins; otherwise group by the timestamp's year.
$subFolder = if ($TargetSubFolder) {
$TargetSubFolder
} else {
$fileTime.Year.ToString()
}
$targetFolder = Join-Path $TargetFolderBase $subFolder
$targetPath = Join-Path $targetFolder $Name
# Ensure target folder exists (even in Dryrun mode for reporting purposes)
if (-not (Test-Path $targetFolder)) {
try {
if (-not $Dryrun) {
New-Item -ItemType Directory -Path $targetFolder -Force -ErrorAction Stop | Out-Null
}
}
catch {
# Folder creation failed: report the failure row without attempting a move.
return [PSCustomObject]@{
Name = $Name
SourceFolder = $DirectoryName
SizeInBytes = $Length
SizeInMB = [math]::Round($Length / 1MB, 2)
CreateTime = $CreationTime.ToString("yyyy-MM-dd HH:mm:ss")
ModifiedTime = $LastWriteTime.ToString("yyyy-MM-dd HH:mm:ss")
AccessTime = $LastAccessTime.ToString("yyyy-MM-dd HH:mm:ss")
TargetFolder = $targetFolder
MoveStatus = "Failed: Unable to create target folder - $_"
}
}
}
$moveResult = Move-FileWithStatus -SourcePath $FullName -TargetPath $targetPath -DuplicateHandling $DuplicateHandling -MaxRetries $MaxRetries -Credential $Credential -Dryrun:$Dryrun
return [PSCustomObject]@{
Name = $Name
SourceFolder = $DirectoryName
SizeInBytes = $Length
SizeInMB = [math]::Round($Length / 1MB, 2)
CreateTime = $CreationTime.ToString("yyyy-MM-dd HH:mm:ss")
ModifiedTime = $LastWriteTime.ToString("yyyy-MM-dd HH:mm:ss")
AccessTime = $LastAccessTime.ToString("yyyy-MM-dd HH:mm:ss")
TargetFolder = $targetFolder
MoveStatus = $moveResult
}
}
foreach ($file in $Files) {
# Throttle: when the pool is full, reap one finished job before starting
# another, so at most $MaxParallelJobs run concurrently.
while ($jobs.Count -ge $MaxParallelJobs) {
$completedJob = Wait-Job -Job $jobs -Any
$jobResult = Receive-Job -Job $completedJob
$results += $jobResult
$jobs = $jobs | Where-Object { $_ -ne $completedJob }
Remove-Job -Job $completedJob
$processedCount++
$percentComplete = ($processedCount / $totalFiles) * 100
Write-Progress -Activity "Processing Files" -Status "Progress" -PercentComplete $percentComplete
}
$job = Start-Job -ScriptBlock $scriptBlock -ArgumentList $file.Name, $file.FullName, $file.DirectoryName, $file.Length, $file.CreationTime, $file.LastWriteTime, $file.LastAccessTime, $TargetFolderBase, $TargetSubFolder, $CutoffTimeType, $DuplicateHandling, $MaxRetries, $Credential, $Dryrun
$jobs += $job
}
# Process any remaining jobs
while ($jobs.Count -gt 0) {
$completedJob = Wait-Job -Job $jobs -Any
$jobResult = Receive-Job -Job $completedJob
$results += $jobResult
$jobs = $jobs | Where-Object { $_ -ne $completedJob }
Remove-Job -Job $completedJob
$processedCount++
$percentComplete = ($processedCount / $totalFiles) * 100
Write-Progress -Activity "Processing Files" -Status "Progress" -PercentComplete $percentComplete
}
return $results
}
# Main script execution
try {
    $startTime = Get-Date

    # Merge config-file values and check required inputs before any file work.
    Import-Configuration
    Validate-Parameters

    # Determine initial TargetFolder for logging purposes only; the per-file
    # target may differ when the subfolder is derived from file timestamps.
    $initialTargetFolder = if ($TargetSubFolder) {
        Join-Path $TargetFolderBase $TargetSubFolder
    } else {
        $TargetFolderBase
    }

    # Record the effective run parameters for auditability.
    Write-Log "Script started with following parameters:" $LogFile
    Write-Log "SourceFolder: $SourceFolder" $LogFile
    Write-Log "TargetFolderBase: $TargetFolderBase" $LogFile
    Write-Log "TargetSubFolder: $TargetSubFolder" $LogFile
    Write-Log "Initial Target: $initialTargetFolder" $LogFile
    Write-Log "CutoffTime: $($CutoffTime.ToString('yyyy-MM-dd HH:mm:ss'))" $LogFile
    Write-Log "CutoffTimeType: $CutoffTimeType" $LogFile
    Write-Log "FileExtensions: $($FileExtensions -join ', ')" $LogFile
    Write-Log "MinSize: $MinSize" $LogFile
    Write-Log "MaxSize: $MaxSize" $LogFile
    Write-Log "DuplicateHandling: $DuplicateHandling" $LogFile
    Write-Log "IncrementalMode: $IncrementalMode" $LogFile
    Write-Log "MaxRetries: $MaxRetries" $LogFile
    Write-Log "MaxParallelJobs: $MaxParallelJobs" $LogFile
    Write-Log "Dryrun: $Dryrun" $LogFile
    if ($null -ne $Credential) {
        Write-Log "Using provided credentials for file operations" $LogFile
    }

    # Verify source folder exists.
    # NOTE(review): the FileSystem provider does not support -Credential on
    # Test-Path; network-share access may need New-PSDrive instead — confirm.
    $testPathParams = @{
        Path = $SourceFolder
        PathType = 'Container'
        ErrorAction = 'Stop'
    }
    if ($null -ne $Credential) {
        $testPathParams['Credential'] = $Credential
    }
    if (-not (Test-Path @testPathParams)) {
        throw "Source folder does not exist or is not accessible: $SourceFolder"
    }

    # Ensure target base folder exists (creation is skipped in Dryrun mode).
    $newItemParams = @{
        ItemType = 'Directory'
        Path = $TargetFolderBase
        Force = $true
        ErrorAction = 'Stop'
    }
    if ($null -ne $Credential) {
        $newItemParams['Credential'] = $Credential
    }
    if (-not (Test-Path $TargetFolderBase)) {
        try {
            if (-not $Dryrun) {
                New-Item @newItemParams | Out-Null
                # Fixed: previously logged "would be created" even on a real
                # run, misreporting that the folder had actually been created.
                Write-Log "Target base folder created: $TargetFolderBase" $LogFile
            } else {
                Write-Log "Target base folder would be created: $TargetFolderBase" $LogFile
            }
        }
        catch {
            throw "Failed to create target base folder: $TargetFolderBase. Error: $_"
        }
    }

    # Enumerate and filter candidate files; exit quietly when nothing matches.
    Write-Log "Attempting to get files to move..." $LogFile
    $filesToMove = Get-FilesToMove
    if ($null -eq $filesToMove -or @($filesToMove).Count -eq 0) {
        Write-Log "No files found to move. Exiting." $LogFile
        return
    }
    $totalFiles = @($filesToMove).Count
    Write-Log "Found $totalFiles files to move" $LogFile

    # Move (or simulate moving) the files using the background-job pool.
    $activityDescription = if ($Dryrun) { "Simulating File Processing" } else { "Processing Files" }
    Write-Log "Starting to $activityDescription" $LogFile
    $csvData = Process-FilesInParallel -Files $filesToMove -TargetFolderBase $TargetFolderBase -TargetSubFolder $TargetSubFolder -CutoffTimeType $CutoffTimeType -DuplicateHandling $DuplicateHandling -MaxRetries $MaxRetries -MaxParallelJobs $MaxParallelJobs -Credential $Credential -Dryrun:$Dryrun
    Write-Log "Finished $activityDescription" $LogFile

    # Export CSV report with the agreed column set and log summary counts.
    if ($null -ne $csvData -and @($csvData).Count -gt 0) {
        $csvData | Select-Object Name, SourceFolder, SizeInBytes, SizeInMB, CreateTime, ModifiedTime, AccessTime, TargetFolder, MoveStatus | Export-Csv -Path $CsvFile -NoTypeInformation
        Write-Log "CSV report exported to $CsvFile" $LogFile
        $totalProcessed = @($csvData).Count
        $successfulMoves = @($csvData | Where-Object { $_.MoveStatus -eq 'Success' -or $_.MoveStatus -eq 'ToBeMoved' }).Count
        $failedMoves = @($csvData | Where-Object { $_.MoveStatus -like 'Failed*' }).Count
        $skippedFiles = @($csvData | Where-Object { $_.MoveStatus -eq 'Skipped: File already exists' }).Count
        Write-Log "Total files processed: $totalProcessed" $LogFile
        Write-Log "Successful moves (or to be moved): $successfulMoves" $LogFile
        Write-Log "Failed moves: $failedMoves" $LogFile
        Write-Log "Skipped files: $skippedFiles" $LogFile
    } else {
        Write-Log "No files were processed. Check the log for details." $LogFile
    }

    $endTime = Get-Date
    $duration = $endTime - $startTime
    $modeDescription = if ($Dryrun) { "Dryrun" } else { "Actual run" }
    Write-Log "Script completed successfully ($modeDescription). Duration: $($duration.TotalSeconds) seconds" $LogFile
}
catch {
    Write-Log "An error occurred in the main script execution: $_" $LogFile "ERROR"
    Write-Log "Error details: $($_.Exception.Message)" $LogFile "ERROR"
    Write-Log "Stack trace: $($_.ScriptStackTrace)" $LogFile "ERROR"
    throw
}
finally {
    # Always clear the progress bar, even when the run aborts mid-way.
    Write-Progress -Activity "Processing Files" -Completed
}

FileMover Script How-To Guide

1. Overview

The FileMover script is a PowerShell tool designed for efficient file management. It enables parallel processing of file movements from a source to a target location, with support for various filtering criteria, credential-based access, and a Dryrun mode for simulating file moves without actually moving files.

2. Prerequisites

  • PowerShell 5.1 or later (compatible with Windows PowerShell, not requiring PowerShell Core)
  • Windows operating system
  • Appropriate permissions for source and target folders

3. Installation

Save the script as FileMover.ps1 in your preferred location.

4. Configuration

The script supports configuration via command-line parameters or a JSON configuration file. To use a config file:

  1. Create a JSON file (e.g., config.json) with the following structure:
    {
      "SourceFolder": "C:\\Source",
      "TargetFolderBase": "D:\\Target",
      "TargetSubFolder": "",
      "CutoffTime": "2023-08-01 00:00:00",
      "CutoffTimeType": "ModifyTime",
      "FileExtensions": [".txt", ".log"],
      "MinSize": 1024,
      "MaxSize": 1048576,
      "DuplicateHandling": "Rename",
      "IncrementalMode": false,
      "MaxRetries": 3,
      "MaxParallelJobs": 5,
      "Dryrun": false
    }
  2. Reference this file using the -ConfigFile parameter when running the script.

5. Usage

Basic usage:

.\FileMover.ps1 -SourceFolder "C:\Source" -TargetFolderBase "D:\Target" -CutoffTime "2023-08-01 00:00:00"

Advanced usage with Dryrun:

.\FileMover.ps1 -SourceFolder "C:\Source" `
                -TargetFolderBase "D:\Target" `
                -TargetSubFolder "2024" `
                -CutoffTime "2024-08-01 18:00:00" `
                -CutoffTimeType "ModifyTime" `
                -DuplicateHandling "Rename" `
                -MaxParallelJobs 10 `
                -Dryrun `
                -Verbose

With configuration file:

.\FileMover.ps1 -ConfigFile "config.json"

With credentials (for network shares):

$cred = Get-Credential
.\FileMover.ps1 -SourceFolder "\\server\share" -TargetFolderBase "D:\Target" -CutoffTime "2023-08-01 00:00:00" -Credential $cred

6. Parameters

Parameter Type Required Description
ConfigFile String No Path to JSON configuration file
SourceFolder String Yes* Source folder path
TargetFolderBase String Yes* Base path for target folder
TargetSubFolder String No Subfolder within TargetFolderBase (default: empty, auto-generated based on file time)
CutoffTime DateTime Yes* Files whose selected timestamp (per CutoffTimeType) is before this time will be moved
CutoffTimeType String No Type of time to use for cutoff: "CreateTime", "ModifyTime", or "AccessTime" (default: "ModifyTime")
LogFile String No Custom log file path
CsvFile String No Custom CSV report file path
FileExtensions String[] No Array of file extensions to include
MinSize Long No Minimum file size in bytes
MaxSize Long No Maximum file size in bytes
DuplicateHandling String No How to handle duplicates: "Rename", "Skip", or "Overwrite"
IncrementalMode Switch No Only move files modified in the last 24 hours
MaxRetries Int No Maximum number of retries for failed moves
MaxParallelJobs Int No Maximum number of parallel jobs
Credential PSCredential No Credentials for network access
Dryrun Switch No Simulate file moves without actually moving files

*Required unless specified in config file

7. Dryrun Mode

When the -Dryrun switch is used, the script will:

  • Simulate the file move process without actually moving any files
  • Report files that would be moved as "ToBeMoved" in the CSV report
  • Not create any folders in the target location
  • Provide a full report of what would happen in an actual run

This mode is useful for testing and verifying the script's behavior before performing actual file moves.

8. Output

  1. Console output: Progress information and verbose logging (if -Verbose is used)
  2. Log file: Detailed execution log
  3. CSV report: Comprehensive file movement report

CSV Columns:

  • Name: File name
  • SourceFolder: Original file location
  • SizeInBytes: File size in bytes
  • SizeInMB: File size in megabytes
  • CreateTime: File creation time
  • ModifiedTime: Last modification time
  • AccessTime: Last access time
  • TargetFolder: Destination folder (including subfolder)
  • MoveStatus: Result of the move operation (or "ToBeMoved" in Dryrun mode)

9. Troubleshooting

  1. "Access Denied" errors:

    • Verify permissions on source and target folders
    • Ensure provided credentials have necessary access rights
  2. Files not moving:

    • Check CutoffTime parameter
    • Verify file extensions and size limits
    • Review CSV report for specific file statuses
  3. Performance issues:

    • Adjust MaxParallelJobs parameter
    • Consider network latency for remote operations
  4. Script fails to start:

    • Verify PowerShell version (5.1+)
    • Check execution policy

10. Best Practices

  • Always run the script with the -Verbose parameter during initial setup and testing.
  • Use Dryrun mode to verify script behavior before performing actual file moves.
  • Use a configuration file for complex or frequently used parameter sets.
  • Regularly review and archive log files and CSV reports.
  • Test the script with a small subset of files before running it on large data sets.
  • Schedule the script to run during off-peak hours if moving large numbers of files.
  • Use incremental mode for recurring operations.
  • Adjust the MaxParallelJobs parameter based on your system's capabilities and the nature of the files being moved.
  • Use TargetSubFolder parameter when you want to override the automatic subfolder creation based on file timestamps.
  • Monitor system resources during large file moves.

11. Security Considerations

  1. The script moves files rather than copying them: a successfully moved file is removed from the source folder and placed in the target folder. The script never deletes a file without relocating it.
  2. Avoid hardcoding credentials in scripts or config files.
  3. Be cautious when moving files from untrusted sources.
  4. Use Dryrun mode to verify script behavior in sensitive environments.

12. Performance Tuning

  1. Adjust MaxParallelJobs based on available system resources.
  2. Optimize file filtering to reduce initial file enumeration time.

13. Examples

  1. Basic local file move with automatic subfolder creation:

    .\FileMover.ps1 -SourceFolder "C:\OldFiles" -TargetFolderBase "D:\Archive" -CutoffTime "2023-01-01"
  2. Dryrun with specific file types and size limits:

    .\FileMover.ps1 -SourceFolder "E:\Data" -TargetFolderBase "F:\Backup" -TargetSubFolder "2023Q2" -CutoffTime "2023-06-01" -FileExtensions @(".txt", ".csv") -MinSize 1024 -MaxSize 1048576 -Dryrun
  3. Incremental mode with custom logging:

    .\FileMover.ps1 -SourceFolder "\\server\share" -TargetFolderBase "D:\DailyBackup" -CutoffTime (Get-Date).AddDays(-1) -IncrementalMode -CutoffTimeType "ModifyTime" -LogFile "C:\Logs\DailyMove.log" -Verbose
  4. High-parallelism network move with credentials:

    $cred = Get-Credential
    .\FileMover.ps1 -SourceFolder "\\sourceserver\data" -TargetFolderBase "\\targetserver\archive" -CutoffTime "2023-08-01" -Credential $cred -MaxParallelJobs 20 -MaxRetries 5
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment