diff --git a/.gitattributes b/.gitattributes new file mode 100644 index 00000000..eba1110b --- /dev/null +++ b/.gitattributes @@ -0,0 +1,2 @@ +# Auto detect text files and perform LF normalization +* text=auto \ No newline at end of file diff --git a/.gitignore b/.gitignore new file mode 100644 index 00000000..b88a6521 --- /dev/null +++ b/.gitignore @@ -0,0 +1,22 @@ + +# ignore the settings folder and files for VSCode and PSS +.vscode/* +*.psproj +*TempPoint* + +# Ignore staging info from Visual Studio +library/d365fo.tools/.vs/* +library/d365fo.tools/d365fo.tools/bin/* +library/d365fo.tools/d365fo.tools/obj/* + +# ignore PowerShell Studio MetaData +d365fo.tools/d365fo.tools.psproj +d365fo.tools/d365fo.tools.psproj.bak +d365fo.tools/d365fo.tools.psprojs +d365fo.tools/d365fo.tools.psproj + +# ignore the TestResults +TestResults/* + +# ignore the publishing Directory +publish/* \ No newline at end of file diff --git a/LICENSE b/LICENSE index ff5f8b5b..1450602c 100644 --- a/LICENSE +++ b/LICENSE @@ -1,6 +1,6 @@ MIT License -Copyright (c) 2018 d365collaborative +Copyright (c) 2018 Motz Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal @@ -18,4 +18,4 @@ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. +SOFTWARE. \ No newline at end of file diff --git a/build/filesAfter.txt b/build/filesAfter.txt new file mode 100644 index 00000000..1e8d104a --- /dev/null +++ b/build/filesAfter.txt @@ -0,0 +1,7 @@ +# List all files that are loaded in the preimport.ps1 +# In the order they are loaded during preimport + +internal\configurations\*.ps1 +internal\tepp\*.tepp.ps1 +internal\tepp\assignment.ps1 +internal\scripts\license.ps1 \ No newline at end of file diff --git a/build/filesBefore.txt b/build/filesBefore.txt new file mode 100644 index 00000000..5c93018f --- /dev/null +++ b/build/filesBefore.txt @@ -0,0 +1,2 @@ +# List all files that are loaded in the postimport.ps1 +# In the order they are loaded during postimport diff --git a/build/vsts-build.ps1 b/build/vsts-build.ps1 new file mode 100644 index 00000000..63db1911 --- /dev/null +++ b/build/vsts-build.ps1 @@ -0,0 +1,65 @@ +<# +This script publishes the module to the gallery. +It expects as input an ApiKey authorized to publish the module. + +Insert any build steps you may need to take before publishing it here. 
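+
+A minimal local invocation could look like the following (an assumed usage example, not part of the pipeline definition; the environment variable name is only a placeholder, the hosted build normally passes the ApiKey in as a secret):
+
+    .\vsts-build.ps1 -ApiKey $env:PSGALLERY_APIKEY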
+#> +param ( + $ApiKey +) + +# Prepare publish folder +Write-PSFMessage -Level Important -Message "Creating and populating publishing directory" +$publishDir = New-Item -Path $env:SYSTEM_DEFAULTWORKINGDIRECTORY -Name publish -ItemType Directory +Copy-Item -Path "$($env:SYSTEM_DEFAULTWORKINGDIRECTORY)\d365fo.tools" -Destination $publishDir.FullName -Recurse -Force + +# Create commands.ps1 +$text = @() +Get-ChildItem -Path "$($publishDir.FullName)\d365fo.tools\internal\functions\" -Recurse -File -Filter "*.ps1" | ForEach-Object { + $text += [System.IO.File]::ReadAllText($_.FullName) +} +Get-ChildItem -Path "$($publishDir.FullName)\d365fo.tools\functions\" -Recurse -File -Filter "*.ps1" | ForEach-Object { + $text += [System.IO.File]::ReadAllText($_.FullName) +} +$text -join "`n`n" | Set-Content -Path "$($publishDir.FullName)\d365fo.tools\commands.ps1" + +# Create resourcesBefore.ps1 +$processed = @() +$text = @() +foreach ($line in (Get-Content "$($PSScriptRoot)\filesBefore.txt" | Where-Object { $_ -notlike "#*" })) +{ + if ([string]::IsNullOrWhiteSpace($line)) { continue } + + $basePath = Join-Path "$($publishDir.FullName)\d365fo.tools" $line + foreach ($entry in (Resolve-PSFPath -Path $basePath)) + { + $item = Get-Item $entry + if ($item.PSIsContainer) { continue } + if ($item.FullName -in $processed) { continue } + $text += [System.IO.File]::ReadAllText($item.FullName) + $processed += $item.FullName + } +} +if ($text) { $text -join "`n`n" | Set-Content -Path "$($publishDir.FullName)\d365fo.tools\resourcesBefore.ps1" } + +# Create resourcesAfter.ps1 +$processed = @() +$text = @() +foreach ($line in (Get-Content "$($PSScriptRoot)\filesAfter.txt" | Where-Object { $_ -notlike "#*" })) +{ + if ([string]::IsNullOrWhiteSpace($line)) { continue } + + $basePath = Join-Path "$($publishDir.FullName)\d365fo.tools" $line + foreach ($entry in (Resolve-PSFPath -Path $basePath)) + { + $item = Get-Item $entry + if ($item.PSIsContainer) { continue } + if ($item.FullName -in $processed) { continue } + $text += [System.IO.File]::ReadAllText($item.FullName) + $processed += $item.FullName + } +} +if ($text) { $text -join "`n`n" | Set-Content -Path "$($publishDir.FullName)\d365fo.tools\resourcesAfter.ps1" } + +# Publish to Gallery +Publish-Module -Path "$($publishDir.FullName)\d365fo.tools" -NuGetApiKey $ApiKey -Force \ No newline at end of file diff --git a/build/vsts-prerequisites.ps1 b/build/vsts-prerequisites.ps1 new file mode 100644 index 00000000..1e3c6579 --- /dev/null +++ b/build/vsts-prerequisites.ps1 @@ -0,0 +1,10 @@ +Write-Host "Installing Pester" -ForegroundColor Cyan +Install-Module Pester -Force -SkipPublisherCheck +Write-Host "Installing PSFramework" -ForegroundColor Cyan +Install-Module PSFramework -Force -SkipPublisherCheck +Write-Host "Installing PSScriptAnalyzer" -ForegroundColor Cyan +Install-Module PSScriptAnalyzer -Force -SkipPublisherCheck +Write-Host "Installing Azure.Storage" -ForegroundColor Cyan +Install-Module Azure.Storage -Force -SkipPublisherCheck +Write-Host "Installing AzureAd" -ForegroundColor Cyan +Install-Module AzureAd -Force -SkipPublisherCheck \ No newline at end of file diff --git a/build/vsts-validate.ps1 b/build/vsts-validate.ps1 new file mode 100644 index 00000000..92e760a1 --- /dev/null +++ b/build/vsts-validate.ps1 @@ -0,0 +1,7 @@ +# Guide for available variables and working with secrets: +# https://docs.microsoft.com/en-us/vsts/build-release/concepts/definitions/build/variables?tabs=powershell + +# Needs to ensure things are Done Right and only legal commits to 
master get built + +# Run internal pester tests +& "$PSScriptRoot\..\d365fo.tools\tests\pester.ps1" \ No newline at end of file diff --git a/d365fo.tools.psd1 b/d365fo.tools.psd1 deleted file mode 100644 index 3d4c7c14..00000000 --- a/d365fo.tools.psd1 +++ /dev/null @@ -1,231 +0,0 @@ -@{ - - # Script module or binary module file associated with this manifest. - RootModule = 'd365fo.tools.psm1' - - # Version number of this module. - ModuleVersion = '0.4.09' - - # Supported PSEditions - # CompatiblePSEditions = @() - - # ID used to uniquely identify this module - GUID = '7c7b26d4-f764-4cb0-a692-459a0a689dbb' - - # Author of this module - Author = 'Motz Jensen & Rasmus Andersen' - - # Company or vendor of this module - CompanyName = 'Essence Solutions' - - # Copyright statement for this module - Copyright = '(c) 2018 Motz Jensen & Rasmus Andersen. All rights reserved.' - - # Description of the functionality provided by this module - Description = 'A set of tools that will assist you when working with Dynamics 365 Finance & Operations development / demo machines.' - - # Minimum version of the Windows PowerShell engine required by this module - PowerShellVersion = '3.0' - - # Name of the Windows PowerShell host required by this module - PowerShellHostName = '' - - # Minimum version of the Windows PowerShell host required by this module - PowerShellHostVersion = '' - - # Minimum version of Microsoft .NET Framework required by this module. This prerequisite is valid for the PowerShell Desktop edition only. - DotNetFrameworkVersion = '' - - # Minimum version of the common language runtime (CLR) required by this module. This prerequisite is valid for the PowerShell Desktop edition only. - CLRVersion = '' - - # Processor architecture (None, X86, Amd64) required by this module - ProcessorArchitecture = '' - - # Modules that must be imported into the global environment prior to importing this module - RequiredModules = @( - @{ ModuleName = 'PSFramework'; ModuleVersion = '0.9.24.85' }, - @{ ModuleName = 'Azure.Storage'; ModuleVersion = '4.4.0' }, #4.3.1 - @{ ModuleName = 'AzureAd'; ModuleVersion = '2.0.1.16' } - - ) - - # Assemblies that must be loaded prior to importing this module - RequiredAssemblies = @() - - # Script files (.ps1) that are run in the caller's environment prior to importing this module. - ScriptsToProcess = @() - - # Type files (.ps1xml) to be loaded when importing this module - TypesToProcess = @() - - # Format files (.ps1xml) to be loaded when importing this module - FormatsToProcess = @() - - # Modules to import as nested modules of the module specified in RootModule/ModuleToProcess - NestedModules = @() - - # Functions to export from this module, for best performance, do not use wildcards and do not delete the entry, use an empty array if there are no functions to export. 
- FunctionsToExport = @( - 'Add-D365AzureStorageConfig', - 'Add-D365EnvironmentConfig', - - 'Backup-D365MetaDataDir', - - 'Disable-D365MaintenanceMode' - 'Disable-D365User', - - 'Enable-D365MaintenanceMode', - 'Enable-D365User', - - 'Get-D365ActiveAzureStorageConfig', - 'Get-D365ActiveEnvironmentConfig', - - 'Get-D365AOTObjects', - - 'Get-D365AzureStorageConfig', - 'Get-D365AzureStorageFile', - - 'Get-D365DatabaseAccess', - 'Get-D365DecryptedConfigFile', - 'Get-D365DotNetClass', - 'Get-D365DotNetMethod', - - 'Get-D365Environment', - 'Get-D365EnvironmentConfig', - 'Get-D365EnvironmentSettings', - 'Get-D365ExposedService', - - 'Get-D365InstalledHotfix', - 'Get-D365InstalledPackage', - 'Get-D365InstalledService', - 'Get-D365InstanceName', - - 'Get-D365Label', - 'Get-D365LogicAppConfig', - 'Get-D365OfflineAuthenticationAdminEmail', - - 'Get-D365PackageBundleDetail', - 'Get-D365PackageLabelFile', - 'Get-D365ProductInformation', - - 'Get-D365SDPCleanUp', - 'Get-D365Table', - 'Get-D365TableField', - 'Get-D365TableSequence', - 'Get-D365TfsUri', - 'Get-D365TfsWorkspace', - - 'Get-D365Url', - 'Get-D365User', - 'Get-D365UserAuthenticationDetail', - 'Get-D365WindowsActivationStatus', - - 'Import-D365AadUser', - 'Import-D365Bacpac', - 'Import-D365BacpacOldVersion', - - 'Initialize-D365Config', - - 'Invoke-D365AzureStorageDownload', - 'Invoke-D365AzureStorageUpload', - - 'Invoke-D365DataFlush', - 'Invoke-D365DBSync', - 'Invoke-D365InstallLicense', - 'Invoke-D365LogicApp', - - 'Invoke-D365ModelUtil', - 'Invoke-D365ReArmWindows', - - 'Invoke-D365SDPInstall', - 'Invoke-D365SCDPBundleInstall', - 'Invoke-D365SpHelp', - 'Invoke-D365SysFlushAodCache', - 'Invoke-D365SysRunnerClass', - - 'Invoke-D365TableBrowser', - - 'New-D365Bacpac', - 'New-D365BacpacOldVersion', - 'New-D365TopologyFile', - - 'Remove-D365Database', - 'Remove-D365User', - - 'Rename-D365Instance', - 'Rename-D365ComputerName', - - 'Set-D365ActiveAzureStorageConfig', - 'Set-D365ActiveEnvironmentConfig', - 'Set-D365Admin', - - 'Set-D365FavoriteBookmark', - 'Set-D365LogicAppConfig', - 'Set-D365OfflineAuthenticationAdminEmail', - 'Set-D365SDPCleanUp', - 'Set-D365StartPage', - 'Set-D365SysAdmin', - 'Set-D365WorkstationMode', - - 'Start-D365Environment', - - 'Stop-D365Environment', - - 'Switch-D365ActiveDatabase', - - 'Update-D365User' - - - ) - - # Cmdlets to export from this module, for best performance, do not use wildcards and do not delete the entry, use an empty array if there are no cmdlets to export. - CmdletsToExport = @() - - # Variables to export from this module - VariablesToExport = @() - - # Aliases to export from this module, for best performance, do not use wildcards and do not delete the entry, use an empty array if there are no aliases to export. - AliasesToExport = @() - - # DSC resources to export from this module - # DscResourcesToExport = @() - - # List of all modules packaged with this module - ModuleList = @() - - # List of all files packaged with this module - FileList = @() - - # Private data to pass to the module specified in RootModule/ModuleToProcess. This may also contain a PSData hashtable with additional module metadata used by PowerShell. - PrivateData = @{ - PSData = @{ - # Tags applied to this module. These help with module discovery in online galleries. - Tags = @('d365fo', 'Dynamics365', 'D365', 'Finance&Operations', 'FinanceOperations', 'FinanceAndOperations', 'Dynamics365FO') - - # A URL to the license for this module. 
- LicenseUri = "https://opensource.org/licenses/MIT" - - # A URL to the main website for this project. - ProjectUri = 'https://github.com/d365collaborative/d365fo.tools' - - # A URL to an icon representing this module. - # IconUri = '' - - # ReleaseNotes of this module - # ReleaseNotes = '' - - # Indicates this is a pre-release/testing version of the module. - IsPrerelease = 'True' - - } # End of PSData hashtable - - } # End of PrivateData hashtable - - # HelpInfo URI of this module - # HelpInfoURI = '' - - # Default prefix for commands exported from this module. Override the default prefix using Import-Module -Prefix. - # DefaultCommandPrefix = '' - -} \ No newline at end of file diff --git a/d365fo.tools/bin/readme.md b/d365fo.tools/bin/readme.md new file mode 100644 index 00000000..19ecbae4 --- /dev/null +++ b/d365fo.tools/bin/readme.md @@ -0,0 +1,7 @@ +# bin folder + +The bin folder exists to store binary data. And scripts related to the type system. + +This may include your own C#-based library, third party libraries you want to include (watch the license!), or a script declaring type accelerators (effectively aliases for .NET types) + +For more information on Type Accelerators, see the help on Set-PSFTypeAlias \ No newline at end of file diff --git a/d365fo.tools/d365fo.tools.psd1 b/d365fo.tools/d365fo.tools.psd1 new file mode 100644 index 00000000..53ec88bf --- /dev/null +++ b/d365fo.tools/d365fo.tools.psd1 @@ -0,0 +1,196 @@ +@{ + # Script module or binary module file associated with this manifest + ModuleToProcess = 'd365fo.tools.psm1' + + # Version number of this module. + ModuleVersion = '0.4.09' + + # ID used to uniquely identify this module + GUID = '7c7b26d4-f764-4cb0-a692-459a0a689dbb' + + # Author of this module + Author = 'Motz Jensen & Rasmus Andersen' + + # Company or vendor of this module + CompanyName = 'Essence Solutions' + + # Copyright statement for this module + Copyright = '(c) 2018 Motz Jensen & Rasmus Andersen. All rights reserved.' + + # Description of the functionality provided by this module + Description = 'A set of tools that will assist you when working with Dynamics 365 Finance & Operations development / demo machines.' 
+ + # Minimum version of the Windows PowerShell engine required by this module + PowerShellVersion = '5.0' + + # Modules that must be imported into the global environment prior to importing + # this module + RequiredModules = @( + @{ ModuleName = 'PSFramework'; ModuleVersion = '0.9.25.113' }, + @{ ModuleName = 'Azure.Storage'; ModuleVersion = '4.4.0' }, #4.3.1 + @{ ModuleName = 'AzureAd'; ModuleVersion = '2.0.1.16' } + ) + + # Assemblies that must be loaded prior to importing this module + # RequiredAssemblies = @('bin\d365fo.tools.dll') + + # Type files (.ps1xml) to be loaded when importing this module + # TypesToProcess = @('xml\d365fo.tools.Types.ps1xml') + + # Format files (.ps1xml) to be loaded when importing this module + # FormatsToProcess = @('xml\d365fo.tools.Format.ps1xml') + + # Functions to export from this module + FunctionsToExport = @( + 'Add-D365AzureStorageConfig', + 'Add-D365EnvironmentConfig', + + 'Backup-D365MetaDataDir', + + 'Disable-D365MaintenanceMode' + 'Disable-D365User', + + 'Enable-D365MaintenanceMode', + 'Enable-D365User', + + 'Get-D365ActiveAzureStorageConfig', + 'Get-D365ActiveEnvironmentConfig', + + 'Get-D365AOTObjects', + + 'Get-D365AzureStorageConfig', + 'Get-D365AzureStorageFile', + + 'Get-D365DatabaseAccess', + 'Get-D365DecryptedConfigFile', + 'Get-D365DotNetClass', + 'Get-D365DotNetMethod', + + 'Get-D365Environment', + 'Get-D365EnvironmentConfig', + 'Get-D365EnvironmentSettings', + 'Get-D365ExposedService', + + 'Get-D365InstalledHotfix', + 'Get-D365InstalledPackage', + 'Get-D365InstalledService', + 'Get-D365InstanceName', + + 'Get-D365Label', + 'Get-D365LogicAppConfig', + 'Get-D365OfflineAuthenticationAdminEmail', + + 'Get-D365PackageBundleDetail', + 'Get-D365PackageLabelFile', + 'Get-D365ProductInformation', + + 'Get-D365SDPCleanUp', + 'Get-D365Table', + 'Get-D365TableField', + 'Get-D365TableSequence', + 'Get-D365TfsUri', + 'Get-D365TfsWorkspace', + + 'Get-D365Url', + 'Get-D365User', + 'Get-D365UserAuthenticationDetail', + 'Get-D365WindowsActivationStatus', + + 'Import-D365AadUser', + 'Import-D365Bacpac', + 'Import-D365BacpacOldVersion', + + 'Initialize-D365Config', + + 'Invoke-D365AzureStorageDownload', + 'Invoke-D365AzureStorageUpload', + + 'Invoke-D365DataFlush', + 'Invoke-D365DBSync', + 'Invoke-D365InstallLicense', + 'Invoke-D365LogicApp', + + 'Invoke-D365ModelUtil', + 'Invoke-D365ReArmWindows', + + 'Invoke-D365SDPInstall', + 'Invoke-D365SCDPBundleInstall', + 'Invoke-D365SpHelp', + 'Invoke-D365SysFlushAodCache', + 'Invoke-D365SysRunnerClass', + + 'Invoke-D365TableBrowser', + + 'New-D365Bacpac', + 'New-D365BacpacOldVersion', + 'New-D365TopologyFile', + + 'Remove-D365Database', + 'Remove-D365User', + + 'Rename-D365Instance', + 'Rename-D365ComputerName', + + 'Set-D365ActiveAzureStorageConfig', + 'Set-D365ActiveEnvironmentConfig', + 'Set-D365Admin', + + 'Set-D365FavoriteBookmark', + 'Set-D365LogicAppConfig', + 'Set-D365OfflineAuthenticationAdminEmail', + 'Set-D365SDPCleanUp', + 'Set-D365StartPage', + 'Set-D365SysAdmin', + 'Set-D365WorkstationMode', + + 'Start-D365Environment', + + 'Stop-D365Environment', + + 'Switch-D365ActiveDatabase', + + 'Update-D365User' + ) + + # Cmdlets to export from this module + CmdletsToExport = '' + + # Variables to export from this module + VariablesToExport = '' + + # Aliases to export from this module + AliasesToExport = '' + + # List of all modules packaged with this module + ModuleList = @() + + # List of all files packaged with this module + FileList = @() + + # Private data to pass to the module specified in 
ModuleToProcess. This may also contain a PSData hashtable with additional module metadata used by PowerShell. + PrivateData = @{ + + #Support for PowerShellGet galleries. + PSData = @{ + # Tags applied to this module. These help with module discovery in online galleries. + Tags = @('d365fo', 'Dynamics365', 'D365', 'Finance&Operations', 'FinanceOperations', 'FinanceAndOperations', 'Dynamics365FO') + + # A URL to the license for this module. + LicenseUri = "https://opensource.org/licenses/MIT" + + # A URL to the main website for this project. + ProjectUri = 'https://github.com/d365collaborative/d365fo.tools' + + # A URL to an icon representing this module. + # IconUri = '' + + # ReleaseNotes of this module + # ReleaseNotes = '' + + # Indicates this is a pre-release/testing version of the module. + IsPrerelease = 'True' + + } # End of PSData hashtable + + } # End of PrivateData hashtable +} \ No newline at end of file diff --git a/d365fo.tools/d365fo.tools.psm1 b/d365fo.tools/d365fo.tools.psm1 new file mode 100644 index 00000000..6bec9aec --- /dev/null +++ b/d365fo.tools/d365fo.tools.psm1 @@ -0,0 +1,85 @@ +$script:ModuleRoot = $PSScriptRoot +$script:ModuleVersion = "0.4.09" + +# Detect whether at some level dotsourcing was enforced +$script:doDotSource = Get-PSFConfigValue -FullName d365fo.tools.Import.DoDotSource -Fallback $false +if ($d365fo.tools_dotsourcemodule) { $script:doDotSource = $true } + +<# +Note on Resolve-Path: +All paths are sent through Resolve-Path/Resolve-PSFPath in order to convert them to the correct path separator. +This allows ignoring path separators throughout the import sequence, which could otherwise cause trouble depending on OS. +Resolve-Path can only be used for paths that already exist, Resolve-PSFPath can accept that the last leaf may not exist. +This is important when testing for paths. +#> + +# Detect whether at some level loading individual module files, rather than the compiled module was enforced +$importIndividualFiles = Get-PSFConfigValue -FullName d365fo.tools.Import.IndividualFiles -Fallback $false +if ($d365fo.tools_importIndividualFiles) { $importIndividualFiles = $true } +if (Test-Path (Resolve-PSFPath -Path "$($script:ModuleRoot)\..\.git" -SingleItem -NewChild)) { $importIndividualFiles = $true } +if (-not (Test-Path (Resolve-PSFPath "$($script:ModuleRoot)\commands.ps1" -SingleItem -NewChild))) { $importIndividualFiles = $true } + +function Import-ModuleFile +{ + <# + .SYNOPSIS + Loads files into the module on module import. + + .DESCRIPTION + This helper function is used during module initialization. + It should always be dotsourced itself, in order to function properly. + + This provides a central location to react to files being imported, if later desired. + + .PARAMETER Path + The path to the file to load + + .EXAMPLE + PS C:\> . Import-ModuleFile -Path $function.FullName + + Imports the file stored in $function according to import policy + #> + [CmdletBinding()] + Param ( + [string] + $Path + ) + + if ($doDotSource) { . (Resolve-Path $Path) } + else { $ExecutionContext.InvokeCommand.InvokeScript($false, ([scriptblock]::Create([io.file]::ReadAllText((Resolve-Path $Path)))), $null, $null) } +} + +if ($importIndividualFiles) +{ + # Execute Preimport actions + . Import-ModuleFile -Path "$ModuleRoot\internal\scripts\preimport.ps1" + + # Import all internal functions + foreach ($function in (Get-ChildItem "$ModuleRoot\internal\functions" -Filter "*.ps1" -Recurse -ErrorAction Ignore)) + { + .
Import-ModuleFile -Path $function.FullName + } + + # Import all public functions + foreach ($function in (Get-ChildItem "$ModuleRoot\functions" -Filter "*.ps1" -Recurse -ErrorAction Ignore)) + { + . Import-ModuleFile -Path $function.FullName + } + + # Execute Postimport actions + . Import-ModuleFile -Path "$ModuleRoot\internal\scripts\postimport.ps1" +} +else +{ + if (Test-Path (Resolve-PSFPath "$($script:ModuleRoot)\resourcesBefore.ps1" -SingleItem -NewChild)) + { + . Import-ModuleFile -Path "$($script:ModuleRoot)\resourcesBefore.ps1" + } + + . Import-ModuleFile -Path "$($script:ModuleRoot)\commands.ps1" + + if (Test-Path (Resolve-PSFPath "$($script:ModuleRoot)\resourcesAfter.ps1" -SingleItem -NewChild)) + { + . Import-ModuleFile -Path "$($script:ModuleRoot)\resourcesAfter.ps1" + } +} \ No newline at end of file diff --git a/d365fo.tools/en-us/about_d365fo.tools.help.txt b/d365fo.tools/en-us/about_d365fo.tools.help.txt new file mode 100644 index 00000000..4a068544 --- /dev/null +++ b/d365fo.tools/en-us/about_d365fo.tools.help.txt @@ -0,0 +1,11 @@ +TOPIC + about_d365fo.tools + +SHORT DESCRIPTION + Explains how to use the d365fo.tools powershell module + +LONG DESCRIPTION + + +KEYWORDS + d365fo.tools \ No newline at end of file diff --git a/functions/Import-d365bacpacoldversion.ps1 b/d365fo.tools/functions/Import-d365bacpacoldversion.ps1 similarity index 100% rename from functions/Import-d365bacpacoldversion.ps1 rename to d365fo.tools/functions/Import-d365bacpacoldversion.ps1 diff --git a/functions/add-d365azurestorageconfig.ps1 b/d365fo.tools/functions/add-d365azurestorageconfig.ps1 similarity index 100% rename from functions/add-d365azurestorageconfig.ps1 rename to d365fo.tools/functions/add-d365azurestorageconfig.ps1 diff --git a/functions/add-d365environmentconfig.ps1 b/d365fo.tools/functions/add-d365environmentconfig.ps1 similarity index 100% rename from functions/add-d365environmentconfig.ps1 rename to d365fo.tools/functions/add-d365environmentconfig.ps1 diff --git a/functions/backup-d365metadatadir.ps1 b/d365fo.tools/functions/backup-d365metadatadir.ps1 similarity index 100% rename from functions/backup-d365metadatadir.ps1 rename to d365fo.tools/functions/backup-d365metadatadir.ps1 diff --git a/functions/disable-d365maintenancemode.ps1 b/d365fo.tools/functions/disable-d365maintenancemode.ps1 similarity index 100% rename from functions/disable-d365maintenancemode.ps1 rename to d365fo.tools/functions/disable-d365maintenancemode.ps1 diff --git a/functions/disable-d365user.ps1 b/d365fo.tools/functions/disable-d365user.ps1 similarity index 100% rename from functions/disable-d365user.ps1 rename to d365fo.tools/functions/disable-d365user.ps1 diff --git a/functions/enable-d365maintenancemode.ps1 b/d365fo.tools/functions/enable-d365maintenancemode.ps1 similarity index 100% rename from functions/enable-d365maintenancemode.ps1 rename to d365fo.tools/functions/enable-d365maintenancemode.ps1 diff --git a/functions/enable-d365user.ps1 b/d365fo.tools/functions/enable-d365user.ps1 similarity index 100% rename from functions/enable-d365user.ps1 rename to d365fo.tools/functions/enable-d365user.ps1 diff --git a/functions/get-d365activeazurestorageconfig.ps1 b/d365fo.tools/functions/get-d365activeazurestorageconfig.ps1 similarity index 100% rename from functions/get-d365activeazurestorageconfig.ps1 rename to d365fo.tools/functions/get-d365activeazurestorageconfig.ps1 diff --git a/functions/get-d365activeenvironmentconfig.ps1 b/d365fo.tools/functions/get-d365activeenvironmentconfig.ps1 similarity index 
100% rename from functions/get-d365activeenvironmentconfig.ps1 rename to d365fo.tools/functions/get-d365activeenvironmentconfig.ps1 diff --git a/functions/get-d365aotobjects.ps1 b/d365fo.tools/functions/get-d365aotobjects.ps1 similarity index 100% rename from functions/get-d365aotobjects.ps1 rename to d365fo.tools/functions/get-d365aotobjects.ps1 diff --git a/functions/get-d365azurestorageconfig.ps1 b/d365fo.tools/functions/get-d365azurestorageconfig.ps1 similarity index 100% rename from functions/get-d365azurestorageconfig.ps1 rename to d365fo.tools/functions/get-d365azurestorageconfig.ps1 diff --git a/functions/get-d365azurestoragefile.ps1 b/d365fo.tools/functions/get-d365azurestoragefile.ps1 similarity index 100% rename from functions/get-d365azurestoragefile.ps1 rename to d365fo.tools/functions/get-d365azurestoragefile.ps1 diff --git a/functions/get-d365clickoncetrustprompt.ps1 b/d365fo.tools/functions/get-d365clickoncetrustprompt.ps1 similarity index 100% rename from functions/get-d365clickoncetrustprompt.ps1 rename to d365fo.tools/functions/get-d365clickoncetrustprompt.ps1 diff --git a/functions/get-d365databaseaccess.ps1 b/d365fo.tools/functions/get-d365databaseaccess.ps1 similarity index 100% rename from functions/get-d365databaseaccess.ps1 rename to d365fo.tools/functions/get-d365databaseaccess.ps1 diff --git a/functions/get-d365decryptedconfigfile.ps1 b/d365fo.tools/functions/get-d365decryptedconfigfile.ps1 similarity index 100% rename from functions/get-d365decryptedconfigfile.ps1 rename to d365fo.tools/functions/get-d365decryptedconfigfile.ps1 diff --git a/functions/get-d365dotnetclass.ps1 b/d365fo.tools/functions/get-d365dotnetclass.ps1 similarity index 100% rename from functions/get-d365dotnetclass.ps1 rename to d365fo.tools/functions/get-d365dotnetclass.ps1 diff --git a/functions/get-d365dotnetmethod.ps1 b/d365fo.tools/functions/get-d365dotnetmethod.ps1 similarity index 100% rename from functions/get-d365dotnetmethod.ps1 rename to d365fo.tools/functions/get-d365dotnetmethod.ps1 diff --git a/functions/get-d365environment.ps1 b/d365fo.tools/functions/get-d365environment.ps1 similarity index 100% rename from functions/get-d365environment.ps1 rename to d365fo.tools/functions/get-d365environment.ps1 diff --git a/functions/get-d365environmentconfig.ps1 b/d365fo.tools/functions/get-d365environmentconfig.ps1 similarity index 100% rename from functions/get-d365environmentconfig.ps1 rename to d365fo.tools/functions/get-d365environmentconfig.ps1 diff --git a/functions/get-d365environmentsettings.ps1 b/d365fo.tools/functions/get-d365environmentsettings.ps1 similarity index 100% rename from functions/get-d365environmentsettings.ps1 rename to d365fo.tools/functions/get-d365environmentsettings.ps1 diff --git a/functions/get-d365exposedservice.ps1 b/d365fo.tools/functions/get-d365exposedservice.ps1 similarity index 100% rename from functions/get-d365exposedservice.ps1 rename to d365fo.tools/functions/get-d365exposedservice.ps1 diff --git a/functions/get-d365installedhotfix.ps1 b/d365fo.tools/functions/get-d365installedhotfix.ps1 similarity index 100% rename from functions/get-d365installedhotfix.ps1 rename to d365fo.tools/functions/get-d365installedhotfix.ps1 diff --git a/functions/get-d365installedpackage.ps1 b/d365fo.tools/functions/get-d365installedpackage.ps1 similarity index 100% rename from functions/get-d365installedpackage.ps1 rename to d365fo.tools/functions/get-d365installedpackage.ps1 diff --git a/functions/get-d365installedservice.ps1 
b/d365fo.tools/functions/get-d365installedservice.ps1 similarity index 100% rename from functions/get-d365installedservice.ps1 rename to d365fo.tools/functions/get-d365installedservice.ps1 diff --git a/functions/get-d365instancename.ps1 b/d365fo.tools/functions/get-d365instancename.ps1 similarity index 100% rename from functions/get-d365instancename.ps1 rename to d365fo.tools/functions/get-d365instancename.ps1 diff --git a/functions/get-d365label.ps1 b/d365fo.tools/functions/get-d365label.ps1 similarity index 100% rename from functions/get-d365label.ps1 rename to d365fo.tools/functions/get-d365label.ps1 diff --git a/functions/get-d365logicappconfig.ps1 b/d365fo.tools/functions/get-d365logicappconfig.ps1 similarity index 100% rename from functions/get-d365logicappconfig.ps1 rename to d365fo.tools/functions/get-d365logicappconfig.ps1 diff --git a/functions/get-d365offlineauthenticationadminemail.ps1 b/d365fo.tools/functions/get-d365offlineauthenticationadminemail.ps1 similarity index 100% rename from functions/get-d365offlineauthenticationadminemail.ps1 rename to d365fo.tools/functions/get-d365offlineauthenticationadminemail.ps1 diff --git a/functions/get-d365packagebundledetail.ps1 b/d365fo.tools/functions/get-d365packagebundledetail.ps1 similarity index 100% rename from functions/get-d365packagebundledetail.ps1 rename to d365fo.tools/functions/get-d365packagebundledetail.ps1 diff --git a/functions/get-d365packagelabelfile.ps1 b/d365fo.tools/functions/get-d365packagelabelfile.ps1 similarity index 100% rename from functions/get-d365packagelabelfile.ps1 rename to d365fo.tools/functions/get-d365packagelabelfile.ps1 diff --git a/functions/get-d365productinformation.ps1 b/d365fo.tools/functions/get-d365productinformation.ps1 similarity index 100% rename from functions/get-d365productinformation.ps1 rename to d365fo.tools/functions/get-d365productinformation.ps1 diff --git a/functions/get-d365sdpcleanup.ps1 b/d365fo.tools/functions/get-d365sdpcleanup.ps1 similarity index 100% rename from functions/get-d365sdpcleanup.ps1 rename to d365fo.tools/functions/get-d365sdpcleanup.ps1 diff --git a/functions/get-d365table.ps1 b/d365fo.tools/functions/get-d365table.ps1 similarity index 100% rename from functions/get-d365table.ps1 rename to d365fo.tools/functions/get-d365table.ps1 diff --git a/functions/get-d365tablefield.ps1 b/d365fo.tools/functions/get-d365tablefield.ps1 similarity index 100% rename from functions/get-d365tablefield.ps1 rename to d365fo.tools/functions/get-d365tablefield.ps1 diff --git a/functions/get-d365tablesequence.ps1 b/d365fo.tools/functions/get-d365tablesequence.ps1 similarity index 100% rename from functions/get-d365tablesequence.ps1 rename to d365fo.tools/functions/get-d365tablesequence.ps1 diff --git a/functions/get-d365tfsuri.ps1 b/d365fo.tools/functions/get-d365tfsuri.ps1 similarity index 100% rename from functions/get-d365tfsuri.ps1 rename to d365fo.tools/functions/get-d365tfsuri.ps1 diff --git a/functions/get-d365tfsworkspace.ps1 b/d365fo.tools/functions/get-d365tfsworkspace.ps1 similarity index 100% rename from functions/get-d365tfsworkspace.ps1 rename to d365fo.tools/functions/get-d365tfsworkspace.ps1 diff --git a/functions/get-d365url.ps1 b/d365fo.tools/functions/get-d365url.ps1 similarity index 100% rename from functions/get-d365url.ps1 rename to d365fo.tools/functions/get-d365url.ps1 diff --git a/functions/get-d365user.ps1 b/d365fo.tools/functions/get-d365user.ps1 similarity index 96% rename from functions/get-d365user.ps1 rename to d365fo.tools/functions/get-d365user.ps1 
index 94e44c17..9123bf89 100644 --- a/functions/get-d365user.ps1 +++ b/d365fo.tools/functions/get-d365user.ps1 @@ -69,7 +69,7 @@ function Get-D365User { $SqlCommand = Get-SqlCommand @SqlParams -TrustedConnection $UseTrustedConnection - $sqlCommand.CommandText = (Get-Content "$script:PSModuleRoot\internal\sql\get-user.sql") -join [Environment]::NewLine + $sqlCommand.CommandText = (Get-Content "$script:ModuleRoot\internal\sql\get-user.sql") -join [Environment]::NewLine $null = $sqlCommand.Parameters.Add("@Email", $Email.Replace("*", "%")) diff --git a/functions/get-d365userauthenticationdetail.ps1 b/d365fo.tools/functions/get-d365userauthenticationdetail.ps1 similarity index 100% rename from functions/get-d365userauthenticationdetail.ps1 rename to d365fo.tools/functions/get-d365userauthenticationdetail.ps1 diff --git a/functions/get-d365windowsactivationstatus.ps1 b/d365fo.tools/functions/get-d365windowsactivationstatus.ps1 similarity index 100% rename from functions/get-d365windowsactivationstatus.ps1 rename to d365fo.tools/functions/get-d365windowsactivationstatus.ps1 diff --git a/functions/import-d365aaduser.ps1 b/d365fo.tools/functions/import-d365aaduser.ps1 similarity index 100% rename from functions/import-d365aaduser.ps1 rename to d365fo.tools/functions/import-d365aaduser.ps1 diff --git a/functions/import-d365bacpac.ps1 b/d365fo.tools/functions/import-d365bacpac.ps1 similarity index 100% rename from functions/import-d365bacpac.ps1 rename to d365fo.tools/functions/import-d365bacpac.ps1 diff --git a/functions/initialize-d365config.ps1 b/d365fo.tools/functions/initialize-d365config.ps1 similarity index 100% rename from functions/initialize-d365config.ps1 rename to d365fo.tools/functions/initialize-d365config.ps1 diff --git a/functions/invoke-d365azurestoragedownload.ps1 b/d365fo.tools/functions/invoke-d365azurestoragedownload.ps1 similarity index 100% rename from functions/invoke-d365azurestoragedownload.ps1 rename to d365fo.tools/functions/invoke-d365azurestoragedownload.ps1 diff --git a/functions/invoke-d365azurestorageupload.ps1 b/d365fo.tools/functions/invoke-d365azurestorageupload.ps1 similarity index 100% rename from functions/invoke-d365azurestorageupload.ps1 rename to d365fo.tools/functions/invoke-d365azurestorageupload.ps1 diff --git a/functions/invoke-d365dataflush.ps1 b/d365fo.tools/functions/invoke-d365dataflush.ps1 similarity index 100% rename from functions/invoke-d365dataflush.ps1 rename to d365fo.tools/functions/invoke-d365dataflush.ps1 diff --git a/functions/invoke-d365dbsync.ps1 b/d365fo.tools/functions/invoke-d365dbsync.ps1 similarity index 100% rename from functions/invoke-d365dbsync.ps1 rename to d365fo.tools/functions/invoke-d365dbsync.ps1 diff --git a/functions/invoke-d365installlicense.ps1 b/d365fo.tools/functions/invoke-d365installlicense.ps1 similarity index 100% rename from functions/invoke-d365installlicense.ps1 rename to d365fo.tools/functions/invoke-d365installlicense.ps1 diff --git a/functions/invoke-d365logicapp.ps1 b/d365fo.tools/functions/invoke-d365logicapp.ps1 similarity index 100% rename from functions/invoke-d365logicapp.ps1 rename to d365fo.tools/functions/invoke-d365logicapp.ps1 diff --git a/functions/invoke-d365modelutil.ps1 b/d365fo.tools/functions/invoke-d365modelutil.ps1 similarity index 100% rename from functions/invoke-d365modelutil.ps1 rename to d365fo.tools/functions/invoke-d365modelutil.ps1 diff --git a/functions/invoke-d365rearmwindows.ps1 b/d365fo.tools/functions/invoke-d365rearmwindows.ps1 similarity index 100% rename from 
functions/invoke-d365rearmwindows.ps1 rename to d365fo.tools/functions/invoke-d365rearmwindows.ps1 diff --git a/functions/invoke-d365scdpbundleinstall.ps1 b/d365fo.tools/functions/invoke-d365scdpbundleinstall.ps1 similarity index 100% rename from functions/invoke-d365scdpbundleinstall.ps1 rename to d365fo.tools/functions/invoke-d365scdpbundleinstall.ps1 diff --git a/functions/invoke-d365sdpinstall.ps1 b/d365fo.tools/functions/invoke-d365sdpinstall.ps1 similarity index 100% rename from functions/invoke-d365sdpinstall.ps1 rename to d365fo.tools/functions/invoke-d365sdpinstall.ps1 diff --git a/functions/invoke-d365sphelp.ps1 b/d365fo.tools/functions/invoke-d365sphelp.ps1 similarity index 100% rename from functions/invoke-d365sphelp.ps1 rename to d365fo.tools/functions/invoke-d365sphelp.ps1 diff --git a/functions/invoke-d365sysflushaodcache.ps1 b/d365fo.tools/functions/invoke-d365sysflushaodcache.ps1 similarity index 100% rename from functions/invoke-d365sysflushaodcache.ps1 rename to d365fo.tools/functions/invoke-d365sysflushaodcache.ps1 diff --git a/functions/invoke-d365sysrunnerclass.ps1 b/d365fo.tools/functions/invoke-d365sysrunnerclass.ps1 similarity index 100% rename from functions/invoke-d365sysrunnerclass.ps1 rename to d365fo.tools/functions/invoke-d365sysrunnerclass.ps1 diff --git a/functions/invoke-d365tablebrowser.ps1 b/d365fo.tools/functions/invoke-d365tablebrowser.ps1 similarity index 100% rename from functions/invoke-d365tablebrowser.ps1 rename to d365fo.tools/functions/invoke-d365tablebrowser.ps1 diff --git a/functions/new-d365bacpac.ps1 b/d365fo.tools/functions/new-d365bacpac.ps1 similarity index 100% rename from functions/new-d365bacpac.ps1 rename to d365fo.tools/functions/new-d365bacpac.ps1 diff --git a/d365fo.tools/functions/new-d365isvlicense.ps1 b/d365fo.tools/functions/new-d365isvlicense.ps1 new file mode 100644 index 00000000..0954b168 --- /dev/null +++ b/d365fo.tools/functions/new-d365isvlicense.ps1 @@ -0,0 +1,53 @@ +function New-D365ISVLicense { + [CmdletBinding()] + param ( + [Alias('Template')] + [string] $Path = "$Script:BinDirTools\CustomDeployablePackage\ImportISVLicense.zip", + + [Parameter(Mandatory = $true)] + [string] $LicenseFile, + + [string] $OutputPath = "C:\temp\d365fo.tools\ISVLicense.zip" + + ) + + begin { + $oldprogressPreference = $global:progressPreference + $global:progressPreference = 'silentlyContinue' + } + + process { + #if (-not (Test-PathExists -Path $Path, $LicenseFile -Type "Leaf")) {return} + #if ((Test-PathExists -Path $OutputPath -Type "Leaf")){ + # Write-PSFMessage -Level Host -Message "The output file already exists. Please delete the file or change the desired output path."
+ # Stop-PSFFunction -Message "Stopping because of errors" + + # $global:progressPreference = $oldprogressPreference + # return + # } + + $Path,$LicenseFile | Unblock-File + + $ExtractionPath = [System.IO.Path]::GetTempPath() + + $packageTemp = Join-Path $ExtractionPath ((Get-Random -Maximum 99999).ToString()) + + Write-PSFMessage -Level Verbose -Message "Extracting the zip file to $packageTemp" -Target $packageTemp + Expand-Archive -Path $Path -DestinationPath $packageTemp + $packageTemp + + $licenseMergePath = Join-Path $packageTemp "AosService\Scripts\License" + + $licenseMergePath + + Get-ChildItem -Path $licenseMergePath | Remove-Item -Force -ErrorAction SilentlyContinue + + Copy-Item -Path $LicenseFile -Destination $licenseMergePath + + Compress-Archive -Path "$packageTemp\*" -DestinationPath $OutputPath + } + + end { + $global:progressPreference = $oldprogressPreference + } +} \ No newline at end of file diff --git a/functions/new-d365topologyfile.ps1 b/d365fo.tools/functions/new-d365topologyfile.ps1 similarity index 100% rename from functions/new-d365topologyfile.ps1 rename to d365fo.tools/functions/new-d365topologyfile.ps1 diff --git a/d365fo.tools/functions/readme.md b/d365fo.tools/functions/readme.md new file mode 100644 index 00000000..119f5ac9 --- /dev/null +++ b/d365fo.tools/functions/readme.md @@ -0,0 +1,7 @@ +# Functions + +This is the folder where the functions go. + +Depending on the complexity of the module, it is recommended to subdivide them into subfolders. + +The module will pick up all .ps1 files recursively \ No newline at end of file diff --git a/functions/remove-d365database.ps1 b/d365fo.tools/functions/remove-d365database.ps1 similarity index 100% rename from functions/remove-d365database.ps1 rename to d365fo.tools/functions/remove-d365database.ps1 diff --git a/functions/remove-d365user.ps1 b/d365fo.tools/functions/remove-d365user.ps1 similarity index 100% rename from functions/remove-d365user.ps1 rename to d365fo.tools/functions/remove-d365user.ps1 diff --git a/functions/rename-d365computername.ps1 b/d365fo.tools/functions/rename-d365computername.ps1 similarity index 100% rename from functions/rename-d365computername.ps1 rename to d365fo.tools/functions/rename-d365computername.ps1 diff --git a/functions/rename-d365instance.ps1 b/d365fo.tools/functions/rename-d365instance.ps1 similarity index 100% rename from functions/rename-d365instance.ps1 rename to d365fo.tools/functions/rename-d365instance.ps1 diff --git a/functions/set-d365activeazurestorageconfig.ps1 b/d365fo.tools/functions/set-d365activeazurestorageconfig.ps1 similarity index 100% rename from functions/set-d365activeazurestorageconfig.ps1 rename to d365fo.tools/functions/set-d365activeazurestorageconfig.ps1 diff --git a/functions/set-d365activeenvironmentconfig.ps1 b/d365fo.tools/functions/set-d365activeenvironmentconfig.ps1 similarity index 100% rename from functions/set-d365activeenvironmentconfig.ps1 rename to d365fo.tools/functions/set-d365activeenvironmentconfig.ps1 diff --git a/functions/set-d365admin.ps1 b/d365fo.tools/functions/set-d365admin.ps1 similarity index 100% rename from functions/set-d365admin.ps1 rename to d365fo.tools/functions/set-d365admin.ps1 diff --git a/functions/set-d365clickoncetrustprompt.ps1 b/d365fo.tools/functions/set-d365clickoncetrustprompt.ps1 similarity index 100% rename from functions/set-d365clickoncetrustprompt.ps1 rename to d365fo.tools/functions/set-d365clickoncetrustprompt.ps1 diff --git a/functions/set-d365favoritebookmark.ps1 
b/d365fo.tools/functions/set-d365favoritebookmark.ps1 similarity index 100% rename from functions/set-d365favoritebookmark.ps1 rename to d365fo.tools/functions/set-d365favoritebookmark.ps1 diff --git a/functions/set-d365logicappconfig.ps1 b/d365fo.tools/functions/set-d365logicappconfig.ps1 similarity index 100% rename from functions/set-d365logicappconfig.ps1 rename to d365fo.tools/functions/set-d365logicappconfig.ps1 diff --git a/functions/set-d365offlineauthenticationadminemail.ps1 b/d365fo.tools/functions/set-d365offlineauthenticationadminemail.ps1 similarity index 100% rename from functions/set-d365offlineauthenticationadminemail.ps1 rename to d365fo.tools/functions/set-d365offlineauthenticationadminemail.ps1 diff --git a/functions/set-d365sdpcleanup.ps1 b/d365fo.tools/functions/set-d365sdpcleanup.ps1 similarity index 100% rename from functions/set-d365sdpcleanup.ps1 rename to d365fo.tools/functions/set-d365sdpcleanup.ps1 diff --git a/functions/set-d365startpage.ps1 b/d365fo.tools/functions/set-d365startpage.ps1 similarity index 100% rename from functions/set-d365startpage.ps1 rename to d365fo.tools/functions/set-d365startpage.ps1 diff --git a/functions/set-d365sysadmin.ps1 b/d365fo.tools/functions/set-d365sysadmin.ps1 similarity index 100% rename from functions/set-d365sysadmin.ps1 rename to d365fo.tools/functions/set-d365sysadmin.ps1 diff --git a/functions/set-d365workstationmode.ps1 b/d365fo.tools/functions/set-d365workstationmode.ps1 similarity index 100% rename from functions/set-d365workstationmode.ps1 rename to d365fo.tools/functions/set-d365workstationmode.ps1 diff --git a/functions/start-d365environment.ps1 b/d365fo.tools/functions/start-d365environment.ps1 similarity index 100% rename from functions/start-d365environment.ps1 rename to d365fo.tools/functions/start-d365environment.ps1 diff --git a/functions/stop-d365environment.ps1 b/d365fo.tools/functions/stop-d365environment.ps1 similarity index 100% rename from functions/stop-d365environment.ps1 rename to d365fo.tools/functions/stop-d365environment.ps1 diff --git a/functions/switch-d365activedatabase.ps1 b/d365fo.tools/functions/switch-d365activedatabase.ps1 similarity index 100% rename from functions/switch-d365activedatabase.ps1 rename to d365fo.tools/functions/switch-d365activedatabase.ps1 diff --git a/functions/update-d365user.ps1 b/d365fo.tools/functions/update-d365user.ps1 similarity index 100% rename from functions/update-d365user.ps1 rename to d365fo.tools/functions/update-d365user.ps1 diff --git a/d365fo.tools/internal/configurations/configuration.ps1 b/d365fo.tools/internal/configurations/configuration.ps1 new file mode 100644 index 00000000..dac03dc4 --- /dev/null +++ b/d365fo.tools/internal/configurations/configuration.ps1 @@ -0,0 +1,15 @@ +<# +This is an example configuration file + +By default, it is enough to have a single one of them, +however if you have enough configuration settings to justify having multiple copies of it, +feel totally free to split them into multiple files. +#> + +<# +# Example Configuration +Set-PSFConfig -Module 'd365fo.tools' -Name 'Example.Setting' -Value 10 -Initialize -Validation 'integer' -Handler { } -Description "Example configuration setting. Your module can then use the setting using 'Get-PSFConfigValue'" +#> + +Set-PSFConfig -Module 'd365fo.tools' -Name 'Import.DoDotSource' -Value $false -Initialize -Validation 'bool' -Description "Whether the module files should be dotsourced on import. 
By default, the files of this module are read as string value and invoked, which is faster but worse on debugging." +Set-PSFConfig -Module 'd365fo.tools' -Name 'Import.IndividualFiles' -Value $false -Initialize -Validation 'bool' -Description "Whether the module files should be imported individually. During the module build, all module code is compiled into few files, which are imported instead by default. Loading the compiled versions is faster, using the individual files is easier for debugging and testing out adjustments." \ No newline at end of file diff --git a/d365fo.tools/internal/configurations/readme.md b/d365fo.tools/internal/configurations/readme.md new file mode 100644 index 00000000..a523e7a2 --- /dev/null +++ b/d365fo.tools/internal/configurations/readme.md @@ -0,0 +1,14 @@ +# Configurations + +Through the `PSFramework` you have a simple method that allows you to ... + + - Publish settings + - With onboard documentation + - Input validation + - Scripts that run on change of settings + - That can be discovered and updated by the user + - That can be administrated by policy & DSC + +The configuration system is a bit too complex to describe in a help file, you can however visit us at http://psframework.org for detailed guidance. + +An example can be seen in the attached ps1 file \ No newline at end of file diff --git a/internal/dll/Microsoft.IdentityModel.Clients.ActiveDirectory.Platform.dll b/d365fo.tools/internal/dll/Microsoft.IdentityModel.Clients.ActiveDirectory.Platform.dll similarity index 100% rename from internal/dll/Microsoft.IdentityModel.Clients.ActiveDirectory.Platform.dll rename to d365fo.tools/internal/dll/Microsoft.IdentityModel.Clients.ActiveDirectory.Platform.dll diff --git a/internal/dll/Microsoft.IdentityModel.Clients.ActiveDirectory.dll b/d365fo.tools/internal/dll/Microsoft.IdentityModel.Clients.ActiveDirectory.dll similarity index 100% rename from internal/dll/Microsoft.IdentityModel.Clients.ActiveDirectory.dll rename to d365fo.tools/internal/dll/Microsoft.IdentityModel.Clients.ActiveDirectory.dll diff --git a/internal/functions/add-aadusersecurity.ps1 b/d365fo.tools/internal/functions/add-aadusersecurity.ps1 similarity index 100% rename from internal/functions/add-aadusersecurity.ps1 rename to d365fo.tools/internal/functions/add-aadusersecurity.ps1 diff --git a/internal/functions/backup-file.ps1 b/d365fo.tools/internal/functions/backup-file.ps1 similarity index 100% rename from internal/functions/backup-file.ps1 rename to d365fo.tools/internal/functions/backup-file.ps1 diff --git a/internal/functions/enum-environmenttype.ps1 b/d365fo.tools/internal/functions/enum-environmenttype.ps1 similarity index 100% rename from internal/functions/enum-environmenttype.ps1 rename to d365fo.tools/internal/functions/enum-environmenttype.ps1 diff --git a/internal/functions/get-applicationenvironment.ps1 b/d365fo.tools/internal/functions/get-applicationenvironment.ps1 similarity index 100% rename from internal/functions/get-applicationenvironment.ps1 rename to d365fo.tools/internal/functions/get-applicationenvironment.ps1 diff --git a/internal/functions/get-azureserviceobjectives.ps1 b/d365fo.tools/internal/functions/get-azureserviceobjectives.ps1 similarity index 100% rename from internal/functions/get-azureserviceobjectives.ps1 rename to d365fo.tools/internal/functions/get-azureserviceobjectives.ps1 diff --git a/internal/functions/get-backupname.ps1 b/d365fo.tools/internal/functions/get-backupname.ps1 similarity index 100% rename from internal/functions/get-backupname.ps1 
rename to d365fo.tools/internal/functions/get-backupname.ps1 diff --git a/internal/functions/get-canonicalidentityprovider.ps1 b/d365fo.tools/internal/functions/get-canonicalidentityprovider.ps1 similarity index 100% rename from internal/functions/get-canonicalidentityprovider.ps1 rename to d365fo.tools/internal/functions/get-canonicalidentityprovider.ps1 diff --git a/internal/functions/get-deepclone.ps1 b/d365fo.tools/internal/functions/get-deepclone.ps1 similarity index 100% rename from internal/functions/get-deepclone.ps1 rename to d365fo.tools/internal/functions/get-deepclone.ps1 diff --git a/internal/functions/get-identityprovider.ps1 b/d365fo.tools/internal/functions/get-identityprovider.ps1 similarity index 100% rename from internal/functions/get-identityprovider.ps1 rename to d365fo.tools/internal/functions/get-identityprovider.ps1 diff --git a/internal/functions/get-instanceidentityprovider.ps1 b/d365fo.tools/internal/functions/get-instanceidentityprovider.ps1 similarity index 100% rename from internal/functions/get-instanceidentityprovider.ps1 rename to d365fo.tools/internal/functions/get-instanceidentityprovider.ps1 diff --git a/internal/functions/get-instancevalues.ps1 b/d365fo.tools/internal/functions/get-instancevalues.ps1 similarity index 100% rename from internal/functions/get-instancevalues.ps1 rename to d365fo.tools/internal/functions/get-instancevalues.ps1 diff --git a/internal/functions/get-loginfromemail.ps1 b/d365fo.tools/internal/functions/get-loginfromemail.ps1 similarity index 100% rename from internal/functions/get-loginfromemail.ps1 rename to d365fo.tools/internal/functions/get-loginfromemail.ps1 diff --git a/internal/functions/get-networkdomain.ps1 b/d365fo.tools/internal/functions/get-networkdomain.ps1 similarity index 100% rename from internal/functions/get-networkdomain.ps1 rename to d365fo.tools/internal/functions/get-networkdomain.ps1 diff --git a/internal/functions/get-productinfoprovider.ps1 b/d365fo.tools/internal/functions/get-productinfoprovider.ps1 similarity index 100% rename from internal/functions/get-productinfoprovider.ps1 rename to d365fo.tools/internal/functions/get-productinfoprovider.ps1 diff --git a/internal/functions/get-servicelist.ps1 b/d365fo.tools/internal/functions/get-servicelist.ps1 similarity index 100% rename from internal/functions/get-servicelist.ps1 rename to d365fo.tools/internal/functions/get-servicelist.ps1 diff --git a/internal/functions/get-shellbuffer.ps1 b/d365fo.tools/internal/functions/get-shellbuffer.ps1 similarity index 100% rename from internal/functions/get-shellbuffer.ps1 rename to d365fo.tools/internal/functions/get-shellbuffer.ps1 diff --git a/internal/functions/get-sqlcommand.ps1 b/d365fo.tools/internal/functions/get-sqlcommand.ps1 similarity index 100% rename from internal/functions/get-sqlcommand.ps1 rename to d365fo.tools/internal/functions/get-sqlcommand.ps1 diff --git a/internal/functions/get-tenantfromemail.ps1 b/d365fo.tools/internal/functions/get-tenantfromemail.ps1 similarity index 100% rename from internal/functions/get-tenantfromemail.ps1 rename to d365fo.tools/internal/functions/get-tenantfromemail.ps1 diff --git a/internal/functions/get-usersidfromaad.ps1 b/d365fo.tools/internal/functions/get-usersidfromaad.ps1 similarity index 100% rename from internal/functions/get-usersidfromaad.ps1 rename to d365fo.tools/internal/functions/get-usersidfromaad.ps1 diff --git a/internal/functions/import-aaduserIntod365fo.ps1 b/d365fo.tools/internal/functions/import-aaduserIntod365fo.ps1 similarity index 100% rename 
from internal/functions/import-aaduserIntod365fo.ps1 rename to d365fo.tools/internal/functions/import-aaduserIntod365fo.ps1 diff --git a/internal/functions/invoke-azurebackuprestore.ps1 b/d365fo.tools/internal/functions/invoke-azurebackuprestore.ps1 similarity index 100% rename from internal/functions/invoke-azurebackuprestore.ps1 rename to d365fo.tools/internal/functions/invoke-azurebackuprestore.ps1 diff --git a/internal/functions/invoke-clearazurespecificobjects.ps1 b/d365fo.tools/internal/functions/invoke-clearazurespecificobjects.ps1 similarity index 100% rename from internal/functions/invoke-clearazurespecificobjects.ps1 rename to d365fo.tools/internal/functions/invoke-clearazurespecificobjects.ps1 diff --git a/internal/functions/invoke-clearsqlspecificobjects.ps1 b/d365fo.tools/internal/functions/invoke-clearsqlspecificobjects.ps1 similarity index 100% rename from internal/functions/invoke-clearsqlspecificobjects.ps1 rename to d365fo.tools/internal/functions/invoke-clearsqlspecificobjects.ps1 diff --git a/internal/functions/invoke-customsqlscript.ps1 b/d365fo.tools/internal/functions/invoke-customsqlscript.ps1 similarity index 100% rename from internal/functions/invoke-customsqlscript.ps1 rename to d365fo.tools/internal/functions/invoke-customsqlscript.ps1 diff --git a/internal/functions/invoke-sqlbackuprestore.ps1 b/d365fo.tools/internal/functions/invoke-sqlbackuprestore.ps1 similarity index 100% rename from internal/functions/invoke-sqlbackuprestore.ps1 rename to d365fo.tools/internal/functions/invoke-sqlbackuprestore.ps1 diff --git a/internal/functions/invoke-sqlpackage.ps1 b/d365fo.tools/internal/functions/invoke-sqlpackage.ps1 similarity index 100% rename from internal/functions/invoke-sqlpackage.ps1 rename to d365fo.tools/internal/functions/invoke-sqlpackage.ps1 diff --git a/internal/functions/invoke-timesignal.ps1 b/d365fo.tools/internal/functions/invoke-timesignal.ps1 similarity index 88% rename from internal/functions/invoke-timesignal.ps1 rename to d365fo.tools/internal/functions/invoke-timesignal.ps1 index 828d20bf..3bb074c9 100644 --- a/internal/functions/invoke-timesignal.ps1 +++ b/d365fo.tools/internal/functions/invoke-timesignal.ps1 @@ -13,7 +13,8 @@ function Invoke-TimeSignal { if($Start.IsPresent) { if($Script:TimeSignals.ContainsKey($Command)) { - Write-PSFMessage -Level Verbose -Message "The command '$Command' is already taking part in time measurement." + Write-PSFMessage -Level Verbose -Message "The command '$Command' was already taking part in time measurement. The entry has been updated with the current date and time."
+ $Script:TimeSignals[$Command] = $Time } else{ $Script:TimeSignals.Add($Command, $Time) diff --git a/internal/functions/new-authorizationheader.ps1 b/d365fo.tools/internal/functions/new-authorizationheader.ps1 similarity index 100% rename from internal/functions/new-authorizationheader.ps1 rename to d365fo.tools/internal/functions/new-authorizationheader.ps1 diff --git a/internal/functions/new-d365fouser.ps1 b/d365fo.tools/internal/functions/new-d365fouser.ps1 similarity index 100% rename from internal/functions/new-d365fouser.ps1 rename to d365fo.tools/internal/functions/new-d365fouser.ps1 diff --git a/internal/functions/new-decryptedfile.ps1 b/d365fo.tools/internal/functions/new-decryptedfile.ps1 similarity index 100% rename from internal/functions/new-decryptedfile.ps1 rename to d365fo.tools/internal/functions/new-decryptedfile.ps1 diff --git a/internal/functions/new-webrequest.ps1 b/d365fo.tools/internal/functions/new-webrequest.ps1 similarity index 100% rename from internal/functions/new-webrequest.ps1 rename to d365fo.tools/internal/functions/new-webrequest.ps1 diff --git a/d365fo.tools/internal/functions/readme.md b/d365fo.tools/internal/functions/readme.md new file mode 100644 index 00000000..f65b51bb --- /dev/null +++ b/d365fo.tools/internal/functions/readme.md @@ -0,0 +1,7 @@ +# Functions + +This is the folder where the internal functions go. + +Depending on the complexity of the module, it is recommended to subdivide them into subfolders. + +The module will pick up all .ps1 files recursively \ No newline at end of file diff --git a/internal/functions/rename-configvalue.ps1 b/d365fo.tools/internal/functions/rename-configvalue.ps1 similarity index 100% rename from internal/functions/rename-configvalue.ps1 rename to d365fo.tools/internal/functions/rename-configvalue.ps1 diff --git a/internal/functions/set-adminuser.ps1 b/d365fo.tools/internal/functions/set-adminuser.ps1 similarity index 100% rename from internal/functions/set-adminuser.ps1 rename to d365fo.tools/internal/functions/set-adminuser.ps1 diff --git a/internal/functions/set-azurebacpacvalues.ps1 b/d365fo.tools/internal/functions/set-azurebacpacvalues.ps1 similarity index 100% rename from internal/functions/set-azurebacpacvalues.ps1 rename to d365fo.tools/internal/functions/set-azurebacpacvalues.ps1 diff --git a/internal/functions/set-sqlbacpacvalues.ps1 b/d365fo.tools/internal/functions/set-sqlbacpacvalues.ps1 similarity index 100% rename from internal/functions/set-sqlbacpacvalues.ps1 rename to d365fo.tools/internal/functions/set-sqlbacpacvalues.ps1 diff --git a/internal/functions/test-aaduser.ps1 b/d365fo.tools/internal/functions/test-aaduser.ps1 similarity index 100% rename from internal/functions/test-aaduser.ps1 rename to d365fo.tools/internal/functions/test-aaduser.ps1 diff --git a/internal/functions/test-aaduseridind365fo.ps1 b/d365fo.tools/internal/functions/test-aaduseridind365fo.ps1 similarity index 100% rename from internal/functions/test-aaduseridind365fo.ps1 rename to d365fo.tools/internal/functions/test-aaduseridind365fo.ps1 diff --git a/internal/functions/test-aaduserind365fo.ps1 b/d365fo.tools/internal/functions/test-aaduserind365fo.ps1 similarity index 100% rename from internal/functions/test-aaduserind365fo.ps1 rename to d365fo.tools/internal/functions/test-aaduserind365fo.ps1 diff --git a/internal/functions/test-pathexists.ps1 b/d365fo.tools/internal/functions/test-pathexists.ps1 similarity index 100% rename from internal/functions/test-pathexists.ps1 rename to 
d365fo.tools/internal/functions/test-pathexists.ps1 diff --git a/internal/functions/test-registryvalue.ps1 b/d365fo.tools/internal/functions/test-registryvalue.ps1 similarity index 100% rename from internal/functions/test-registryvalue.ps1 rename to d365fo.tools/internal/functions/test-registryvalue.ps1 diff --git a/internal/functions/test-trustedconnection.ps1 b/d365fo.tools/internal/functions/test-trustedconnection.ps1 similarity index 100% rename from internal/functions/test-trustedconnection.ps1 rename to d365fo.tools/internal/functions/test-trustedconnection.ps1 diff --git a/internal/functions/update-topologyfile.ps1 b/d365fo.tools/internal/functions/update-topologyfile.ps1 similarity index 100% rename from internal/functions/update-topologyfile.ps1 rename to d365fo.tools/internal/functions/update-topologyfile.ps1 diff --git a/internal/misc/D365FO.url b/d365fo.tools/internal/misc/D365FO.url similarity index 100% rename from internal/misc/D365FO.url rename to d365fo.tools/internal/misc/D365FO.url diff --git a/d365fo.tools/internal/scripts/license.ps1 b/d365fo.tools/internal/scripts/license.ps1 new file mode 100644 index 00000000..18a043d7 --- /dev/null +++ b/d365fo.tools/internal/scripts/license.ps1 @@ -0,0 +1,21 @@ +New-PSFLicense -Product 'd365fo.tools' -Manufacturer 'Motz' -ProductVersion $script:ModuleVersion -ProductType Module -Name MIT -Version "1.0.0.0" -Date (Get-Date "2018-09-20") -Text @" +Copyright (c) 2018 Motz + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. +"@ \ No newline at end of file diff --git a/d365fo.tools/internal/scripts/postimport.ps1 b/d365fo.tools/internal/scripts/postimport.ps1 new file mode 100644 index 00000000..f095bdf6 --- /dev/null +++ b/d365fo.tools/internal/scripts/postimport.ps1 @@ -0,0 +1,20 @@ +# Add all things you want to run after importing the main code + +# Load Configurations +foreach ($file in (Get-ChildItem "$ModuleRoot\internal\configurations\*.ps1" -ErrorAction Ignore)) { + . Import-ModuleFile -Path $file.FullName +} + +# Load Tab Expansion +foreach ($file in (Get-ChildItem "$ModuleRoot\internal\tepp\*.tepp.ps1" -ErrorAction Ignore)) { + . Import-ModuleFile -Path $file.FullName +} + +# Load Tab Expansion Assignment +. Import-ModuleFile -Path "$ModuleRoot\internal\tepp\assignment.ps1" + +# Load License +. Import-ModuleFile -Path "$ModuleRoot\internal\scripts\license.ps1" + +# Load Variables +. 
Import-ModuleFile -Path "$ModuleRoot\internal\scripts\variables.ps1" \ No newline at end of file diff --git a/d365fo.tools/internal/scripts/preimport.ps1 b/d365fo.tools/internal/scripts/preimport.ps1 new file mode 100644 index 00000000..bab892e3 --- /dev/null +++ b/d365fo.tools/internal/scripts/preimport.ps1 @@ -0,0 +1 @@ +# Add all things you want to run before importing the main code \ No newline at end of file diff --git a/d365fo.tools.psm1 b/d365fo.tools/internal/scripts/variables.ps1 similarity index 88% rename from d365fo.tools.psm1 rename to d365fo.tools/internal/scripts/variables.ps1 index b613466f..3628f0ec 100644 --- a/d365fo.tools.psm1 +++ b/d365fo.tools/internal/scripts/variables.ps1 @@ -1,33 +1,4 @@ -$Script:TimeSignals = @{} - -$script:PSModuleRoot = $PSScriptRoot -function Import-ModuleFile { - - [CmdletBinding()] - Param ( - [string] - $Path - ) - if ($doDotSource) { . $Path } - else { $ExecutionContext.InvokeCommand.InvokeScript($false, ([ScriptBlock]::Create([io.file]::ReadAllText($Path))), $null, $null) } -} - -$script:doDotSource = $false -if ($psframework_dotsourcemodule) { $script:doDotSource = $true } -if (($PSVersionTable.PSVersion.Major -lt 6) -or ($PSVersionTable.OS -like "*Windows*")) { - - if ((Get-ItemProperty -Path "HKCU:\SOFTWARE\Microsoft\WindowsPowerShell\PSFramework\System" -Name "DoDotSource" -ErrorAction Ignore).DoDotSource) { $script:doDotSource = $true } -} - -# All internal functions privately available within the tool set -foreach ($function in (Get-ChildItem "$script:PSModuleRoot\internal\functions\*.ps1")) { - . Import-ModuleFile $function.FullName -} - -# All public functions available within the tool set -foreach ($function in (Get-ChildItem "$script:PSModuleRoot\functions\*.ps1")) { - . Import-ModuleFile $function.FullName -} +$Script:TimeSignals = @{} Write-PSFMessage -Level Verbose -Message "Gathering all variables to assist the different cmdlets to function" diff --git a/internal/sql/add-aaduserintod365fo.sql b/d365fo.tools/internal/sql/add-aaduserintod365fo.sql similarity index 100% rename from internal/sql/add-aaduserintod365fo.sql rename to d365fo.tools/internal/sql/add-aaduserintod365fo.sql diff --git a/internal/sql/add-bacpacdatabase.sql b/d365fo.tools/internal/sql/add-bacpacdatabase.sql similarity index 100% rename from internal/sql/add-bacpacdatabase.sql rename to d365fo.tools/internal/sql/add-bacpacdatabase.sql diff --git a/internal/sql/backuprestoredb.sql b/d365fo.tools/internal/sql/backuprestoredb.sql similarity index 100% rename from internal/sql/backuprestoredb.sql rename to d365fo.tools/internal/sql/backuprestoredb.sql diff --git a/internal/sql/checkfornewazuredb.sql b/d365fo.tools/internal/sql/checkfornewazuredb.sql similarity index 100% rename from internal/sql/checkfornewazuredb.sql rename to d365fo.tools/internal/sql/checkfornewazuredb.sql diff --git a/internal/sql/clear-azurebacpacdatabase.sql b/d365fo.tools/internal/sql/clear-azurebacpacdatabase.sql similarity index 100% rename from internal/sql/clear-azurebacpacdatabase.sql rename to d365fo.tools/internal/sql/clear-azurebacpacdatabase.sql diff --git a/internal/sql/clear-sqlbacpacdatabase.sql b/d365fo.tools/internal/sql/clear-sqlbacpacdatabase.sql similarity index 100% rename from internal/sql/clear-sqlbacpacdatabase.sql rename to d365fo.tools/internal/sql/clear-sqlbacpacdatabase.sql diff --git a/internal/sql/disable-user.sql b/d365fo.tools/internal/sql/disable-user.sql similarity index 100% rename from internal/sql/disable-user.sql rename to 
d365fo.tools/internal/sql/disable-user.sql diff --git a/internal/sql/enable-user.sql b/d365fo.tools/internal/sql/enable-user.sql similarity index 100% rename from internal/sql/enable-user.sql rename to d365fo.tools/internal/sql/enable-user.sql diff --git a/internal/sql/get-alltablefields.sql b/d365fo.tools/internal/sql/get-alltablefields.sql similarity index 100% rename from internal/sql/get-alltablefields.sql rename to d365fo.tools/internal/sql/get-alltablefields.sql diff --git a/internal/sql/get-azureserviceobjective.sql b/d365fo.tools/internal/sql/get-azureserviceobjective.sql similarity index 100% rename from internal/sql/get-azureserviceobjective.sql rename to d365fo.tools/internal/sql/get-azureserviceobjective.sql diff --git a/internal/sql/get-instancevalue.sql b/d365fo.tools/internal/sql/get-instancevalue.sql similarity index 100% rename from internal/sql/get-instancevalue.sql rename to d365fo.tools/internal/sql/get-instancevalue.sql diff --git a/internal/sql/get-tablefields.sql b/d365fo.tools/internal/sql/get-tablefields.sql similarity index 100% rename from internal/sql/get-tablefields.sql rename to d365fo.tools/internal/sql/get-tablefields.sql diff --git a/internal/sql/get-tables.sql b/d365fo.tools/internal/sql/get-tables.sql similarity index 100% rename from internal/sql/get-tables.sql rename to d365fo.tools/internal/sql/get-tables.sql diff --git a/internal/sql/get-tablesequence.sql b/d365fo.tools/internal/sql/get-tablesequence.sql similarity index 100% rename from internal/sql/get-tablesequence.sql rename to d365fo.tools/internal/sql/get-tablesequence.sql diff --git a/internal/sql/get-user.sql b/d365fo.tools/internal/sql/get-user.sql similarity index 100% rename from internal/sql/get-user.sql rename to d365fo.tools/internal/sql/get-user.sql diff --git a/internal/sql/invoke-sphelp.sql b/d365fo.tools/internal/sql/invoke-sphelp.sql similarity index 100% rename from internal/sql/invoke-sphelp.sql rename to d365fo.tools/internal/sql/invoke-sphelp.sql diff --git a/internal/sql/newazuredbfromcopy.sql b/d365fo.tools/internal/sql/newazuredbfromcopy.sql similarity index 100% rename from internal/sql/newazuredbfromcopy.sql rename to d365fo.tools/internal/sql/newazuredbfromcopy.sql diff --git a/internal/sql/remove-database.sql b/d365fo.tools/internal/sql/remove-database.sql similarity index 100% rename from internal/sql/remove-database.sql rename to d365fo.tools/internal/sql/remove-database.sql diff --git a/internal/sql/remove-user.sql b/d365fo.tools/internal/sql/remove-user.sql similarity index 100% rename from internal/sql/remove-user.sql rename to d365fo.tools/internal/sql/remove-user.sql diff --git a/internal/sql/set-aadusersecurityind365fo.sql b/d365fo.tools/internal/sql/set-aadusersecurityind365fo.sql similarity index 100% rename from internal/sql/set-aadusersecurityind365fo.sql rename to d365fo.tools/internal/sql/set-aadusersecurityind365fo.sql diff --git a/internal/sql/set-bacpacvaluesazure.sql b/d365fo.tools/internal/sql/set-bacpacvaluesazure.sql similarity index 100% rename from internal/sql/set-bacpacvaluesazure.sql rename to d365fo.tools/internal/sql/set-bacpacvaluesazure.sql diff --git a/internal/sql/set-bacpacvaluessql.sql b/d365fo.tools/internal/sql/set-bacpacvaluessql.sql similarity index 100% rename from internal/sql/set-bacpacvaluessql.sql rename to d365fo.tools/internal/sql/set-bacpacvaluessql.sql diff --git a/internal/sql/set-sysadmin.sql b/d365fo.tools/internal/sql/set-sysadmin.sql similarity index 100% rename from internal/sql/set-sysadmin.sql rename to 
d365fo.tools/internal/sql/set-sysadmin.sql diff --git a/internal/sql/switch-database.sql b/d365fo.tools/internal/sql/switch-database.sql similarity index 100% rename from internal/sql/switch-database.sql rename to d365fo.tools/internal/sql/switch-database.sql diff --git a/internal/sql/test-aaduseridind365fo.sql b/d365fo.tools/internal/sql/test-aaduseridind365fo.sql similarity index 100% rename from internal/sql/test-aaduseridind365fo.sql rename to d365fo.tools/internal/sql/test-aaduseridind365fo.sql diff --git a/internal/sql/test-aaduserind365fo.sql b/d365fo.tools/internal/sql/test-aaduserind365fo.sql similarity index 100% rename from internal/sql/test-aaduserind365fo.sql rename to d365fo.tools/internal/sql/test-aaduserind365fo.sql diff --git a/internal/sql/update-user.sql b/d365fo.tools/internal/sql/update-user.sql similarity index 100% rename from internal/sql/update-user.sql rename to d365fo.tools/internal/sql/update-user.sql diff --git a/d365fo.tools/internal/tepp/assignment.ps1 b/d365fo.tools/internal/tepp/assignment.ps1 new file mode 100644 index 00000000..a8cda49f --- /dev/null +++ b/d365fo.tools/internal/tepp/assignment.ps1 @@ -0,0 +1,4 @@ +<# +# Example: +Register-PSFTeppArgumentCompleter -Command Get-Alcohol -Parameter Type -Name d365fo.tools.alcohol +#> \ No newline at end of file diff --git a/d365fo.tools/internal/tepp/example.tepp.ps1 b/d365fo.tools/internal/tepp/example.tepp.ps1 new file mode 100644 index 00000000..398fe52d --- /dev/null +++ b/d365fo.tools/internal/tepp/example.tepp.ps1 @@ -0,0 +1,4 @@ +<# +# Example: +Register-PSFTeppScriptblock -Name "d365fo.tools.alcohol" -ScriptBlock { 'Beer','Mead','Whiskey','Wine','Vodka','Rum (3y)', 'Rum (5y)', 'Rum (7y)' } +#> \ No newline at end of file diff --git a/d365fo.tools/internal/tepp/readme.md b/d365fo.tools/internal/tepp/readme.md new file mode 100644 index 00000000..4166ac81 --- /dev/null +++ b/d365fo.tools/internal/tepp/readme.md @@ -0,0 +1,23 @@ +# Tab Expansion + +## Description + +Modern Tab Expansion was opened to users with the module `Tab Expansion Plus Plus` (TEPP). + +It allows you to define, what options a user is offered when tabbing through input options. This can save a lot of time for the user and is considered a key element in user experience. + +The `PSFramework` offers a simplified way of offering just this, as the two example files show. + +## Concept + +Custom tab completion is defined in two steps: + + - Define a scriptblock that is run when the user hits `TAB` and provides the strings that are his options. + - Assign that scriptblock to the parameter of a command. You can assign the same scriptblock multiple times. + +## Structure + +Import order matters. In order to make things work with the default scaffold, follow those rules: + + - All scriptfiles _defining_ completion scriptblocks like this: `*.tepp.ps1` + - Put all your completion assignments in `assignment.ps1` \ No newline at end of file diff --git a/d365fo.tools/readme.md b/d365fo.tools/readme.md new file mode 100644 index 00000000..d65f7353 --- /dev/null +++ b/d365fo.tools/readme.md @@ -0,0 +1,17 @@ +# PSFModule guidance + +This is a finished module layout optimized for implementing the PSFramework. + +If you don't care to deal with the details, this is what you need to do to get started seeing results: + + - Add the functions you want to publish to `/functions/` + - Update the `FunctionsToExport` node in the module manifest (d365fo.tools.psd1). All functions you want to publish should be in a list. 
+ - Add internal helper functions the user should not see to `/internal/functions/` + + ## Path Warning + + > If you want your module to be compatible with Linux and MacOS, keep in mind that those operating systems are case-sensitive for paths and files. + + `Import-ModuleFile` is preconfigured to resolve the path of the files specified, so it will reliably convert weird path notations the system can't handle. + Content imported through that command therefore does not need to worry about the path separator. + If you want to make sure that your own code also survives OS-specific path notations, get used to using `Resolve-Path` or the more powerful `Resolve-PSFPath`. \ No newline at end of file diff --git a/d365fo.tools/tests/functions/readme.md b/d365fo.tools/tests/functions/readme.md new file mode 100644 index 00000000..0148b05c --- /dev/null +++ b/d365fo.tools/tests/functions/readme.md @@ -0,0 +1,7 @@ +# Description + +This is where the function tests go. + +Make sure to put them in folders reflecting the actual module structure. + +It is not necessary to differentiate between internal and public functions here. \ No newline at end of file diff --git a/d365fo.tools/tests/general/FileIntegrity.Exceptions.ps1 b/d365fo.tools/tests/general/FileIntegrity.Exceptions.ps1 new file mode 100644 index 00000000..7f77a2fe --- /dev/null +++ b/d365fo.tools/tests/general/FileIntegrity.Exceptions.ps1 @@ -0,0 +1,27 @@ +# List of forbidden commands +$global:BannedCommands = @( + 'Write-Host', + 'Write-Verbose', + 'Write-Warning', + 'Write-Error', + 'Write-Output', + 'Write-Information', + 'Write-Debug' +) + +<# + Contains list of exceptions for banned cmdlets. + Insert the file names of files that may contain them. + + Example: + "Write-Host" = @('Write-PSFHostColor.ps1','Write-PSFMessage.ps1') +#> +$global:MayContainCommand = @{ + "Write-Host" = @() + "Write-Verbose" = @() + "Write-Warning" = @() + "Write-Error" = @() + "Write-Output" = @() + "Write-Information" = @() + "Write-Debug" = @() +} \ No newline at end of file diff --git a/d365fo.tools/tests/general/FileIntegrity.Tests.ps1 b/d365fo.tools/tests/general/FileIntegrity.Tests.ps1 new file mode 100644 index 00000000..706c22f9 --- /dev/null +++ b/d365fo.tools/tests/general/FileIntegrity.Tests.ps1 @@ -0,0 +1,90 @@ +$moduleRoot = (Resolve-Path "$PSScriptRoot\..\..").Path + +. "$PSScriptRoot\FileIntegrity.Exceptions.ps1" + +function Get-FileEncoding +{ +<# + .SYNOPSIS + Tests a file for encoding. + + .DESCRIPTION + Tests a file for encoding.
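+        Detection is based on the byte order mark in the first four bytes of the file: recognised BOMs are reported as UTF8, Unicode, UTF32 or UTF7, and anything else is reported as 'Unknown'.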
+ + .PARAMETER Path + The file to test +#> + [CmdletBinding()] + Param ( + [Parameter(Mandatory = $True, ValueFromPipelineByPropertyName = $True)] + [Alias('FullName')] + [string] + $Path + ) + + [byte[]]$byte = get-content -Encoding byte -ReadCount 4 -TotalCount 4 -Path $Path + + if ($byte[0] -eq 0xef -and $byte[1] -eq 0xbb -and $byte[2] -eq 0xbf) { 'UTF8' } + elseif ($byte[0] -eq 0xfe -and $byte[1] -eq 0xff) { 'Unicode' } + elseif ($byte[0] -eq 0 -and $byte[1] -eq 0 -and $byte[2] -eq 0xfe -and $byte[3] -eq 0xff) { 'UTF32' } + elseif ($byte[0] -eq 0x2b -and $byte[1] -eq 0x2f -and $byte[2] -eq 0x76) { 'UTF7' } + else { 'Unknown' } +} + +Describe "Verifying integrity of module files" { + Context "Validating PS1 Script files" { + $allFiles = Get-ChildItem -Path $moduleRoot -Recurse -Filter "*.ps1" | Where-Object FullName -NotLike "$moduleRoot\tests\*" + + foreach ($file in $allFiles) + { + $name = $file.FullName.Replace("$moduleRoot\", '') + + It "[$name] Should have UTF8 encoding" { + Get-FileEncoding -Path $file.FullName | Should -Be 'UTF8' + } + + It "[$name] Should have no trailing space" { + ($file | Select-String "\s$" | Where-Object { $_.Line.Trim().Length -gt 0}).LineNumber | Should -BeNullOrEmpty + } + + $tokens = $null + $parseErrors = $null + $ast = [System.Management.Automation.Language.Parser]::ParseFile($file.FullName, [ref]$tokens, [ref]$parseErrors) + + It "[$name] Should have no syntax errors" { + $parseErrors | Should Be $Null + } + + foreach ($command in $global:BannedCommands) + { + if ($global:MayContainCommand["$command"] -notcontains $file.Name) + { + It "[$name] Should not use $command" { + $tokens | Where-Object Text -EQ $command | Should -BeNullOrEmpty + } + } + } + + It "[$name] Should not contain aliases" { + $tokens | Where-Object TokenFlags -eq CommandName | Where-Object { Test-Path "alias:\$($_.Text)" } | Measure-Object | Select-Object -ExpandProperty Count | Should -Be 0 + } + } + } + + Context "Validating help.txt help files" { + $allFiles = Get-ChildItem -Path $moduleRoot -Recurse -Filter "*.help.txt" | Where-Object FullName -NotLike "$moduleRoot\tests\*" + + foreach ($file in $allFiles) + { + $name = $file.FullName.Replace("$moduleRoot\", '') + + It "[$name] Should have UTF8 encoding" { + Get-FileEncoding -Path $file.FullName | Should -Be 'UTF8' + } + + It "[$name] Should have no trailing space" { + ($file | Select-String "\s$" | Where-Object { $_.Line.Trim().Length -gt 0 } | Measure-Object).Count | Should -Be 0 + } + } + } +} \ No newline at end of file diff --git a/d365fo.tools/tests/general/Help.Exceptions.ps1 b/d365fo.tools/tests/general/Help.Exceptions.ps1 new file mode 100644 index 00000000..cfd941f9 --- /dev/null +++ b/d365fo.tools/tests/general/Help.Exceptions.ps1 @@ -0,0 +1,26 @@ +# List of functions that should be ignored +$global:FunctionHelpTestExceptions = @( + +) + +<# + List of arrayed enumerations. These need to be treated differently. Add full name. + Example: + + "Sqlcollaborative.Dbatools.Connection.ManagementConnectionType[]" +#> +$global:HelpTestEnumeratedArrays = @( + +) + +<# + Some types on parameters just fail their validation no matter what. + For those it becomes possible to skip them, by adding them to this hashtable. 
+ Add by following this convention: = @() + Example: + + "Get-DbaCmObject" = @("DoNotUse") +#> +$global:HelpTestSkipParameterType = @{ + +} diff --git a/d365fo.tools/tests/general/Help.Tests.ps1 b/d365fo.tools/tests/general/Help.Tests.ps1 new file mode 100644 index 00000000..d67ffca8 --- /dev/null +++ b/d365fo.tools/tests/general/Help.Tests.ps1 @@ -0,0 +1,200 @@ +<# + .NOTES + The original test this is based upon was written by June Blender. + After several rounds of modifications it stands now as it is, but the honor remains hers. + + Thank you June, for all you have done! + + .DESCRIPTION + This test evaluates the help for all commands in a module. + + .PARAMETER SkipTest + Disables this test. + + .PARAMETER CommandPath + List of paths under which the script files are stored. + This test assumes that all functions have their own file that is named after themselves. + These paths are used to search for commands that should exist and be tested. + Will search recursively and accepts wildcards, make sure only functions are found + + .PARAMETER ModuleName + Name of the module to be tested. + The module must already be imported + + .PARAMETER ExceptionsFile + File in which exceptions and adjustments are configured. + In it there should be two arrays and a hashtable defined: + $global:FunctionHelpTestExceptions + $global:HelpTestEnumeratedArrays + $global:HelpTestSkipParameterType + These can be used to tweak the tests slightly in cases of need. + See the example file for explanations on each of these usage and effect. +#> +[CmdletBinding()] +Param ( + [switch] + $SkipTest, + + [string[]] + $CommandPath = @("$PSScriptRoot\..\..\functions", "$PSScriptRoot\..\..\internal\functions"), + + [string] + $ModuleName = "d365fo.tools", + + [string] + $ExceptionsFile = "$PSScriptRoot\Help.Exceptions.ps1" +) +if ($SkipTest) { return } +. $ExceptionsFile + +$includedNames = (Get-ChildItem $CommandPath -Recurse -File | Where-Object Name -like "*.ps1").BaseName +$commands = Get-Command -Module (Get-Module $ModuleName) -CommandType Cmdlet, Function, Workflow | Where-Object Name -in $includedNames + +## When testing help, remember that help is cached at the beginning of each session. +## To test, restart session. 
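+# Walk every command discovered above and verify that its help has a synopsis, a description, examples and documentation for each parameter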
+ + +foreach ($command in $commands) { + $commandName = $command.Name + + # Skip all functions that are on the exclusions list + if ($global:FunctionHelpTestExceptions -contains $commandName) { continue } + + # The module-qualified command fails on Microsoft.PowerShell.Archive cmdlets + $Help = Get-Help $commandName -ErrorAction SilentlyContinue + $testhelperrors = 0 + $testhelpall = 0 + Describe "Test help for $commandName" { + + $testhelpall += 1 + if ($Help.Synopsis -like '*`[``]*') { + # If help is not found, synopsis in auto-generated help is the syntax diagram + It "should not be auto-generated" { + $Help.Synopsis | Should -Not -BeLike '*`[``]*' + } + $testhelperrors += 1 + } + + $testhelpall += 1 + if ([String]::IsNullOrEmpty($Help.Description.Text)) { + # Should be a description for every function + It "gets description for $commandName" { + $Help.Description | Should -Not -BeNullOrEmpty + } + $testhelperrors += 1 + } + + $testhelpall += 1 + if ([String]::IsNullOrEmpty(($Help.Examples.Example | Select-Object -First 1).Code)) { + # Should be at least one example + It "gets example code from $commandName" { + ($Help.Examples.Example | Select-Object -First 1).Code | Should -Not -BeNullOrEmpty + } + $testhelperrors += 1 + } + + $testhelpall += 1 + if ([String]::IsNullOrEmpty(($Help.Examples.Example.Remarks | Select-Object -First 1).Text)) { + # Should be at least one example description + It "gets example help from $commandName" { + ($Help.Examples.Example.Remarks | Select-Object -First 1).Text | Should -Not -BeNullOrEmpty + } + $testhelperrors += 1 + } + + if ($testhelperrors -eq 0) { + It "Ran silently $testhelpall tests" { + $testhelperrors | Should -be 0 + } + } + + $testparamsall = 0 + $testparamserrors = 0 + Context "Test parameter help for $commandName" { + + $Common = 'Debug', 'ErrorAction', 'ErrorVariable', 'InformationAction', 'InformationVariable', 'OutBuffer', 'OutVariable', + 'PipelineVariable', 'Verbose', 'WarningAction', 'WarningVariable' + + $parameters = $command.ParameterSets.Parameters | Sort-Object -Property Name -Unique | Where-Object Name -notin $common + $parameterNames = $parameters.Name + $HelpParameterNames = $Help.Parameters.Parameter.Name | Sort-Object -Unique + foreach ($parameter in $parameters) { + $parameterName = $parameter.Name + $parameterHelp = $Help.parameters.parameter | Where-Object Name -EQ $parameterName + + $testparamsall += 1 + if ([String]::IsNullOrEmpty($parameterHelp.Description.Text)) { + # Should be a description for every parameter + It "gets help for parameter: $parameterName : in $commandName" { + $parameterHelp.Description.Text | Should -Not -BeNullOrEmpty + } + $testparamserrors += 1 + } + + $testparamsall += 1 + $codeMandatory = $parameter.IsMandatory.toString() + if ($parameterHelp.Required -ne $codeMandatory) { + # Required value in Help should match IsMandatory property of parameter + It "help for $parameterName parameter in $commandName has correct Mandatory value" { + $parameterHelp.Required | Should -Be $codeMandatory + } + $testparamserrors += 1 + } + + if ($HelpTestSkipParameterType[$commandName] -contains $parameterName) { continue } + + $codeType = $parameter.ParameterType.Name + + $testparamsall += 1 + if ($parameter.ParameterType.IsEnum) { + # Enumerations often have issues with the typename not being reliably available + $names = $parameter.ParameterType::GetNames($parameter.ParameterType) + if ($parameterHelp.parameterValueGroup.parameterValue -ne $names) { + # Parameter type in Help should match code + It "help for 
$commandName has correct parameter type for $parameterName" { + $parameterHelp.parameterValueGroup.parameterValue | Should -be $names + } + $testparamserrors += 1 + } + } + elseif ($parameter.ParameterType.FullName -in $HelpTestEnumeratedArrays) { + # Enumerations often have issues with the typename not being reliably available + $names = [Enum]::GetNames($parameter.ParameterType.DeclaredMembers[0].ReturnType) + if ($parameterHelp.parameterValueGroup.parameterValue -ne $names) { + # Parameter type in Help should match code + It "help for $commandName has correct parameter type for $parameterName" { + $parameterHelp.parameterValueGroup.parameterValue | Should -be $names + } + $testparamserrors += 1 + } + } + else { + # To avoid calling Trim method on a null object. + $helpType = if ($parameterHelp.parameterValue) { $parameterHelp.parameterValue.Trim() } + if ($helpType -ne $codeType) { + # Parameter type in Help should match code + It "help for $commandName has correct parameter type for $parameterName" { + $helpType | Should -be $codeType + } + $testparamserrors += 1 + } + } + } + foreach ($helpParm in $HelpParameterNames) { + $testparamsall += 1 + if ($helpParm -notin $parameterNames) { + # Shouldn't find extra parameters in help. + It "finds help parameter in code: $helpParm" { + $helpParm -in $parameterNames | Should -Be $true + } + $testparamserrors += 1 + } + } + if ($testparamserrors -eq 0) { + It "Ran silently $testparamsall tests" { + $testparamserrors | Should -be 0 + } + } + } + } +} \ No newline at end of file diff --git a/d365fo.tools/tests/general/Manifest.Tests.ps1 b/d365fo.tools/tests/general/Manifest.Tests.ps1 new file mode 100644 index 00000000..e9a45d22 --- /dev/null +++ b/d365fo.tools/tests/general/Manifest.Tests.ps1 @@ -0,0 +1,53 @@ +Describe "Validating the module manifest" { + $moduleRoot = (Resolve-Path "$PSScriptRoot\..\..").Path + $manifest = ((Get-Content "$moduleRoot\d365fo.tools.psd1") -join "`n") | Invoke-Expression + [version]$moduleVersion = Get-Item "$moduleRoot\d365fo.tools.psm1" | Select-String -Pattern '\$script:ModuleVersion = "(.*?)"' | ForEach-Object { $_.Matches[0].Groups[1].Value } + Context "Basic resources validation" { + $files = Get-ChildItem "$moduleRoot\functions" -Recurse -File -Filter "*.ps1" + It "Exports all functions in the public folder" { + + $functions = (Compare-Object -ReferenceObject $files.BaseName -DifferenceObject $manifest.FunctionsToExport | Where-Object SideIndicator -Like '<=').InputObject + $functions | Should -BeNullOrEmpty + } + It "Exports no function that isn't also present in the public folder" { + $functions = (Compare-Object -ReferenceObject $files.BaseName -DifferenceObject $manifest.FunctionsToExport | Where-Object SideIndicator -Like '=>').InputObject + $functions | Should -BeNullOrEmpty + } + + It "Exports none of its internal functions" { + $files = Get-ChildItem "$moduleRoot\internal\functions" -Recurse -File -Filter "*.ps1" + $files | Where-Object BaseName -In $manifest.FunctionsToExport | Should -BeNullOrEmpty + } + + It "Has the same version as the psm1 file" { + ([version]$manifest.ModuleVersion) | Should -Be $moduleVersion + } + } + + Context "Individual file validation" { + It "The root module file exists" { + Test-Path "$moduleRoot\$($manifest.RootModule)" | Should -Be $true + } + + foreach ($format in $manifest.FormatsToProcess) + { + It "The file $format should exist" { + Test-Path "$moduleRoot\$format" | Should -Be $true + } + } + + foreach ($type in $manifest.TypesToProcess) + { + It "The file $type 
should exist" { + Test-Path "$moduleRoot\$type" | Should -Be $true + } + } + + foreach ($assembly in $manifest.RequiredAssemblies) + { + It "The file $assembly should exist" { + Test-Path "$moduleRoot\$assembly" | Should -Be $true + } + } + } +} \ No newline at end of file diff --git a/d365fo.tools/tests/general/PSScriptAnalyzer.Tests.ps1 b/d365fo.tools/tests/general/PSScriptAnalyzer.Tests.ps1 new file mode 100644 index 00000000..3884aece --- /dev/null +++ b/d365fo.tools/tests/general/PSScriptAnalyzer.Tests.ps1 @@ -0,0 +1,42 @@ +[CmdletBinding()] +Param ( + [switch] + $SkipTest, + + [string[]] + $CommandPath = @("$PSScriptRoot\..\..\functions", "$PSScriptRoot\..\..\internal\functions") +) + +if ($SkipTest) { return } + +$list = New-Object System.Collections.ArrayList + +Describe 'Invoking PSScriptAnalyzer against commandbase' { + $commandFiles = Get-ChildItem -Path $CommandPath -Recurse -Filter "*.ps1" + $scriptAnalyzerRules = Get-ScriptAnalyzerRule + + foreach ($file in $commandFiles) + { + Context "Analyzing $($file.BaseName)" { + $analysis = Invoke-ScriptAnalyzer -Path $file.FullName -ExcludeRule PSAvoidTrailingWhitespace, PSShouldProcess + + forEach ($rule in $scriptAnalyzerRules) + { + It "Should pass $rule" { + If ($analysis.RuleName -contains $rule) + { + $analysis | Where-Object RuleName -EQ $rule -outvariable failures | ForEach-Object { $list.Add($_) } + + 1 | Should Be 0 + } + else + { + 0 | Should Be 0 + } + } + } + } + } +} + +$list | Out-Default \ No newline at end of file diff --git a/d365fo.tools/tests/pester.ps1 b/d365fo.tools/tests/pester.ps1 new file mode 100644 index 00000000..0f3e1d2f --- /dev/null +++ b/d365fo.tools/tests/pester.ps1 @@ -0,0 +1,91 @@ +param ( + $TestGeneral = $true, + + $TestFunctions = $true, + + [ValidateSet('None', 'Default', 'Passed', 'Failed', 'Pending', 'Skipped', 'Inconclusive', 'Describe', 'Context', 'Summary', 'Header', 'Fails', 'All')] + $Show = "None", + + $Include = "*", + + $Exclude = "" +) + +Write-PSFMessage -Level Important -Message "Starting Tests" + +Write-PSFMessage -Level Important -Message "Importing Module" + +Remove-Module d365fo.tools -ErrorAction Ignore +Import-Module "$PSScriptRoot\..\d365fo.tools.psd1" +Import-Module "$PSScriptRoot\..\d365fo.tools.psm1" -Force + +Write-PSFMessage -Level Important -Message "Creating test result folder" +$null = New-Item -Path "$PSScriptRoot\..\.." 
-Name TestResults -ItemType Directory -Force + +$totalFailed = 0 +$totalRun = 0 + +$testresults = @() + +#region Run General Tests +Write-PSFMessage -Level Important -Message "Modules imported, proceeding with general tests" +foreach ($file in (Get-ChildItem "$PSScriptRoot\general" -Filter "*.Tests.ps1")) +{ + Write-PSFMessage -Level Significant -Message "  Executing $($file.Name)" + $TestOuputFile = Join-Path "$PSScriptRoot\..\..\TestResults" "TEST-$($file.BaseName).xml" + $results = Invoke-Pester -Script $file.FullName -Show $Show -PassThru -OutputFile $TestOuputFile -OutputFormat NUnitXml + foreach ($result in $results) + { + $totalRun += $result.TotalCount + $totalFailed += $result.FailedCount + $result.TestResult | Where-Object { -not $_.Passed } | ForEach-Object { + $name = $_.Name + $testresults += [pscustomobject]@{ + Describe = $_.Describe + Context = $_.Context + Name = "It $name" + Result = $_.Result + Message = $_.FailureMessage + } + } + } +} +#endregion Run General Tests + +#region Test Commands +Write-PSFMessage -Level Important -Message "Proceeding with individual tests" +foreach ($file in (Get-ChildItem "$PSScriptRoot\functions" -Recurse -File -Filter "*Tests.ps1")) +{ + if ($file.Name -notlike $Include) { continue } + if ($file.Name -like $Exclude) { continue } + + Write-PSFMessage -Level Significant -Message "  Executing $($file.Name)" + $TestOuputFile = Join-Path "$PSScriptRoot\..\..\TestResults" "TEST-$($file.BaseName).xml" + $results = Invoke-Pester -Script $file.FullName -Show $Show -PassThru -OutputFile $TestOuputFile -OutputFormat NUnitXml + foreach ($result in $results) + { + $totalRun += $result.TotalCount + $totalFailed += $result.FailedCount + $result.TestResult | Where-Object { -not $_.Passed } | ForEach-Object { + $name = $_.Name + $testresults += [pscustomobject]@{ + Describe = $_.Describe + Context = $_.Context + Name = "It $name" + Result = $_.Result + Message = $_.FailureMessage + } + } + } +} +#endregion Test Commands + +$testresults | Sort-Object Describe, Context, Name, Result, Message | Format-List + +if ($totalFailed -eq 0) { Write-PSFMessage -Level Critical -Message "All $totalRun tests executed without a single failure!" } +else { Write-PSFMessage -Level Critical -Message "$totalFailed tests out of $totalRun tests failed!" } + +if ($totalFailed -gt 0) +{ + throw "$totalFailed / $totalRun tests failed!" +} \ No newline at end of file diff --git a/d365fo.tools/tests/readme.md b/d365fo.tools/tests/readme.md new file mode 100644 index 00000000..bc36928e --- /dev/null +++ b/d365fo.tools/tests/readme.md @@ -0,0 +1,31 @@ +# Description + +This is the folder where all the tests go. + +They are subdivided into two categories: + + - General + - Function + +## General Tests + +General tests are function-generic and test for general policies. + +These tests can answer questions such as: + + - Is my module following my style guides? + - Do any of my scripts have a syntax error? + - Do my scripts use commands I do not want them to use? + - Do my commands follow best practices? + - Do my commands have proper help? + +Basically, these allow a general module health check. + +These tests are already provided as part of the template. + +## Function Tests + +A healthy module should provide unit and integration tests for the commands & components it ships. +Only then can it be guaranteed that they will actually perform as promised. + +However, as each such test must be specific to the function it tests, there cannot be much in the way of templates.
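+
+As a rough starting point, a minimal function test could look like the sketch below (the command name `Get-D365Something` and the asserted property are purely hypothetical placeholders):
+
+```
+Describe "Get-D365Something" {
+    It "Should return an object with a Name property" {
+        $result = Get-D365Something
+        $result.Name | Should -Not -BeNullOrEmpty
+    }
+}
+```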
\ No newline at end of file diff --git a/d365fo.tools/xml/d365fo.tools.Format.ps1xml b/d365fo.tools/xml/d365fo.tools.Format.ps1xml new file mode 100644 index 00000000..489acd33 --- /dev/null +++ b/d365fo.tools/xml/d365fo.tools.Format.ps1xml @@ -0,0 +1,31 @@ + + + + + + Foo.Bar + + Foo.Bar + + + + + + + + + + + + Foo + + + Bar + + + + + + + + \ No newline at end of file diff --git a/d365fo.tools/xml/d365fo.tools.Types.ps1xml b/d365fo.tools/xml/d365fo.tools.Types.ps1xml new file mode 100644 index 00000000..f96239de --- /dev/null +++ b/d365fo.tools/xml/d365fo.tools.Types.ps1xml @@ -0,0 +1,37 @@ + + + + + Deserialized.Foo.Bar + + + PSStandardMembers + + + + TargetTypeForDeserialization + + + Foo.Bar + + + + + + + + Foo.Bar + + + SerializationData + + PSFramework.Serialization.SerializationTypeConverter + GetSerializationData + + + + + PSFramework.Serialization.SerializationTypeConverter + + + \ No newline at end of file diff --git a/d365fo.tools/xml/readme.md b/d365fo.tools/xml/readme.md new file mode 100644 index 00000000..d5d7bb0c --- /dev/null +++ b/d365fo.tools/xml/readme.md @@ -0,0 +1,43 @@ +# XML + +This is the folder where project XML files go, notably: + + - Format XML + - Type Extension XML + +External help files should _not_ be placed in this folder! + +## Notes on Files and Naming + +There should be only one format file and one type extension file per project, as importing them has a notable impact on import times. + + - The Format XML should be named `d365fo.tools.Format.ps1xml` + - The Type Extension XML should be named `d365fo.tools.Types.ps1xml` + +## Tools + +### New-PSMDFormatTableDefinition + +This function will take an input object and generate format xml for an auto-sized table. + +It provides a simple way to get started with formats. + +### Get-PSFTypeSerializationData + +``` +C# Warning! +This section is only interest if you're using C# together with PowerShell. +``` + +This function generates type extension XML that allows PowerShell to convert types written in C# to be written to file and restored from it without being 'Deserialized'. Also works for jobs or remoting, if both sides have the `PSFramework` module and type extension loaded. + +In order for a class to be eligible for this, it needs to conform to the following rules: + + - Have the `[Serializable]` attribute + - Be public + - Have an empty constructor + - Allow all public properties/fields to be set (even if setting it doesn't do anything) without throwing an exception. + +``` +non-public properties and fields will be lost in this process! +``` \ No newline at end of file diff --git a/install.ps1 b/install.ps1 new file mode 100644 index 00000000..cebea019 --- /dev/null +++ b/install.ps1 @@ -0,0 +1,2414 @@ +<# + .SYNOPSIS + Installs the d365fo.tools Module from github + + .DESCRIPTION + This script installs the d365fo.tools Module from github. + + It does so by ... + - downloading the specified branch as zip to $env:TEMP + - Unpacking that zip file to a folder in $env:TEMP + - Moving that content to a module folder in either program files (default) or the user profile + + .PARAMETER Branch + The branch to install. Installs master by default. + Unknown branches will terminate the script in error. + + .PARAMETER UserMode + The downloaded module will be moved to the user profile, rather than program files. + + .PARAMETER Force + The install script will overwrite an existing module. 
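+
+ .EXAMPLE
+ PS C:\> .\install.ps1
+
+ Downloads the master branch and installs the d365fo.tools module into the Program Files module folder.
+
+ .EXAMPLE
+ PS C:\> .\install.ps1 -UserMode -Force
+
+ Downloads the master branch, installs the module into the current user's profile instead and overwrites any copy of the module that is already present there.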
+#> +[CmdletBinding()] +Param ( + [string] + $Branch = "master", + + [switch] + $UserMode, + + [switch] + $Force +) + +#region Configuration for cloning script +# Name of the module that is being cloned +$ModuleName = "d365fo.tools" + +# Base path to the github repository +$BaseUrl = "https://github.com//d365fo.tools" + +# If the module is in a subfolder of the cloned repository, specify relative path here. Empty string to skip. +$SubFolder = "d365fo.tools" +#endregion Configuration for cloning script + +#region Utility Functions +function Compress-Archive +{ + <# + .SYNOPSIS + Creates an archive, or zipped file, from specified files and folders. + + .DESCRIPTION + The Compress-Archive cmdlet creates a zipped (or compressed) archive file from one or more specified files or folders. An archive file allows multiple files to be packaged, and optionally compressed, into a single zipped file for easier distribution and storage. An archive file can be compressed by using the compression algorithm specified by the CompressionLevel parameter. + + Because Compress-Archive relies upon the Microsoft .NET Framework API System.IO.Compression.ZipArchive to compress files, the maximum file size that you can compress by using Compress-Archive is currently 2 GB. This is a limitation of the underlying API. + + .PARAMETER Path + Specifies the path or paths to the files that you want to add to the archive zipped file. This parameter can accept wildcard characters. Wildcard characters allow you to add all files in a folder to your zipped archive file. To specify multiple paths, and include files in multiple locations in your output zipped file, use commas to separate the paths. + + .PARAMETER LiteralPath + Specifies the path or paths to the files that you want to add to the archive zipped file. Unlike the Path parameter, the value of LiteralPath is used exactly as it is typed. No characters are interpreted as wildcards. If the path includes escape characters, enclose each escape character in single quotation marks, to instruct Windows PowerShell not to interpret any characters as escape sequences. To specify multiple paths, and include files in multiple locations in your output zipped file, use commas to separate the paths. + + .PARAMETER DestinationPath + Specifies the path to the archive output file. This parameter is required. The specified DestinationPath value should include the desired name of the output zipped file; it specifies either the absolute or relative path to the zipped file. If the file name specified in DestinationPath does not have a .zip file name extension, the cmdlet adds a .zip file name extension. + + .PARAMETER CompressionLevel + Specifies how much compression to apply when you are creating the archive file. Faster compression requires less time to create the file, but can result in larger file sizes. The acceptable values for this parameter are: + + - Fastest. Use the fastest compression method available to decrease processing time; this can result in larger file sizes. + - NoCompression. Do not compress the source files. + - Optimal. Processing time is dependent on file size. + + If this parameter is not specified, the command uses the default value, Optimal. + + .PARAMETER Update + Updates the specified archive by replacing older versions of files in the archive with newer versions of files that have the same names. You can also add this parameter to add files to an existing archive. 
+ + .PARAMETER Force + @{Text=} + + .PARAMETER Confirm + Prompts you for confirmation before running the cmdlet. + + .PARAMETER WhatIf + Shows what would happen if the cmdlet runs. The cmdlet is not run. + + .EXAMPLE + Example 1: Create an archive file + + PS C:\>Compress-Archive -LiteralPath C:\Reference\Draftdoc.docx, C:\Reference\Images\diagram2.vsd -CompressionLevel Optimal -DestinationPath C:\Archives\Draft.Zip + + This command creates a new archive file, Draft.zip, by compressing two files, Draftdoc.docx and diagram2.vsd, specified by the LiteralPath parameter. The compression level specified for this operation is Optimal. + + .EXAMPLE + Example 2: Create an archive with wildcard characters + + PS C:\>Compress-Archive -Path C:\Reference\* -CompressionLevel Fastest -DestinationPath C:\Archives\Draft + + This command creates a new archive file, Draft.zip, in the C:\Archives folder. Note that though the file name extension .zip was not added to the value of the DestinationPath parameter, Windows PowerShell appends this to the specified archive file name automatically. The new archive file contains every file in the C:\Reference folder, because a wildcard character was used in place of specific file names in the Path parameter. The specified compression level is Fastest, which might result in a larger output file, but compresses a large number of files faster. + + .EXAMPLE + Example 3: Update an existing archive file + + PS C:\>Compress-Archive -Path C:\Reference\* -Update -DestinationPath C:\Archives\Draft.Zip + + This command updates an existing archive file, Draft.Zip, in the C:\Archives folder. The command is run to update Draft.Zip with newer versions of existing files that came from the C:\Reference folder, and also to add new files that have been added to C:\Reference since Draft.Zip was initially created. + + .EXAMPLE + Example 4: Create an archive from an entire folder + + PS C:\>Compress-Archive -Path C:\Reference -DestinationPath C:\Archives\Draft + + This command creates an archive from an entire folder, C:\Reference. Note that though the file name extension .zip was not added to the value of the DestinationPath parameter, Windows PowerShell appends this to the specified archive file name automatically. 
+ #> + [CmdletBinding(DefaultParameterSetName = "Path", SupportsShouldProcess = $true, HelpUri = "http://go.microsoft.com/fwlink/?LinkID=393252")] + param + ( + [parameter (mandatory = $true, Position = 0, ParameterSetName = "Path", ValueFromPipeline = $true, ValueFromPipelineByPropertyName = $true)] + [parameter (mandatory = $true, Position = 0, ParameterSetName = "PathWithForce", ValueFromPipeline = $true, ValueFromPipelineByPropertyName = $true)] + [parameter (mandatory = $true, Position = 0, ParameterSetName = "PathWithUpdate", ValueFromPipeline = $true, ValueFromPipelineByPropertyName = $true)] + [ValidateNotNullOrEmpty()] + [string[]] + $Path, + + [parameter (mandatory = $true, ParameterSetName = "LiteralPath", ValueFromPipeline = $false, ValueFromPipelineByPropertyName = $true)] + [parameter (mandatory = $true, ParameterSetName = "LiteralPathWithForce", ValueFromPipeline = $false, ValueFromPipelineByPropertyName = $true)] + [parameter (mandatory = $true, ParameterSetName = "LiteralPathWithUpdate", ValueFromPipeline = $false, ValueFromPipelineByPropertyName = $true)] + [ValidateNotNullOrEmpty()] + [Alias("PSPath")] + [string[]] + $LiteralPath, + + [parameter (mandatory = $true, + Position = 1, + ValueFromPipeline = $false, + ValueFromPipelineByPropertyName = $false)] + [ValidateNotNullOrEmpty()] + [string] + $DestinationPath, + + [parameter ( + mandatory = $false, + ValueFromPipeline = $false, + ValueFromPipelineByPropertyName = $false)] + [ValidateSet("Optimal", "NoCompression", "Fastest")] + [string] + $CompressionLevel = "Optimal", + + [parameter(mandatory = $true, ParameterSetName = "PathWithUpdate", ValueFromPipeline = $false, ValueFromPipelineByPropertyName = $false)] + [parameter(mandatory = $true, ParameterSetName = "LiteralPathWithUpdate", ValueFromPipeline = $false, ValueFromPipelineByPropertyName = $false)] + [switch] + $Update = $false, + + [parameter(mandatory = $true, ParameterSetName = "PathWithForce", ValueFromPipeline = $false, ValueFromPipelineByPropertyName = $false)] + [parameter(mandatory = $true, ParameterSetName = "LiteralPathWithForce", ValueFromPipeline = $false, ValueFromPipelineByPropertyName = $false)] + [switch] + $Force = $false + ) + + BEGIN + { + Add-Type -AssemblyName System.IO.Compression -ErrorAction Ignore + Add-Type -AssemblyName System.IO.Compression.FileSystem -ErrorAction Ignore + + $zipFileExtension = ".zip" + + $LocalizedData = ConvertFrom-StringData @' +PathNotFoundError=The path '{0}' either does not exist or is not a valid file system path. +ExpandArchiveInValidDestinationPath=The path '{0}' is not a valid file system directory path. +InvalidZipFileExtensionError={0} is not a supported archive file format. {1} is the only supported archive file format. +ArchiveFileIsReadOnly=The attributes of the archive file {0} is set to 'ReadOnly' hence it cannot be updated. If you intend to update the existing archive file, remove the 'ReadOnly' attribute on the archive file else use -Force parameter to override and create a new archive file. +ZipFileExistError=The archive file {0} already exists. Use the -Update parameter to update the existing archive file or use the -Force parameter to overwrite the existing archive file. +DuplicatePathFoundError=The input to {0} parameter contains a duplicate path '{1}'. Provide a unique set of paths as input to {2} parameter. +ArchiveFileIsEmpty=The archive file {0} is empty. +CompressProgressBarText=The archive file '{0}' creation is in progress... 
+ExpandProgressBarText=The archive file '{0}' expansion is in progress... +AppendArchiveFileExtensionMessage=The archive file path '{0}' supplied to the DestinationPath patameter does not include .zip extension. Hence .zip is appended to the supplied DestinationPath path and the archive file would be created at '{1}'. +AddItemtoArchiveFile=Adding '{0}'. +CreateFileAtExpandedPath=Created '{0}'. +InvalidArchiveFilePathError=The archive file path '{0}' specified as input to the {1} parameter is resolving to multiple file system paths. Provide a unique path to the {2} parameter where the archive file has to be created. +InvalidExpandedDirPathError=The directory path '{0}' specified as input to the DestinationPath parameter is resolving to multiple file system paths. Provide a unique path to the Destination parameter where the archive file contents have to be expanded. +FileExistsError=Failed to create file '{0}' while expanding the archive file '{1}' contents as the file '{2}' already exists. Use the -Force parameter if you want to overwrite the existing directory '{3}' contents when expanding the archive file. +DeleteArchiveFile=The partially created archive file '{0}' is deleted as it is not usable. +InvalidDestinationPath=The destination path '{0}' does not contain a valid archive file name. +PreparingToCompressVerboseMessage=Preparing to compress... +PreparingToExpandVerboseMessage=Preparing to expand... +'@ + + #region Utility Functions + function GetResolvedPathHelper + { + param + ( + [string[]] + $path, + + [boolean] + $isLiteralPath, + + [System.Management.Automation.PSCmdlet] + $callerPSCmdlet + ) + + $resolvedPaths = @() + + # null and empty check are are already done on Path parameter at the cmdlet layer. + foreach ($currentPath in $path) + { + try + { + if ($isLiteralPath) + { + $currentResolvedPaths = Resolve-Path -LiteralPath $currentPath -ErrorAction Stop + } + else + { + $currentResolvedPaths = Resolve-Path -Path $currentPath -ErrorAction Stop + } + } + catch + { + $errorMessage = ($LocalizedData.PathNotFoundError -f $currentPath) + $exception = New-Object System.InvalidOperationException $errorMessage, $_.Exception + $errorRecord = CreateErrorRecordHelper "ArchiveCmdletPathNotFound" $null ([System.Management.Automation.ErrorCategory]::InvalidArgument) $exception $currentPath + $callerPSCmdlet.ThrowTerminatingError($errorRecord) + } + + foreach ($currentResolvedPath in $currentResolvedPaths) + { + $resolvedPaths += $currentResolvedPath.ProviderPath + } + } + + $resolvedPaths + } + + function Add-CompressionAssemblies + { + + if ($PSEdition -eq "Desktop") + { + Add-Type -AssemblyName System.IO.Compression + Add-Type -AssemblyName System.IO.Compression.FileSystem + } + } + + function IsValidFileSystemPath + { + param + ( + [string[]] + $path + ) + + $result = $true; + + # null and empty check are are already done on Path parameter at the cmdlet layer. + foreach ($currentPath in $path) + { + if (!([System.IO.File]::Exists($currentPath) -or [System.IO.Directory]::Exists($currentPath))) + { + $errorMessage = ($LocalizedData.PathNotFoundError -f $currentPath) + ThrowTerminatingErrorHelper "PathNotFound" $errorMessage ([System.Management.Automation.ErrorCategory]::InvalidArgument) $currentPath + } + } + + return $result; + } + + + function ValidateDuplicateFileSystemPath + { + param + ( + [string] + $inputParameter, + + [string[]] + $path + ) + + $uniqueInputPaths = @() + + # null and empty check are are already done on Path parameter at the cmdlet layer. 
+ foreach ($currentPath in $path) + { + $currentInputPath = $currentPath.ToUpper() + if ($uniqueInputPaths.Contains($currentInputPath)) + { + $errorMessage = ($LocalizedData.DuplicatePathFoundError -f $inputParameter, $currentPath, $inputParameter) + ThrowTerminatingErrorHelper "DuplicatePathFound" $errorMessage ([System.Management.Automation.ErrorCategory]::InvalidArgument) $currentPath + } + else + { + $uniqueInputPaths += $currentInputPath + } + } + } + + function CompressionLevelMapper + { + param + ( + [string] + $compressionLevel + ) + + $compressionLevelFormat = [System.IO.Compression.CompressionLevel]::Optimal + + # CompressionLevel format is already validated at the cmdlet layer. + switch ($compressionLevel.ToString()) + { + "Fastest" + { + $compressionLevelFormat = [System.IO.Compression.CompressionLevel]::Fastest + } + "NoCompression" + { + $compressionLevelFormat = [System.IO.Compression.CompressionLevel]::NoCompression + } + } + + return $compressionLevelFormat + } + + function CompressArchiveHelper + { + param + ( + [string[]] + $sourcePath, + + [string] + $destinationPath, + + [string] + $compressionLevel, + + [bool] + $isUpdateMode + ) + + $numberOfItemsArchived = 0 + $sourceFilePaths = @() + $sourceDirPaths = @() + + foreach ($currentPath in $sourcePath) + { + $result = Test-Path -LiteralPath $currentPath -PathType Leaf + if ($result -eq $true) + { + $sourceFilePaths += $currentPath + } + else + { + $sourceDirPaths += $currentPath + } + } + + # The Soure Path contains one or more directory (this directory can have files under it) and no files to be compressed. + if ($sourceFilePaths.Count -eq 0 -and $sourceDirPaths.Count -gt 0) + { + $currentSegmentWeight = 100/[double]$sourceDirPaths.Count + $previousSegmentWeight = 0 + foreach ($currentSourceDirPath in $sourceDirPaths) + { + $count = CompressSingleDirHelper $currentSourceDirPath $destinationPath $compressionLevel $true $isUpdateMode $previousSegmentWeight $currentSegmentWeight + $numberOfItemsArchived += $count + $previousSegmentWeight += $currentSegmentWeight + } + } + + # The Soure Path contains only files to be compressed. + elseIf ($sourceFilePaths.Count -gt 0 -and $sourceDirPaths.Count -eq 0) + { + # $previousSegmentWeight is equal to 0 as there are no prior segments. + # $currentSegmentWeight is set to 100 as all files have equal weightage. + $previousSegmentWeight = 0 + $currentSegmentWeight = 100 + + $numberOfItemsArchived = CompressFilesHelper $sourceFilePaths $destinationPath $compressionLevel $isUpdateMode $previousSegmentWeight $currentSegmentWeight + } + # The Soure Path contains one or more files and one or more directories (this directory can have files under it) to be compressed. + elseif ($sourceFilePaths.Count -gt 0 -and $sourceDirPaths.Count -gt 0) + { + # each directory is considered as an individual segments & all the individual files are clubed in to a separate sgemnet. 
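+            # In other words: the progress bar is split into one segment per source directory, plus one shared segment for all the loose files handled afterwards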
+ $currentSegmentWeight = 100/[double]($sourceDirPaths.Count + 1) + $previousSegmentWeight = 0 + + foreach ($currentSourceDirPath in $sourceDirPaths) + { + $count = CompressSingleDirHelper $currentSourceDirPath $destinationPath $compressionLevel $true $isUpdateMode $previousSegmentWeight $currentSegmentWeight + $numberOfItemsArchived += $count + $previousSegmentWeight += $currentSegmentWeight + } + + $count = CompressFilesHelper $sourceFilePaths $destinationPath $compressionLevel $isUpdateMode $previousSegmentWeight $currentSegmentWeight + $numberOfItemsArchived += $count + } + + return $numberOfItemsArchived + } + + function CompressFilesHelper + { + param + ( + [string[]] + $sourceFilePaths, + + [string] + $destinationPath, + + [string] + $compressionLevel, + + [bool] + $isUpdateMode, + + [double] + $previousSegmentWeight, + + [double] + $currentSegmentWeight + ) + + $numberOfItemsArchived = ZipArchiveHelper $sourceFilePaths $destinationPath $compressionLevel $isUpdateMode $null $previousSegmentWeight $currentSegmentWeight + + return $numberOfItemsArchived + } + + function CompressSingleDirHelper + { + param + ( + [string] + $sourceDirPath, + + [string] + $destinationPath, + + [string] + $compressionLevel, + + [bool] + $useParentDirAsRoot, + + [bool] + $isUpdateMode, + + [double] + $previousSegmentWeight, + + [double] + $currentSegmentWeight + ) + + [System.Collections.Generic.List[System.String]]$subDirFiles = @() + + if ($useParentDirAsRoot) + { + $sourceDirInfo = New-Object -TypeName System.IO.DirectoryInfo -ArgumentList $sourceDirPath + $sourceDirFullName = $sourceDirInfo.Parent.FullName + + # If the directory is present at the drive level the DirectoryInfo.Parent include '\' example: C:\ + # On the other hand if the directory exists at a deper level then DirectoryInfo.Parent + # has just the path (without an ending '\'). example C:\source + if ($sourceDirFullName.Length -eq 3) + { + $modifiedSourceDirFullName = $sourceDirFullName + } + else + { + $modifiedSourceDirFullName = $sourceDirFullName + "\" + } + } + else + { + $sourceDirFullName = $sourceDirPath + $modifiedSourceDirFullName = $sourceDirFullName + "\" + } + + $dirContents = Get-ChildItem -LiteralPath $sourceDirPath -Recurse + foreach ($currentContent in $dirContents) + { + $isContainer = $currentContent -is [System.IO.DirectoryInfo] + if (!$isContainer) + { + $subDirFiles.Add($currentContent.FullName) + } + else + { + # The currentContent points to a directory. + # We need to check if the directory is an empty directory, if so such a + # directory has to be explictly added to the archive file. + # if there are no files in the directory the GetFiles() API returns an empty array. 
+ $files = $currentContent.GetFiles() + if ($files.Count -eq 0) + { + $subDirFiles.Add($currentContent.FullName + "\") + } + } + } + + $numberOfItemsArchived = ZipArchiveHelper $subDirFiles.ToArray() $destinationPath $compressionLevel $isUpdateMode $modifiedSourceDirFullName $previousSegmentWeight $currentSegmentWeight + + return $numberOfItemsArchived + } + + function ZipArchiveHelper + { + param + ( + [System.Collections.Generic.List[System.String]] + $sourcePaths, + + [string] + $destinationPath, + + [string] + $compressionLevel, + + [bool] + $isUpdateMode, + + [string] + $modifiedSourceDirFullName, + + [double] + $previousSegmentWeight, + + [double] + $currentSegmentWeight + ) + + $numberOfItemsArchived = 0 + $fileMode = [System.IO.FileMode]::Create + $result = Test-Path -LiteralPath $DestinationPath -PathType Leaf + if ($result -eq $true) + { + $fileMode = [System.IO.FileMode]::Open + } + + Add-CompressionAssemblies + + try + { + # At this point we are sure that the archive file has write access. + $archiveFileStreamArgs = @($destinationPath, $fileMode) + $archiveFileStream = New-Object -TypeName System.IO.FileStream -ArgumentList $archiveFileStreamArgs + + $zipArchiveArgs = @($archiveFileStream, [System.IO.Compression.ZipArchiveMode]::Update, $false) + $zipArchive = New-Object -TypeName System.IO.Compression.ZipArchive -ArgumentList $zipArchiveArgs + + $currentEntryCount = 0 + $progressBarStatus = ($LocalizedData.CompressProgressBarText -f $destinationPath) + $bufferSize = 4kb + $buffer = New-Object Byte[] $bufferSize + + foreach ($currentFilePath in $sourcePaths) + { + if ($modifiedSourceDirFullName -ne $null -and $modifiedSourceDirFullName.Length -gt 0) + { + $index = $currentFilePath.IndexOf($modifiedSourceDirFullName, [System.StringComparison]::OrdinalIgnoreCase) + $currentFilePathSubString = $currentFilePath.Substring($index, $modifiedSourceDirFullName.Length) + $relativeFilePath = $currentFilePath.Replace($currentFilePathSubString, "").Trim() + } + else + { + $relativeFilePath = [System.IO.Path]::GetFileName($currentFilePath) + } + + # Update mode is selected. + # Check to see if archive file already contains one or more zip files in it. + if ($isUpdateMode -eq $true -and $zipArchive.Entries.Count -gt 0) + { + $entryToBeUpdated = $null + + # Check if the file already exists in the archive file. + # If so replace it with new file from the input source. + # If the file does not exist in the archive file then default to + # create mode and create the entry in the archive file. + + foreach ($currentArchiveEntry in $zipArchive.Entries) + { + if ($currentArchiveEntry.FullName -eq $relativeFilePath) + { + $entryToBeUpdated = $currentArchiveEntry + break + } + } + + if ($entryToBeUpdated -ne $null) + { + $addItemtoArchiveFileMessage = ($LocalizedData.AddItemtoArchiveFile -f $currentFilePath) + $entryToBeUpdated.Delete() + } + } + + $compression = CompressionLevelMapper $compressionLevel + + # If a directory needs to be added to an archive file, + # by convention the .Net API's expect the path of the diretcory + # to end with '\' to detect the path as an directory. + if (!$relativeFilePath.EndsWith("\", [StringComparison]::OrdinalIgnoreCase)) + { + try + { + try + { + $currentFileStream = [System.IO.File]::Open($currentFilePath, [System.IO.FileMode]::Open, [System.IO.FileAccess]::Read) + } + catch + { + # Failed to access the file. Write a non terminating error to the pipeline + # and move on with the remaining files. 
+ $exception = $_.Exception + if ($null -ne $_.Exception -and + $null -ne $_.Exception.InnerException) + { + $exception = $_.Exception.InnerException + } + $errorRecord = CreateErrorRecordHelper "CompressArchiveUnauthorizedAccessError" $null ([System.Management.Automation.ErrorCategory]::PermissionDenied) $exception $currentFilePath + Write-Error -ErrorRecord $errorRecord + } + + if ($null -ne $currentFileStream) + { + $srcStream = New-Object System.IO.BinaryReader $currentFileStream + + $currentArchiveEntry = $zipArchive.CreateEntry($relativeFilePath, $compression) + + # Updating the File Creation time so that the same timestamp would be retained after expanding the compressed file. + # At this point we are sure that Get-ChildItem would succeed. + $currentArchiveEntry.LastWriteTime = (Get-Item -LiteralPath $currentFilePath).LastWriteTime + + $destStream = New-Object System.IO.BinaryWriter $currentArchiveEntry.Open() + + while ($numberOfBytesRead = $srcStream.Read($buffer, 0, $bufferSize)) + { + $destStream.Write($buffer, 0, $numberOfBytesRead) + $destStream.Flush() + } + + $numberOfItemsArchived += 1 + $addItemtoArchiveFileMessage = ($LocalizedData.AddItemtoArchiveFile -f $currentFilePath) + } + } + finally + { + If ($null -ne $currentFileStream) + { + $currentFileStream.Dispose() + } + If ($null -ne $srcStream) + { + $srcStream.Dispose() + } + If ($null -ne $destStream) + { + $destStream.Dispose() + } + } + } + else + { + $currentArchiveEntry = $zipArchive.CreateEntry("$relativeFilePath", $compression) + $numberOfItemsArchived += 1 + $addItemtoArchiveFileMessage = ($LocalizedData.AddItemtoArchiveFile -f $currentFilePath) + } + + if ($null -ne $addItemtoArchiveFileMessage) + { + Write-Verbose $addItemtoArchiveFileMessage + } + + $currentEntryCount += 1 + ProgressBarHelper "Compress-Archive" $progressBarStatus $previousSegmentWeight $currentSegmentWeight $sourcePaths.Count $currentEntryCount + } + } + finally + { + If ($null -ne $zipArchive) + { + $zipArchive.Dispose() + } + + If ($null -ne $archiveFileStream) + { + $archiveFileStream.Dispose() + } + + # Complete writing progress. + Write-Progress -Activity "Compress-Archive" -Completed + } + + return $numberOfItemsArchived + } + +<############################################################################################ +# ValidateArchivePathHelper: This is a helper function used to validate the archive file +# path & its file format. The only supported archive file format is .zip +############################################################################################> + function ValidateArchivePathHelper + { + param + ( + [string] + $archiveFile + ) + + if ([System.IO.File]::Exists($archiveFile)) + { + $extension = [system.IO.Path]::GetExtension($archiveFile) + + # Invalid file extension is specifed for the zip file. 
+ if ($extension -ne $zipFileExtension) + { + $errorMessage = ($LocalizedData.InvalidZipFileExtensionError -f $extension, $zipFileExtension) + ThrowTerminatingErrorHelper "NotSupportedArchiveFileExtension" $errorMessage ([System.Management.Automation.ErrorCategory]::InvalidArgument) $extension + } + } + else + { + $errorMessage = ($LocalizedData.PathNotFoundError -f $archiveFile) + ThrowTerminatingErrorHelper "PathNotFound" $errorMessage ([System.Management.Automation.ErrorCategory]::InvalidArgument) $archiveFile + } + } + +<############################################################################################ +# ExpandArchiveHelper: This is a helper function used to expand the archive file contents +# to the specified directory. +############################################################################################> + function ExpandArchiveHelper + { + param + ( + [string] + $archiveFile, + + [string] + $expandedDir, + + [ref] + $expandedItems, + + [boolean] + $force, + + [boolean] + $isVerbose, + + [boolean] + $isConfirm + ) + + Add-CompressionAssemblies + + try + { + # The existance of archive file has already been validated by ValidateArchivePathHelper + # before calling this helper function. + $archiveFileStreamArgs = @($archiveFile, [System.IO.FileMode]::Open, [System.IO.FileAccess]::Read) + $archiveFileStream = New-Object -TypeName System.IO.FileStream -ArgumentList $archiveFileStreamArgs + + $zipArchiveArgs = @($archiveFileStream, [System.IO.Compression.ZipArchiveMode]::Read, $false) + $zipArchive = New-Object -TypeName System.IO.Compression.ZipArchive -ArgumentList $zipArchiveArgs + + if ($zipArchive.Entries.Count -eq 0) + { + $archiveFileIsEmpty = ($LocalizedData.ArchiveFileIsEmpty -f $archiveFile) + Write-Verbose $archiveFileIsEmpty + return + } + + $currentEntryCount = 0 + $progressBarStatus = ($LocalizedData.ExpandProgressBarText -f $archiveFile) + + # The archive entries can either be empty directories or files. + foreach ($currentArchiveEntry in $zipArchive.Entries) + { + $currentArchiveEntryPath = Join-Path -Path $expandedDir -ChildPath $currentArchiveEntry.FullName + $extension = [system.IO.Path]::GetExtension($currentArchiveEntryPath) + + # The current archive entry is an empty directory + # The FullName of the Archive Entry representing a directory would end with a trailing '\'. + if ($extension -eq [string]::Empty -and + $currentArchiveEntryPath.EndsWith("\", [StringComparison]::OrdinalIgnoreCase)) + { + $pathExists = Test-Path -LiteralPath $currentArchiveEntryPath + + # The current archive entry expects an empty directory. + # Check if the existing directory is empty. If its not empty + # then it means that user has added this directory by other means. + if ($pathExists -eq $false) + { + New-Item $currentArchiveEntryPath -ItemType Directory -Confirm:$isConfirm | Out-Null + + if (Test-Path -LiteralPath $currentArchiveEntryPath -PathType Container) + { + $addEmptyDirectorytoExpandedPathMessage = ($LocalizedData.AddItemtoArchiveFile -f $currentArchiveEntryPath) + Write-Verbose $addEmptyDirectorytoExpandedPathMessage + + $expandedItems.Value += $currentArchiveEntryPath + } + } + } + else + { + try + { + $currentArchiveEntryFileInfo = New-Object -TypeName System.IO.FileInfo -ArgumentList $currentArchiveEntryPath + $parentDirExists = Test-Path -LiteralPath $currentArchiveEntryFileInfo.DirectoryName -PathType Container + + # If the Parent directory of the current entry in the archive file does not exist, then create it. 
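+                        <#
+                            Illustrative only (entry name and destination are hypothetical): an
+                            entry such as 'docs\sub\readme.txt' expanded to 'C:\out' targets
+
+                                Join-Path 'C:\out' 'docs\sub\readme.txt'   # C:\out\docs\sub\readme.txt
+
+                            and 'C:\out\docs\sub' may not exist yet, because a zip file is not
+                            required to carry explicit directory entries for every folder.
+                        #>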
+ if ($parentDirExists -eq $false) + { + New-Item $currentArchiveEntryFileInfo.DirectoryName -ItemType Directory -Confirm:$isConfirm | Out-Null + + if (!(Test-Path -LiteralPath $currentArchiveEntryFileInfo.DirectoryName -PathType Container)) + { + # The directory referred by $currentArchiveEntryFileInfo.DirectoryName was not successfully created. + # This could be because the user has specified -Confirm paramter when Expand-Archive was invoked + # and authorization was not provided when confirmation was prompted. In such a scenario, + # we skip the current file in the archive and continue with the remaining archive file contents. + Continue + } + + $expandedItems.Value += $currentArchiveEntryFileInfo.DirectoryName + } + + $hasNonTerminatingError = $false + + # Check if the file in to which the current archive entry contents + # would be expanded already exists. + if ($currentArchiveEntryFileInfo.Exists) + { + if ($force) + { + Remove-Item -LiteralPath $currentArchiveEntryFileInfo.FullName -Force -ErrorVariable ev -Verbose:$isVerbose -Confirm:$isConfirm + if ($ev -ne $null) + { + $hasNonTerminatingError = $true + } + + if (Test-Path -LiteralPath $currentArchiveEntryFileInfo.FullName -PathType Leaf) + { + # The file referred by $currentArchiveEntryFileInfo.FullName was not successfully removed. + # This could be because the user has specified -Confirm paramter when Expand-Archive was invoked + # and authorization was not provided when confirmation was prompted. In such a scenario, + # we skip the current file in the archive and continue with the remaining archive file contents. + Continue + } + } + else + { + # Write non-terminating error to the pipeline. + $errorMessage = ($LocalizedData.FileExistsError -f $currentArchiveEntryFileInfo.FullName, $archiveFile, $currentArchiveEntryFileInfo.FullName, $currentArchiveEntryFileInfo.FullName) + $errorRecord = CreateErrorRecordHelper "ExpandArchiveFileExists" $errorMessage ([System.Management.Automation.ErrorCategory]::InvalidOperation) $null $currentArchiveEntryFileInfo.FullName + Write-Error -ErrorRecord $errorRecord + $hasNonTerminatingError = $true + } + } + + if (!$hasNonTerminatingError) + { + [System.IO.Compression.ZipFileExtensions]::ExtractToFile($currentArchiveEntry, $currentArchiveEntryPath, $false) + + # Add the expanded file path to the $expandedItems array, + # to keep track of all the expanded files created while expanding the archive file. + # If user enters CTRL + C then at that point of time, all these expanded files + # would be deleted as part of the clean up process. + $expandedItems.Value += $currentArchiveEntryPath + + $addFiletoExpandedPathMessage = ($LocalizedData.CreateFileAtExpandedPath -f $currentArchiveEntryPath) + Write-Verbose $addFiletoExpandedPathMessage + } + } + finally + { + If ($null -ne $destStream) + { + $destStream.Dispose() + } + + If ($null -ne $srcStream) + { + $srcStream.Dispose() + } + } + } + + $currentEntryCount += 1 + # $currentSegmentWeight is Set to 100 giving equal weightage to each file that is getting expanded. + # $previousSegmentWeight is set to 0 as there are no prior segments. + $previousSegmentWeight = 0 + $currentSegmentWeight = 100 + ProgressBarHelper "Expand-Archive" $progressBarStatus $previousSegmentWeight $currentSegmentWeight $zipArchive.Entries.Count $currentEntryCount + } + } + finally + { + If ($null -ne $zipArchive) + { + $zipArchive.Dispose() + } + + If ($null -ne $archiveFileStream) + { + $archiveFileStream.Dispose() + } + + # Complete writing progress. 
+ Write-Progress -Activity "Expand-Archive" -Completed + } + } + +<############################################################################################ +# ProgressBarHelper: This is a helper function used to display progress message. +# This function is used by both Compress-Archive & Expand-Archive to display archive file +# creation/expansion progress. +############################################################################################> + function ProgressBarHelper + { + param + ( + [string] + $cmdletName, + + [string] + $status, + + [double] + $previousSegmentWeight, + + [double] + $currentSegmentWeight, + + [int] + $totalNumberofEntries, + + [int] + $currentEntryCount + ) + + if ($currentEntryCount -gt 0 -and + $totalNumberofEntries -gt 0 -and + $previousSegmentWeight -ge 0 -and + $currentSegmentWeight -gt 0) + { + $entryDefaultWeight = $currentSegmentWeight/[double]$totalNumberofEntries + + $percentComplete = $previousSegmentWeight + ($entryDefaultWeight * $currentEntryCount) + Write-Progress -Activity $cmdletName -Status $status -PercentComplete $percentComplete + } + } + +<############################################################################################ +# CSVHelper: This is a helper function used to append comma after each path specifid by +# the SourcePath array. This helper function is used to display all the user supplied paths +# in the WhatIf message. +############################################################################################> + function CSVHelper + { + param + ( + [string[]] + $sourcePath + ) + + # SourcePath has already been validated by the calling funcation. + if ($sourcePath.Count -gt 1) + { + $sourcePathInCsvFormat = "`n" + for ($currentIndex = 0; $currentIndex -lt $sourcePath.Count; $currentIndex++) + { + if ($currentIndex -eq $sourcePath.Count - 1) + { + $sourcePathInCsvFormat += $sourcePath[$currentIndex] + } + else + { + $sourcePathInCsvFormat += $sourcePath[$currentIndex] + "`n" + } + } + } + else + { + $sourcePathInCsvFormat = $sourcePath + } + + return $sourcePathInCsvFormat + } + +<############################################################################################ +# ThrowTerminatingErrorHelper: This is a helper function used to throw terminating error. 
+############################################################################################>
+    function ThrowTerminatingErrorHelper
+    {
+        param
+        (
+            [string]
+            $errorId,
+
+            [string]
+            $errorMessage,
+
+            [System.Management.Automation.ErrorCategory]
+            $errorCategory,
+
+            [object]
+            $targetObject,
+
+            [Exception]
+            $innerException
+        )
+
+        if ($innerException -eq $null)
+        {
+            $exception = New-Object System.IO.IOException $errorMessage
+        }
+        else
+        {
+            $exception = New-Object System.IO.IOException $errorMessage, $innerException
+        }
+
+        $errorRecord = New-Object System.Management.Automation.ErrorRecord $exception, $errorId, $errorCategory, $targetObject
+        $PSCmdlet.ThrowTerminatingError($errorRecord)
+    }
+
+<############################################################################################
+# CreateErrorRecordHelper: This is a helper function used to create an ErrorRecord
+############################################################################################>
+    function CreateErrorRecordHelper
+    {
+        param
+        (
+            [string]
+            $errorId,
+
+            [string]
+            $errorMessage,
+
+            [System.Management.Automation.ErrorCategory]
+            $errorCategory,
+
+            [Exception]
+            $exception,
+
+            [object]
+            $targetObject
+        )
+
+        if ($null -eq $exception)
+        {
+            $exception = New-Object System.IO.IOException $errorMessage
+        }
+
+        $errorRecord = New-Object System.Management.Automation.ErrorRecord $exception, $errorId, $errorCategory, $targetObject
+        return $errorRecord
+    }
+    #endregion Utility Functions
+
+    $inputPaths = @()
+    $destinationParentDir = [system.IO.Path]::GetDirectoryName($DestinationPath)
+    if ($null -eq $destinationParentDir)
+    {
+        $errorMessage = ($LocalizedData.InvalidDestinationPath -f $DestinationPath)
+        ThrowTerminatingErrorHelper "InvalidArchiveFilePath" $errorMessage ([System.Management.Automation.ErrorCategory]::InvalidArgument) $DestinationPath
+    }
+
+    if ($destinationParentDir -eq [string]::Empty)
+    {
+        $destinationParentDir = '.'
+    }
+
+    $archiveFileName = [system.IO.Path]::GetFileName($DestinationPath)
+    $destinationParentDir = GetResolvedPathHelper $destinationParentDir $false $PSCmdlet
+
+    if ($destinationParentDir.Count -gt 1)
+    {
+        $errorMessage = ($LocalizedData.InvalidArchiveFilePathError -f $DestinationPath, "DestinationPath", "DestinationPath")
+        ThrowTerminatingErrorHelper "InvalidArchiveFilePath" $errorMessage ([System.Management.Automation.ErrorCategory]::InvalidArgument) $DestinationPath
+    }
+
+    IsValidFileSystemPath $destinationParentDir | Out-Null
+    $DestinationPath = Join-Path -Path $destinationParentDir -ChildPath $archiveFileName
+
+    # The GetExtension API does not validate the actual existence of the path.
+    $extension = [system.IO.Path]::GetExtension($DestinationPath)
+
+    # If the user does not specify the .zip extension, we append it.
+    If ($extension -eq [string]::Empty)
+    {
+        $DestinationPathWithOutExtension = $DestinationPath
+        $DestinationPath = $DestinationPathWithOutExtension + $zipFileExtension
+        $appendArchiveFileExtensionMessage = ($LocalizedData.AppendArchiveFileExtensionMessage -f $DestinationPathWithOutExtension, $DestinationPath)
+        Write-Verbose $appendArchiveFileExtensionMessage
+    }
+    else
+    {
+        # Invalid file extension is specified for the zip file to be created.
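+        <#
+            Hedged example of the extension handling in this block (paths are hypothetical):
+
+                Compress-Archive -Path C:\data -DestinationPath C:\temp\backup      # creates C:\temp\backup.zip
+                Compress-Archive -Path C:\data -DestinationPath C:\temp\backup.7z   # terminating error, only .zip is supported
+        #>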
+ if ($extension -ne $zipFileExtension) + { + $errorMessage = ($LocalizedData.InvalidZipFileExtensionError -f $extension, $zipFileExtension) + ThrowTerminatingErrorHelper "NotSupportedArchiveFileExtension" $errorMessage ([System.Management.Automation.ErrorCategory]::InvalidArgument) $extension + } + } + + $archiveFileExist = Test-Path -LiteralPath $DestinationPath -PathType Leaf + + if ($archiveFileExist -and ($Update -eq $false -and $Force -eq $false)) + { + $errorMessage = ($LocalizedData.ZipFileExistError -f $DestinationPath) + ThrowTerminatingErrorHelper "ArchiveFileExists" $errorMessage ([System.Management.Automation.ErrorCategory]::InvalidArgument) $DestinationPath + } + + # If archive file already exists and if -Update is specified, then we check to see + # if we have write access permission to update the existing archive file. + if ($archiveFileExist -and $Update -eq $true) + { + $item = Get-Item -Path $DestinationPath + if ($item.Attributes.ToString().Contains("ReadOnly")) + { + $errorMessage = ($LocalizedData.ArchiveFileIsReadOnly -f $DestinationPath) + ThrowTerminatingErrorHelper "ArchiveFileIsReadOnly" $errorMessage ([System.Management.Automation.ErrorCategory]::InvalidOperation) $DestinationPath + } + } + + $isWhatIf = $psboundparameters.ContainsKey("WhatIf") + if (!$isWhatIf) + { + $preparingToCompressVerboseMessage = ($LocalizedData.PreparingToCompressVerboseMessage) + Write-Verbose $preparingToCompressVerboseMessage + + $progressBarStatus = ($LocalizedData.CompressProgressBarText -f $DestinationPath) + ProgressBarHelper "Compress-Archive" $progressBarStatus 0 100 100 1 + } + } + PROCESS + { + if ($PsCmdlet.ParameterSetName -eq "Path" -or + $PsCmdlet.ParameterSetName -eq "PathWithForce" -or + $PsCmdlet.ParameterSetName -eq "PathWithUpdate") + { + $inputPaths += $Path + } + + if ($PsCmdlet.ParameterSetName -eq "LiteralPath" -or + $PsCmdlet.ParameterSetName -eq "LiteralPathWithForce" -or + $PsCmdlet.ParameterSetName -eq "LiteralPathWithUpdate") + { + $inputPaths += $LiteralPath + } + } + END + { + # If archive file already exists and if -Force is specified, we delete the + # existing artchive file and create a brand new one. + if (($PsCmdlet.ParameterSetName -eq "PathWithForce" -or + $PsCmdlet.ParameterSetName -eq "LiteralPathWithForce") -and $archiveFileExist) + { + Remove-Item -Path $DestinationPath -Force -ErrorAction Stop + } + + # Validate Source Path depeding on parameter set being used. + # The specified source path conatins one or more files or directories that needs + # to be compressed. + $isLiteralPathUsed = $false + if ($PsCmdlet.ParameterSetName -eq "LiteralPath" -or + $PsCmdlet.ParameterSetName -eq "LiteralPathWithForce" -or + $PsCmdlet.ParameterSetName -eq "LiteralPathWithUpdate") + { + $isLiteralPathUsed = $true + } + + ValidateDuplicateFileSystemPath $PsCmdlet.ParameterSetName $inputPaths + $resolvedPaths = GetResolvedPathHelper $inputPaths $isLiteralPathUsed $PSCmdlet + IsValidFileSystemPath $resolvedPaths | Out-Null + + $sourcePath = $resolvedPaths; + + # CSVHelper: This is a helper function used to append comma after each path specifid by + # the $sourcePath array. The comma saperated paths are displayed in the -WhatIf message. + $sourcePathInCsvFormat = CSVHelper $sourcePath + if ($pscmdlet.ShouldProcess($sourcePathInCsvFormat)) + { + try + { + # StopProcessing is not avaliable in Script cmdlets. However the pipleline execution + # is terminated when ever 'CTRL + C' is entered by user to terminate the cmdlet execution. 
+ # The finally block is executed whenever pipleline is terminated. + # $isArchiveFileProcessingComplete variable is used to track if 'CTRL + C' is entered by the + # user. + $isArchiveFileProcessingComplete = $false + + $numberOfItemsArchived = CompressArchiveHelper $sourcePath $DestinationPath $CompressionLevel $Update + + $isArchiveFileProcessingComplete = $true + } + finally + { + # The $isArchiveFileProcessingComplete would be set to $false if user has typed 'CTRL + C' to + # terminate the cmdlet execution or if an unhandled exception is thrown. + # $numberOfItemsArchived contains the count of number of files or directories add to the archive file. + # If the newly created archive file is empty then we delete it as its not usable. + if (($isArchiveFileProcessingComplete -eq $false) -or + ($numberOfItemsArchived -eq 0)) + { + $DeleteArchiveFileMessage = ($LocalizedData.DeleteArchiveFile -f $DestinationPath) + Write-Verbose $DeleteArchiveFileMessage + + # delete the partial archive file created. + if (Test-Path $DestinationPath) + { + Remove-Item -LiteralPath $DestinationPath -Force -Recurse -ErrorAction SilentlyContinue + } + } + } + } + } +} + +function Expand-Archive +{ + <# + .SYNOPSIS + Extracts files from a specified archive (zipped) file. + + .DESCRIPTION + The Expand-Archive cmdlet extracts files from a specified zipped archive file to a specified destination folder. An archive file allows multiple files to be packaged, and optionally compressed, into a single zipped file for easier distribution and storage. + + .PARAMETER Path + Specifies the path to the archive file. + + .PARAMETER LiteralPath + Specifies the path to an archive file. Unlike the Path parameter, the value of LiteralPath is used exactly as it is typed. Wildcard characters are not supported. If the path includes escape characters, enclose each escape character in single quotation marks, to instruct Windows PowerShell not to interpret any characters as escape sequences. + + .PARAMETER DestinationPath + Specifies the path to the folder in which you want the command to save extracted files. Enter the path to a folder, but do not specify a file name or file name extension. This parameter is required. + + .PARAMETER Force + Forces the command to run without asking for user confirmation. + + .PARAMETER Confirm + Prompts you for confirmation before running the cmdlet. + + .PARAMETER WhatIf + Shows what would happen if the cmdlet runs. The cmdlet is not run. + + .EXAMPLE + Example 1: Extract the contents of an archive + + PS C:\>Expand-Archive -LiteralPath C:\Archives\Draft.Zip -DestinationPath C:\Reference + + This command extracts the contents of an existing archive file, Draft.zip, into the folder specified by the DestinationPath parameter, C:\Reference. + + .EXAMPLE + Example 2: Extract the contents of an archive in the current folder + + PS C:\>Expand-Archive -Path Draft.Zip -DestinationPath C:\Reference + + This command extracts the contents of an existing archive file in the current folder, Draft.zip, into the folder specified by the DestinationPath parameter, C:\Reference. 
+ #> + [CmdletBinding( + DefaultParameterSetName = "Path", + SupportsShouldProcess = $true, + HelpUri = "http://go.microsoft.com/fwlink/?LinkID=393253")] + param + ( + [parameter ( + mandatory = $true, + Position = 0, + ParameterSetName = "Path", + ValueFromPipeline = $true, + ValueFromPipelineByPropertyName = $true)] + [ValidateNotNullOrEmpty()] + [string] + $Path, + + [parameter ( + mandatory = $true, + ParameterSetName = "LiteralPath", + ValueFromPipelineByPropertyName = $true)] + [ValidateNotNullOrEmpty()] + [Alias("PSPath")] + [string] + $LiteralPath, + + [parameter (mandatory = $false, + Position = 1, + ValueFromPipeline = $false, + ValueFromPipelineByPropertyName = $false)] + [ValidateNotNullOrEmpty()] + [string] + $DestinationPath, + + [parameter (mandatory = $false, + ValueFromPipeline = $false, + ValueFromPipelineByPropertyName = $false)] + [switch] + $Force + ) + + BEGIN + { + Add-Type -AssemblyName System.IO.Compression -ErrorAction Ignore + Add-Type -AssemblyName System.IO.Compression.FileSystem -ErrorAction Ignore + + $zipFileExtension = ".zip" + + $LocalizedData = ConvertFrom-StringData @' +PathNotFoundError=The path '{0}' either does not exist or is not a valid file system path. +ExpandArchiveInValidDestinationPath=The path '{0}' is not a valid file system directory path. +InvalidZipFileExtensionError={0} is not a supported archive file format. {1} is the only supported archive file format. +ArchiveFileIsReadOnly=The attributes of the archive file {0} is set to 'ReadOnly' hence it cannot be updated. If you intend to update the existing archive file, remove the 'ReadOnly' attribute on the archive file else use -Force parameter to override and create a new archive file. +ZipFileExistError=The archive file {0} already exists. Use the -Update parameter to update the existing archive file or use the -Force parameter to overwrite the existing archive file. +DuplicatePathFoundError=The input to {0} parameter contains a duplicate path '{1}'. Provide a unique set of paths as input to {2} parameter. +ArchiveFileIsEmpty=The archive file {0} is empty. +CompressProgressBarText=The archive file '{0}' creation is in progress... +ExpandProgressBarText=The archive file '{0}' expansion is in progress... +AppendArchiveFileExtensionMessage=The archive file path '{0}' supplied to the DestinationPath patameter does not include .zip extension. Hence .zip is appended to the supplied DestinationPath path and the archive file would be created at '{1}'. +AddItemtoArchiveFile=Adding '{0}'. +CreateFileAtExpandedPath=Created '{0}'. +InvalidArchiveFilePathError=The archive file path '{0}' specified as input to the {1} parameter is resolving to multiple file system paths. Provide a unique path to the {2} parameter where the archive file has to be created. +InvalidExpandedDirPathError=The directory path '{0}' specified as input to the DestinationPath parameter is resolving to multiple file system paths. Provide a unique path to the Destination parameter where the archive file contents have to be expanded. +FileExistsError=Failed to create file '{0}' while expanding the archive file '{1}' contents as the file '{2}' already exists. Use the -Force parameter if you want to overwrite the existing directory '{3}' contents when expanding the archive file. +DeleteArchiveFile=The partially created archive file '{0}' is deleted as it is not usable. +InvalidDestinationPath=The destination path '{0}' does not contain a valid archive file name. +PreparingToCompressVerboseMessage=Preparing to compress... 
+PreparingToExpandVerboseMessage=Preparing to expand... +'@ + + #region Utility Functions + function GetResolvedPathHelper + { + param + ( + [string[]] + $path, + + [boolean] + $isLiteralPath, + + [System.Management.Automation.PSCmdlet] + $callerPSCmdlet + ) + + $resolvedPaths = @() + + # null and empty check are are already done on Path parameter at the cmdlet layer. + foreach ($currentPath in $path) + { + try + { + if ($isLiteralPath) + { + $currentResolvedPaths = Resolve-Path -LiteralPath $currentPath -ErrorAction Stop + } + else + { + $currentResolvedPaths = Resolve-Path -Path $currentPath -ErrorAction Stop + } + } + catch + { + $errorMessage = ($LocalizedData.PathNotFoundError -f $currentPath) + $exception = New-Object System.InvalidOperationException $errorMessage, $_.Exception + $errorRecord = CreateErrorRecordHelper "ArchiveCmdletPathNotFound" $null ([System.Management.Automation.ErrorCategory]::InvalidArgument) $exception $currentPath + $callerPSCmdlet.ThrowTerminatingError($errorRecord) + } + + foreach ($currentResolvedPath in $currentResolvedPaths) + { + $resolvedPaths += $currentResolvedPath.ProviderPath + } + } + + $resolvedPaths + } + + function Add-CompressionAssemblies + { + + if ($PSEdition -eq "Desktop") + { + Add-Type -AssemblyName System.IO.Compression + Add-Type -AssemblyName System.IO.Compression.FileSystem + } + } + + function IsValidFileSystemPath + { + param + ( + [string[]] + $path + ) + + $result = $true; + + # null and empty check are are already done on Path parameter at the cmdlet layer. + foreach ($currentPath in $path) + { + if (!([System.IO.File]::Exists($currentPath) -or [System.IO.Directory]::Exists($currentPath))) + { + $errorMessage = ($LocalizedData.PathNotFoundError -f $currentPath) + ThrowTerminatingErrorHelper "PathNotFound" $errorMessage ([System.Management.Automation.ErrorCategory]::InvalidArgument) $currentPath + } + } + + return $result; + } + + + function ValidateDuplicateFileSystemPath + { + param + ( + [string] + $inputParameter, + + [string[]] + $path + ) + + $uniqueInputPaths = @() + + # null and empty check are are already done on Path parameter at the cmdlet layer. + foreach ($currentPath in $path) + { + $currentInputPath = $currentPath.ToUpper() + if ($uniqueInputPaths.Contains($currentInputPath)) + { + $errorMessage = ($LocalizedData.DuplicatePathFoundError -f $inputParameter, $currentPath, $inputParameter) + ThrowTerminatingErrorHelper "DuplicatePathFound" $errorMessage ([System.Management.Automation.ErrorCategory]::InvalidArgument) $currentPath + } + else + { + $uniqueInputPaths += $currentInputPath + } + } + } + + function CompressionLevelMapper + { + param + ( + [string] + $compressionLevel + ) + + $compressionLevelFormat = [System.IO.Compression.CompressionLevel]::Optimal + + # CompressionLevel format is already validated at the cmdlet layer. 
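+        <#
+            Sketch of the mapping performed by the switch below; Optimal is the default set above:
+
+                'Optimal'       -> [System.IO.Compression.CompressionLevel]::Optimal
+                'Fastest'       -> [System.IO.Compression.CompressionLevel]::Fastest
+                'NoCompression' -> [System.IO.Compression.CompressionLevel]::NoCompression
+
+            e.g. CompressionLevelMapper 'Fastest' returns the Fastest enumeration value.
+        #>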
+ switch ($compressionLevel.ToString()) + { + "Fastest" + { + $compressionLevelFormat = [System.IO.Compression.CompressionLevel]::Fastest + } + "NoCompression" + { + $compressionLevelFormat = [System.IO.Compression.CompressionLevel]::NoCompression + } + } + + return $compressionLevelFormat + } + + function CompressArchiveHelper + { + param + ( + [string[]] + $sourcePath, + + [string] + $destinationPath, + + [string] + $compressionLevel, + + [bool] + $isUpdateMode + ) + + $numberOfItemsArchived = 0 + $sourceFilePaths = @() + $sourceDirPaths = @() + + foreach ($currentPath in $sourcePath) + { + $result = Test-Path -LiteralPath $currentPath -PathType Leaf + if ($result -eq $true) + { + $sourceFilePaths += $currentPath + } + else + { + $sourceDirPaths += $currentPath + } + } + + # The Soure Path contains one or more directory (this directory can have files under it) and no files to be compressed. + if ($sourceFilePaths.Count -eq 0 -and $sourceDirPaths.Count -gt 0) + { + $currentSegmentWeight = 100/[double]$sourceDirPaths.Count + $previousSegmentWeight = 0 + foreach ($currentSourceDirPath in $sourceDirPaths) + { + $count = CompressSingleDirHelper $currentSourceDirPath $destinationPath $compressionLevel $true $isUpdateMode $previousSegmentWeight $currentSegmentWeight + $numberOfItemsArchived += $count + $previousSegmentWeight += $currentSegmentWeight + } + } + + # The Soure Path contains only files to be compressed. + elseIf ($sourceFilePaths.Count -gt 0 -and $sourceDirPaths.Count -eq 0) + { + # $previousSegmentWeight is equal to 0 as there are no prior segments. + # $currentSegmentWeight is set to 100 as all files have equal weightage. + $previousSegmentWeight = 0 + $currentSegmentWeight = 100 + + $numberOfItemsArchived = CompressFilesHelper $sourceFilePaths $destinationPath $compressionLevel $isUpdateMode $previousSegmentWeight $currentSegmentWeight + } + # The Soure Path contains one or more files and one or more directories (this directory can have files under it) to be compressed. + elseif ($sourceFilePaths.Count -gt 0 -and $sourceDirPaths.Count -gt 0) + { + # each directory is considered as an individual segments & all the individual files are clubed in to a separate sgemnet. 
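+            <#
+                Worked example (numbers are illustrative): with 2 source directories plus the
+                group of loose files there are 3 segments, each worth 100/3 = 33.3 percent:
+
+                    dir1  -> previousSegmentWeight  0.0, currentSegmentWeight 33.3
+                    dir2  -> previousSegmentWeight 33.3, currentSegmentWeight 33.3
+                    files -> previousSegmentWeight 66.7, currentSegmentWeight 33.3
+
+                ProgressBarHelper then spreads each segment's weight evenly over its entries.
+            #>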
+ $currentSegmentWeight = 100/[double]($sourceDirPaths.Count + 1) + $previousSegmentWeight = 0 + + foreach ($currentSourceDirPath in $sourceDirPaths) + { + $count = CompressSingleDirHelper $currentSourceDirPath $destinationPath $compressionLevel $true $isUpdateMode $previousSegmentWeight $currentSegmentWeight + $numberOfItemsArchived += $count + $previousSegmentWeight += $currentSegmentWeight + } + + $count = CompressFilesHelper $sourceFilePaths $destinationPath $compressionLevel $isUpdateMode $previousSegmentWeight $currentSegmentWeight + $numberOfItemsArchived += $count + } + + return $numberOfItemsArchived + } + + function CompressFilesHelper + { + param + ( + [string[]] + $sourceFilePaths, + + [string] + $destinationPath, + + [string] + $compressionLevel, + + [bool] + $isUpdateMode, + + [double] + $previousSegmentWeight, + + [double] + $currentSegmentWeight + ) + + $numberOfItemsArchived = ZipArchiveHelper $sourceFilePaths $destinationPath $compressionLevel $isUpdateMode $null $previousSegmentWeight $currentSegmentWeight + + return $numberOfItemsArchived + } + + function CompressSingleDirHelper + { + param + ( + [string] + $sourceDirPath, + + [string] + $destinationPath, + + [string] + $compressionLevel, + + [bool] + $useParentDirAsRoot, + + [bool] + $isUpdateMode, + + [double] + $previousSegmentWeight, + + [double] + $currentSegmentWeight + ) + + [System.Collections.Generic.List[System.String]]$subDirFiles = @() + + if ($useParentDirAsRoot) + { + $sourceDirInfo = New-Object -TypeName System.IO.DirectoryInfo -ArgumentList $sourceDirPath + $sourceDirFullName = $sourceDirInfo.Parent.FullName + + # If the directory is present at the drive level the DirectoryInfo.Parent include '\' example: C:\ + # On the other hand if the directory exists at a deper level then DirectoryInfo.Parent + # has just the path (without an ending '\'). example C:\source + if ($sourceDirFullName.Length -eq 3) + { + $modifiedSourceDirFullName = $sourceDirFullName + } + else + { + $modifiedSourceDirFullName = $sourceDirFullName + "\" + } + } + else + { + $sourceDirFullName = $sourceDirPath + $modifiedSourceDirFullName = $sourceDirFullName + "\" + } + + $dirContents = Get-ChildItem -LiteralPath $sourceDirPath -Recurse + foreach ($currentContent in $dirContents) + { + $isContainer = $currentContent -is [System.IO.DirectoryInfo] + if (!$isContainer) + { + $subDirFiles.Add($currentContent.FullName) + } + else + { + # The currentContent points to a directory. + # We need to check if the directory is an empty directory, if so such a + # directory has to be explictly added to the archive file. + # if there are no files in the directory the GetFiles() API returns an empty array. 
+ $files = $currentContent.GetFiles() + if ($files.Count -eq 0) + { + $subDirFiles.Add($currentContent.FullName + "\") + } + } + } + + $numberOfItemsArchived = ZipArchiveHelper $subDirFiles.ToArray() $destinationPath $compressionLevel $isUpdateMode $modifiedSourceDirFullName $previousSegmentWeight $currentSegmentWeight + + return $numberOfItemsArchived + } + + function ZipArchiveHelper + { + param + ( + [System.Collections.Generic.List[System.String]] + $sourcePaths, + + [string] + $destinationPath, + + [string] + $compressionLevel, + + [bool] + $isUpdateMode, + + [string] + $modifiedSourceDirFullName, + + [double] + $previousSegmentWeight, + + [double] + $currentSegmentWeight + ) + + $numberOfItemsArchived = 0 + $fileMode = [System.IO.FileMode]::Create + $result = Test-Path -LiteralPath $DestinationPath -PathType Leaf + if ($result -eq $true) + { + $fileMode = [System.IO.FileMode]::Open + } + + Add-CompressionAssemblies + + try + { + # At this point we are sure that the archive file has write access. + $archiveFileStreamArgs = @($destinationPath, $fileMode) + $archiveFileStream = New-Object -TypeName System.IO.FileStream -ArgumentList $archiveFileStreamArgs + + $zipArchiveArgs = @($archiveFileStream, [System.IO.Compression.ZipArchiveMode]::Update, $false) + $zipArchive = New-Object -TypeName System.IO.Compression.ZipArchive -ArgumentList $zipArchiveArgs + + $currentEntryCount = 0 + $progressBarStatus = ($LocalizedData.CompressProgressBarText -f $destinationPath) + $bufferSize = 4kb + $buffer = New-Object Byte[] $bufferSize + + foreach ($currentFilePath in $sourcePaths) + { + if ($modifiedSourceDirFullName -ne $null -and $modifiedSourceDirFullName.Length -gt 0) + { + $index = $currentFilePath.IndexOf($modifiedSourceDirFullName, [System.StringComparison]::OrdinalIgnoreCase) + $currentFilePathSubString = $currentFilePath.Substring($index, $modifiedSourceDirFullName.Length) + $relativeFilePath = $currentFilePath.Replace($currentFilePathSubString, "").Trim() + } + else + { + $relativeFilePath = [System.IO.Path]::GetFileName($currentFilePath) + } + + # Update mode is selected. + # Check to see if archive file already contains one or more zip files in it. + if ($isUpdateMode -eq $true -and $zipArchive.Entries.Count -gt 0) + { + $entryToBeUpdated = $null + + # Check if the file already exists in the archive file. + # If so replace it with new file from the input source. + # If the file does not exist in the archive file then default to + # create mode and create the entry in the archive file. + + foreach ($currentArchiveEntry in $zipArchive.Entries) + { + if ($currentArchiveEntry.FullName -eq $relativeFilePath) + { + $entryToBeUpdated = $currentArchiveEntry + break + } + } + + if ($entryToBeUpdated -ne $null) + { + $addItemtoArchiveFileMessage = ($LocalizedData.AddItemtoArchiveFile -f $currentFilePath) + $entryToBeUpdated.Delete() + } + } + + $compression = CompressionLevelMapper $compressionLevel + + # If a directory needs to be added to an archive file, + # by convention the .Net API's expect the path of the diretcory + # to end with '\' to detect the path as an directory. + if (!$relativeFilePath.EndsWith("\", [StringComparison]::OrdinalIgnoreCase)) + { + try + { + try + { + $currentFileStream = [System.IO.File]::Open($currentFilePath, [System.IO.FileMode]::Open, [System.IO.FileAccess]::Read) + } + catch + { + # Failed to access the file. Write a non terminating error to the pipeline + # and move on with the remaining files. 
+ $exception = $_.Exception + if ($null -ne $_.Exception -and + $null -ne $_.Exception.InnerException) + { + $exception = $_.Exception.InnerException + } + $errorRecord = CreateErrorRecordHelper "CompressArchiveUnauthorizedAccessError" $null ([System.Management.Automation.ErrorCategory]::PermissionDenied) $exception $currentFilePath + Write-Error -ErrorRecord $errorRecord + } + + if ($null -ne $currentFileStream) + { + $srcStream = New-Object System.IO.BinaryReader $currentFileStream + + $currentArchiveEntry = $zipArchive.CreateEntry($relativeFilePath, $compression) + + # Updating the File Creation time so that the same timestamp would be retained after expanding the compressed file. + # At this point we are sure that Get-ChildItem would succeed. + $currentArchiveEntry.LastWriteTime = (Get-Item -LiteralPath $currentFilePath).LastWriteTime + + $destStream = New-Object System.IO.BinaryWriter $currentArchiveEntry.Open() + + while ($numberOfBytesRead = $srcStream.Read($buffer, 0, $bufferSize)) + { + $destStream.Write($buffer, 0, $numberOfBytesRead) + $destStream.Flush() + } + + $numberOfItemsArchived += 1 + $addItemtoArchiveFileMessage = ($LocalizedData.AddItemtoArchiveFile -f $currentFilePath) + } + } + finally + { + If ($null -ne $currentFileStream) + { + $currentFileStream.Dispose() + } + If ($null -ne $srcStream) + { + $srcStream.Dispose() + } + If ($null -ne $destStream) + { + $destStream.Dispose() + } + } + } + else + { + $currentArchiveEntry = $zipArchive.CreateEntry("$relativeFilePath", $compression) + $numberOfItemsArchived += 1 + $addItemtoArchiveFileMessage = ($LocalizedData.AddItemtoArchiveFile -f $currentFilePath) + } + + if ($null -ne $addItemtoArchiveFileMessage) + { + Write-Verbose $addItemtoArchiveFileMessage + } + + $currentEntryCount += 1 + ProgressBarHelper "Compress-Archive" $progressBarStatus $previousSegmentWeight $currentSegmentWeight $sourcePaths.Count $currentEntryCount + } + } + finally + { + If ($null -ne $zipArchive) + { + $zipArchive.Dispose() + } + + If ($null -ne $archiveFileStream) + { + $archiveFileStream.Dispose() + } + + # Complete writing progress. + Write-Progress -Activity "Compress-Archive" -Completed + } + + return $numberOfItemsArchived + } + +<############################################################################################ +# ValidateArchivePathHelper: This is a helper function used to validate the archive file +# path & its file format. The only supported archive file format is .zip +############################################################################################> + function ValidateArchivePathHelper + { + param + ( + [string] + $archiveFile + ) + + if ([System.IO.File]::Exists($archiveFile)) + { + $extension = [system.IO.Path]::GetExtension($archiveFile) + + # Invalid file extension is specifed for the zip file. 
+ if ($extension -ne $zipFileExtension) + { + $errorMessage = ($LocalizedData.InvalidZipFileExtensionError -f $extension, $zipFileExtension) + ThrowTerminatingErrorHelper "NotSupportedArchiveFileExtension" $errorMessage ([System.Management.Automation.ErrorCategory]::InvalidArgument) $extension + } + } + else + { + $errorMessage = ($LocalizedData.PathNotFoundError -f $archiveFile) + ThrowTerminatingErrorHelper "PathNotFound" $errorMessage ([System.Management.Automation.ErrorCategory]::InvalidArgument) $archiveFile + } + } + +<############################################################################################ +# ExpandArchiveHelper: This is a helper function used to expand the archive file contents +# to the specified directory. +############################################################################################> + function ExpandArchiveHelper + { + param + ( + [string] + $archiveFile, + + [string] + $expandedDir, + + [ref] + $expandedItems, + + [boolean] + $force, + + [boolean] + $isVerbose, + + [boolean] + $isConfirm + ) + + Add-CompressionAssemblies + + try + { + # The existance of archive file has already been validated by ValidateArchivePathHelper + # before calling this helper function. + $archiveFileStreamArgs = @($archiveFile, [System.IO.FileMode]::Open, [System.IO.FileAccess]::Read) + $archiveFileStream = New-Object -TypeName System.IO.FileStream -ArgumentList $archiveFileStreamArgs + + $zipArchiveArgs = @($archiveFileStream, [System.IO.Compression.ZipArchiveMode]::Read, $false) + $zipArchive = New-Object -TypeName System.IO.Compression.ZipArchive -ArgumentList $zipArchiveArgs + + if ($zipArchive.Entries.Count -eq 0) + { + $archiveFileIsEmpty = ($LocalizedData.ArchiveFileIsEmpty -f $archiveFile) + Write-Verbose $archiveFileIsEmpty + return + } + + $currentEntryCount = 0 + $progressBarStatus = ($LocalizedData.ExpandProgressBarText -f $archiveFile) + + # The archive entries can either be empty directories or files. + foreach ($currentArchiveEntry in $zipArchive.Entries) + { + $currentArchiveEntryPath = Join-Path -Path $expandedDir -ChildPath $currentArchiveEntry.FullName + $extension = [system.IO.Path]::GetExtension($currentArchiveEntryPath) + + # The current archive entry is an empty directory + # The FullName of the Archive Entry representing a directory would end with a trailing '\'. + if ($extension -eq [string]::Empty -and + $currentArchiveEntryPath.EndsWith("\", [StringComparison]::OrdinalIgnoreCase)) + { + $pathExists = Test-Path -LiteralPath $currentArchiveEntryPath + + # The current archive entry expects an empty directory. + # Check if the existing directory is empty. If its not empty + # then it means that user has added this directory by other means. + if ($pathExists -eq $false) + { + New-Item $currentArchiveEntryPath -ItemType Directory -Confirm:$isConfirm | Out-Null + + if (Test-Path -LiteralPath $currentArchiveEntryPath -PathType Container) + { + $addEmptyDirectorytoExpandedPathMessage = ($LocalizedData.AddItemtoArchiveFile -f $currentArchiveEntryPath) + Write-Verbose $addEmptyDirectorytoExpandedPathMessage + + $expandedItems.Value += $currentArchiveEntryPath + } + } + } + else + { + try + { + $currentArchiveEntryFileInfo = New-Object -TypeName System.IO.FileInfo -ArgumentList $currentArchiveEntryPath + $parentDirExists = Test-Path -LiteralPath $currentArchiveEntryFileInfo.DirectoryName -PathType Container + + # If the Parent directory of the current entry in the archive file does not exist, then create it. 
+ if ($parentDirExists -eq $false) + { + New-Item $currentArchiveEntryFileInfo.DirectoryName -ItemType Directory -Confirm:$isConfirm | Out-Null + + if (!(Test-Path -LiteralPath $currentArchiveEntryFileInfo.DirectoryName -PathType Container)) + { + # The directory referred by $currentArchiveEntryFileInfo.DirectoryName was not successfully created. + # This could be because the user has specified -Confirm paramter when Expand-Archive was invoked + # and authorization was not provided when confirmation was prompted. In such a scenario, + # we skip the current file in the archive and continue with the remaining archive file contents. + Continue + } + + $expandedItems.Value += $currentArchiveEntryFileInfo.DirectoryName + } + + $hasNonTerminatingError = $false + + # Check if the file in to which the current archive entry contents + # would be expanded already exists. + if ($currentArchiveEntryFileInfo.Exists) + { + if ($force) + { + Remove-Item -LiteralPath $currentArchiveEntryFileInfo.FullName -Force -ErrorVariable ev -Verbose:$isVerbose -Confirm:$isConfirm + if ($ev -ne $null) + { + $hasNonTerminatingError = $true + } + + if (Test-Path -LiteralPath $currentArchiveEntryFileInfo.FullName -PathType Leaf) + { + # The file referred by $currentArchiveEntryFileInfo.FullName was not successfully removed. + # This could be because the user has specified -Confirm paramter when Expand-Archive was invoked + # and authorization was not provided when confirmation was prompted. In such a scenario, + # we skip the current file in the archive and continue with the remaining archive file contents. + Continue + } + } + else + { + # Write non-terminating error to the pipeline. + $errorMessage = ($LocalizedData.FileExistsError -f $currentArchiveEntryFileInfo.FullName, $archiveFile, $currentArchiveEntryFileInfo.FullName, $currentArchiveEntryFileInfo.FullName) + $errorRecord = CreateErrorRecordHelper "ExpandArchiveFileExists" $errorMessage ([System.Management.Automation.ErrorCategory]::InvalidOperation) $null $currentArchiveEntryFileInfo.FullName + Write-Error -ErrorRecord $errorRecord + $hasNonTerminatingError = $true + } + } + + if (!$hasNonTerminatingError) + { + [System.IO.Compression.ZipFileExtensions]::ExtractToFile($currentArchiveEntry, $currentArchiveEntryPath, $false) + + # Add the expanded file path to the $expandedItems array, + # to keep track of all the expanded files created while expanding the archive file. + # If user enters CTRL + C then at that point of time, all these expanded files + # would be deleted as part of the clean up process. + $expandedItems.Value += $currentArchiveEntryPath + + $addFiletoExpandedPathMessage = ($LocalizedData.CreateFileAtExpandedPath -f $currentArchiveEntryPath) + Write-Verbose $addFiletoExpandedPathMessage + } + } + finally + { + If ($null -ne $destStream) + { + $destStream.Dispose() + } + + If ($null -ne $srcStream) + { + $srcStream.Dispose() + } + } + } + + $currentEntryCount += 1 + # $currentSegmentWeight is Set to 100 giving equal weightage to each file that is getting expanded. + # $previousSegmentWeight is set to 0 as there are no prior segments. + $previousSegmentWeight = 0 + $currentSegmentWeight = 100 + ProgressBarHelper "Expand-Archive" $progressBarStatus $previousSegmentWeight $currentSegmentWeight $zipArchive.Entries.Count $currentEntryCount + } + } + finally + { + If ($null -ne $zipArchive) + { + $zipArchive.Dispose() + } + + If ($null -ne $archiveFileStream) + { + $archiveFileStream.Dispose() + } + + # Complete writing progress. 
+ Write-Progress -Activity "Expand-Archive" -Completed + } + } + +<############################################################################################ +# ProgressBarHelper: This is a helper function used to display progress message. +# This function is used by both Compress-Archive & Expand-Archive to display archive file +# creation/expansion progress. +############################################################################################> + function ProgressBarHelper + { + param + ( + [string] + $cmdletName, + + [string] + $status, + + [double] + $previousSegmentWeight, + + [double] + $currentSegmentWeight, + + [int] + $totalNumberofEntries, + + [int] + $currentEntryCount + ) + + if ($currentEntryCount -gt 0 -and + $totalNumberofEntries -gt 0 -and + $previousSegmentWeight -ge 0 -and + $currentSegmentWeight -gt 0) + { + $entryDefaultWeight = $currentSegmentWeight/[double]$totalNumberofEntries + + $percentComplete = $previousSegmentWeight + ($entryDefaultWeight * $currentEntryCount) + Write-Progress -Activity $cmdletName -Status $status -PercentComplete $percentComplete + } + } + +<############################################################################################ +# CSVHelper: This is a helper function used to append comma after each path specifid by +# the SourcePath array. This helper function is used to display all the user supplied paths +# in the WhatIf message. +############################################################################################> + function CSVHelper + { + param + ( + [string[]] + $sourcePath + ) + + # SourcePath has already been validated by the calling funcation. + if ($sourcePath.Count -gt 1) + { + $sourcePathInCsvFormat = "`n" + for ($currentIndex = 0; $currentIndex -lt $sourcePath.Count; $currentIndex++) + { + if ($currentIndex -eq $sourcePath.Count - 1) + { + $sourcePathInCsvFormat += $sourcePath[$currentIndex] + } + else + { + $sourcePathInCsvFormat += $sourcePath[$currentIndex] + "`n" + } + } + } + else + { + $sourcePathInCsvFormat = $sourcePath + } + + return $sourcePathInCsvFormat + } + +<############################################################################################ +# ThrowTerminatingErrorHelper: This is a helper function used to throw terminating error. 
+############################################################################################>
+    function ThrowTerminatingErrorHelper
+    {
+        param
+        (
+            [string]
+            $errorId,
+
+            [string]
+            $errorMessage,
+
+            [System.Management.Automation.ErrorCategory]
+            $errorCategory,
+
+            [object]
+            $targetObject,
+
+            [Exception]
+            $innerException
+        )
+
+        if ($innerException -eq $null)
+        {
+            $exception = New-Object System.IO.IOException $errorMessage
+        }
+        else
+        {
+            $exception = New-Object System.IO.IOException $errorMessage, $innerException
+        }
+
+        $errorRecord = New-Object System.Management.Automation.ErrorRecord $exception, $errorId, $errorCategory, $targetObject
+        $PSCmdlet.ThrowTerminatingError($errorRecord)
+    }
+
+<############################################################################################
+# CreateErrorRecordHelper: This is a helper function used to create an ErrorRecord
+############################################################################################>
+    function CreateErrorRecordHelper
+    {
+        param
+        (
+            [string]
+            $errorId,
+
+            [string]
+            $errorMessage,
+
+            [System.Management.Automation.ErrorCategory]
+            $errorCategory,
+
+            [Exception]
+            $exception,
+
+            [object]
+            $targetObject
+        )
+
+        if ($null -eq $exception)
+        {
+            $exception = New-Object System.IO.IOException $errorMessage
+        }
+
+        $errorRecord = New-Object System.Management.Automation.ErrorRecord $exception, $errorId, $errorCategory, $targetObject
+        return $errorRecord
+    }
+    #endregion Utility Functions
+
+    $isVerbose = $psboundparameters.ContainsKey("Verbose")
+    $isConfirm = $psboundparameters.ContainsKey("Confirm")
+
+    $isDestinationPathProvided = $true
+    if ($DestinationPath -eq [string]::Empty)
+    {
+        $resolvedDestinationPath = $pwd
+        $isDestinationPathProvided = $false
+    }
+    else
+    {
+        $destinationPathExists = Test-Path -Path $DestinationPath -PathType Container
+        if ($destinationPathExists)
+        {
+            $resolvedDestinationPath = GetResolvedPathHelper $DestinationPath $false $PSCmdlet
+            if ($resolvedDestinationPath.Count -gt 1)
+            {
+                $errorMessage = ($LocalizedData.InvalidExpandedDirPathError -f $DestinationPath)
+                ThrowTerminatingErrorHelper "InvalidDestinationPath" $errorMessage ([System.Management.Automation.ErrorCategory]::InvalidArgument) $DestinationPath
+            }
+
+            # At this point we are sure that the provided path resolves to a valid single path.
+            # Calling Resolve-Path again to get the underlying provider name.
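+            <#
+                Illustrative only (paths are hypothetical): the provider check below rejects
+                destinations that are not on the file system, e.g.
+
+                    (Resolve-Path C:\temp).Provider.Name         # FileSystem -> accepted
+                    (Resolve-Path HKLM:\SOFTWARE).Provider.Name  # Registry   -> terminating error
+            #>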
+ $suppliedDestinationPath = Resolve-Path -Path $DestinationPath + if ($suppliedDestinationPath.Provider.Name -ne "FileSystem") + { + $errorMessage = ($LocalizedData.ExpandArchiveInValidDestinationPath -f $DestinationPath) + ThrowTerminatingErrorHelper "InvalidDirectoryPath" $errorMessage ([System.Management.Automation.ErrorCategory]::InvalidArgument) $DestinationPath + } + } + else + { + $createdItem = New-Item -Path $DestinationPath -ItemType Directory -Confirm:$isConfirm -Verbose:$isVerbose -ErrorAction Stop + if ($createdItem -ne $null -and $createdItem.PSProvider.Name -ne "FileSystem") + { + Remove-Item "$DestinationPath" -Force -Recurse -ErrorAction SilentlyContinue + $errorMessage = ($LocalizedData.ExpandArchiveInValidDestinationPath -f $DestinationPath) + ThrowTerminatingErrorHelper "InvalidDirectoryPath" $errorMessage ([System.Management.Automation.ErrorCategory]::InvalidArgument) $DestinationPath + } + + $resolvedDestinationPath = GetResolvedPathHelper $DestinationPath $true $PSCmdlet + } + } + + $isWhatIf = $psboundparameters.ContainsKey("WhatIf") + if (!$isWhatIf) + { + $preparingToExpandVerboseMessage = ($LocalizedData.PreparingToExpandVerboseMessage) + Write-Verbose $preparingToExpandVerboseMessage + + $progressBarStatus = ($LocalizedData.ExpandProgressBarText -f $DestinationPath) + ProgressBarHelper "Expand-Archive" $progressBarStatus 0 100 100 1 + } + } + PROCESS + { + switch ($PsCmdlet.ParameterSetName) + { + "Path" + { + $resolvedSourcePaths = GetResolvedPathHelper $Path $false $PSCmdlet + + if ($resolvedSourcePaths.Count -gt 1) + { + $errorMessage = ($LocalizedData.InvalidArchiveFilePathError -f $Path, $PsCmdlet.ParameterSetName, $PsCmdlet.ParameterSetName) + ThrowTerminatingErrorHelper "InvalidArchiveFilePath" $errorMessage ([System.Management.Automation.ErrorCategory]::InvalidArgument) $Path + } + } + "LiteralPath" + { + $resolvedSourcePaths = GetResolvedPathHelper $LiteralPath $true $PSCmdlet + + if ($resolvedSourcePaths.Count -gt 1) + { + $errorMessage = ($LocalizedData.InvalidArchiveFilePathError -f $LiteralPath, $PsCmdlet.ParameterSetName, $PsCmdlet.ParameterSetName) + ThrowTerminatingErrorHelper "InvalidArchiveFilePath" $errorMessage ([System.Management.Automation.ErrorCategory]::InvalidArgument) $LiteralPath + } + } + } + + ValidateArchivePathHelper $resolvedSourcePaths + + if ($pscmdlet.ShouldProcess($resolvedSourcePaths)) + { + $expandedItems = @() + + try + { + # StopProcessing is not avaliable in Script cmdlets. However the pipleline execution + # is terminated when ever 'CTRL + C' is entered by user to terminate the cmdlet execution. + # The finally block is executed whenever pipleline is terminated. + # $isArchiveFileProcessingComplete variable is used to track if 'CTRL + C' is entered by the + # user. + $isArchiveFileProcessingComplete = $false + + # The User has not provided a destination path, hence we use '$pwd\ArchiveFileName' as the directory where the + # archive file contents would be expanded. If the path '$pwd\ArchiveFileName' already exists then we use the + # Windows default mechanism of appending a counter value at the end of the directory name where the contents + # would be expanded. 
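+                <#
+                    Hedged example (file and folder names are hypothetical): expanding
+                    .\Draft.zip from C:\work without -DestinationPath resolves to
+
+                        Join-Path $pwd ([System.IO.FileInfo]'C:\work\Draft.zip').BaseName   # C:\work\Draft
+
+                    and that folder is created below if it does not already exist.
+                #>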
+ if (!$isDestinationPathProvided) + { + $archiveFile = New-Object System.IO.FileInfo $resolvedSourcePaths + $resolvedDestinationPath = Join-Path -Path $resolvedDestinationPath -ChildPath $archiveFile.BaseName + $destinationPathExists = Test-Path -LiteralPath $resolvedDestinationPath -PathType Container + + if (!$destinationPathExists) + { + New-Item -Path $resolvedDestinationPath -ItemType Directory -Confirm:$isConfirm -Verbose:$isVerbose -ErrorAction Stop | Out-Null + } + } + + ExpandArchiveHelper $resolvedSourcePaths $resolvedDestinationPath ([ref]$expandedItems) $Force $isVerbose $isConfirm + + $isArchiveFileProcessingComplete = $true + } + finally + { + # The $isArchiveFileProcessingComplete would be set to $false if user has typed 'CTRL + C' to + # terminate the cmdlet execution or if an unhandled exception is thrown. + if ($isArchiveFileProcessingComplete -eq $false) + { + if ($expandedItems.Count -gt 0) + { + # delete the expanded file/directory as the archive + # file was not completly expanded. + $expandedItems | ForEach-Object { Remove-Item $_ -Force -Recurse } + } + } + } + } + } +} + +function Write-LocalMessage +{ + [CmdletBinding()] + Param ( + [string]$Message + ) + + if (Test-Path function:Write-PSFMessage) { Write-PSFMessage -Level Important -Message $Message } + else { Write-Host $Message } +} +#endregion Utility Functions + +try +{ + [System.Net.ServicePointManager]::SecurityProtocol = "Tls12" + + Write-LocalMessage -Message "Downloading repository from '$($BaseUrl)/archive/$($Branch).zip'" + Invoke-WebRequest -Uri "$($BaseUrl)/archive/$($Branch).zip" -UseBasicParsing -OutFile "$($env:TEMP)\$($ModuleName).zip" -ErrorAction Stop + + Write-LocalMessage -Message "Creating temporary project folder: '$($env:TEMP)\$($ModuleName)'" + $null = New-Item -Path $env:TEMP -Name $ModuleName -ItemType Directory -Force -ErrorAction Stop + + Write-LocalMessage -Message "Extracting archive to '$($env:TEMP)\$($ModuleName)'" + Expand-Archive -Path "$($env:TEMP)\$($ModuleName).zip" -DestinationPath "$($env:TEMP)\$($ModuleName)" -ErrorAction Stop + + $basePath = Get-ChildItem "$($env:TEMP)\$($ModuleName)\*" | Select-Object -First 1 + if ($SubFolder) { $basePath = "$($basePath)\$($SubFolder)" } + + # Only needed for PS v5+ but doesn't hurt anyway + $manifest = "$($basePath)\$($ModuleName).psd1" + $manifestData = Invoke-Expression ([System.IO.File]::ReadAllText($manifest)) + $moduleVersion = $manifestData.ModuleVersion + Write-LocalMessage -Message "Download concluded: $($ModuleName) | Branch $($Branch) | Version $($moduleVersion)" + + # Determine output path + $path = "$($env:ProgramFiles)\WindowsPowerShell\Modules\$($ModuleName)" + if ($doUserMode) { $path = "$(Split-Path $profile.CurrentUserAllHosts)\Modules\$($ModuleName)" } + if ($PSVersionTable.PSVersion.Major -ge 5) { $path += "\$moduleVersion" } + + if ((Test-Path $path) -and (-not $Force)) + { + Write-LocalMessage -Message "Module already installed, interrupting installation" + return + } + + Write-LocalMessage -Message "Creating folder: $($path)" + $null = New-Item -Path $path -ItemType Directory -Force -ErrorAction Stop + + Write-LocalMessage -Message "Copying files to $($path)" + foreach ($file in (Get-ChildItem -Path $basePath)) + { + Move-Item -Path $file.FullName -Destination $path -ErrorAction Stop + } + + Write-LocalMessage -Message "Cleaning up temporary files" + Remove-Item -Path "$($env:TEMP)\$($ModuleName)" -Force -Recurse + Remove-Item -Path "$($env:TEMP)\$($ModuleName).zip" -Force + + Write-LocalMessage -Message 
"Installation of the module $($ModuleName), Branch $($Branch), Version $($moduleVersion) completed successfully!" +} +catch +{ + Write-LocalMessage -Message "Installation of the module $($ModuleName) failed!" + + Write-LocalMessage -Message "Cleaning up temporary files" + Remove-Item -Path "$($env:TEMP)\$($ModuleName)" -Force -Recurse + Remove-Item -Path "$($env:TEMP)\$($ModuleName).zip" -Force + + throw +} \ No newline at end of file diff --git a/library/d365fo.tools/d365fo.tools.sln b/library/d365fo.tools/d365fo.tools.sln new file mode 100644 index 00000000..5f532324 --- /dev/null +++ b/library/d365fo.tools/d365fo.tools.sln @@ -0,0 +1,25 @@ + +Microsoft Visual Studio Solution File, Format Version 12.00 +# Visual Studio 15 +VisualStudioVersion = 15.0.27130.2010 +MinimumVisualStudioVersion = 10.0.40219.1 +Project("{361C84EE-64AA-4E88-94D8-2104C3670C0C}") = "d365fo.tools", "d365fo.tools\d365fo.tools.csproj", "{426BC3CD-F2B9-4B1E-95EF-C5299426A72A}" +EndProject +Global + GlobalSection(SolutionConfigurationPlatforms) = preSolution + Debug|Any CPU = Debug|Any CPU + Release|Any CPU = Release|Any CPU + EndGlobalSection + GlobalSection(ProjectConfigurationPlatforms) = postSolution + {426BC3CD-F2B9-4B1E-95EF-C5299426A72A}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {426BC3CD-F2B9-4B1E-95EF-C5299426A72A}.Debug|Any CPU.Build.0 = Debug|Any CPU + {426BC3CD-F2B9-4B1E-95EF-C5299426A72A}.Release|Any CPU.ActiveCfg = Release|Any CPU + {426BC3CD-F2B9-4B1E-95EF-C5299426A72A}.Release|Any CPU.Build.0 = Release|Any CPU + EndGlobalSection + GlobalSection(SolutionProperties) = preSolution + HideSolutionNode = FALSE + EndGlobalSection + GlobalSection(ExtensibilityGlobals) = postSolution + SolutionGuid = {3A54BDDB-A20F-4A50-9C82-E62C507D0415} + EndGlobalSection +EndGlobal diff --git a/library/d365fo.tools/d365fo.tools/Class1.cs b/library/d365fo.tools/d365fo.tools/Class1.cs new file mode 100644 index 00000000..417e2a87 --- /dev/null +++ b/library/d365fo.tools/d365fo.tools/Class1.cs @@ -0,0 +1,8 @@ +using System; + +namespace d365fo.tools +{ + public class Class1 + { + } +} diff --git a/library/d365fo.tools/d365fo.tools/d365fo.tools.csproj b/library/d365fo.tools/d365fo.tools/d365fo.tools.csproj new file mode 100644 index 00000000..6f9ffaa2 --- /dev/null +++ b/library/d365fo.tools/d365fo.tools/d365fo.tools.csproj @@ -0,0 +1,21 @@ + + + + net4.5.2 + + + + ..\..\..\d365fo.tools\bin + ..\..\..\d365fo.tools\bin\d365fo.tools.xml + + + + ..\..\..\d365fo.tools\bin + ..\..\..\d365fo.tools\bin\d365fo.tools.xml + + + + false + + +