I am using the PowerShell script below to read and process a single 17 MB text file. The input file contains around 200,000 rows and 12 columns. Currently the script takes almost an hour to process the file. How can I optimize the processing time? (A streaming rewrite I am considering is sketched after the script.)
Script:
$fields = Get-Content Temp.txt
$results = @()
foreach ($i in $fields)
{
    $field = $i -split '\t' -replace '^\s*|\s*$'
    $field1  = $field[0]
    $field2  = $field[1]
    $field3  = $field[2]
    $field4  = $field[3]
    $field5  = $field[4]
    $field6  = $field[5]
    $field7  = $field[6]
    $field8  = $field[7]
    $field9  = $field[8]
    $field10 = $field[9]
    $field11 = $field[10]
    $field12 = $field[11]
    if ($field1 -eq "4803" -and $field[2].Substring(0,2) -eq "60")
    {
        $field2 = "5000000"
    }
    else
    {
        $field2 = $field[1]
    }
    $details = @{
        Column1  = $field1
        Column2  = $field2
        Column3  = $field3
        Column4  = $field4
        Column5  = $field5
        Column6  = $field6
        Column7  = $field7
        Column8  = $field8
        Column9  = $field9
        Column10 = $field10
        Column11 = $field11
        Column12 = $field12
    }
    $results += New-Object PSObject -Property $details
}
$results | ForEach-Object { '{0} {1} ... {11}' -f $_.Column1, $_.Column2, ... $_.Column12 } | Set-Content -Path Temp.txt
[Environment]::Exit(0)
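From what I have read, the biggest cost is probably growing $results with += (PowerShell copies the whole array on every append) and creating a PSObject per row. Below is an untested streaming rewrite I sketched. It makes some assumptions: tab-delimited input with no header row, a tab-joined line as acceptable output, PowerShell 5 or later for the ::new() constructor syntax, and Temp_new.txt as a placeholder output file name. Would something along these lines be the right direction?

# Minimal sketch, untested: stream the file line by line instead of
# collecting everything into $results with +=.
$inPath  = Join-Path $PWD 'Temp.txt'      # same input file as above
$outPath = Join-Path $PWD 'Temp_new.txt'  # placeholder output name

$reader = [System.IO.StreamReader]::new($inPath)
$writer = [System.IO.StreamWriter]::new($outPath)
try {
    while ($null -ne ($line = $reader.ReadLine())) {
        # Same split/trim as the original script
        $field = $line -split '\t' -replace '^\s*|\s*$'
        # Same condition as the original: first column is 4803
        # and the third column starts with "60"
        if ($field[0] -eq '4803' -and $field[2].Substring(0,2) -eq '60') {
            $field[1] = '5000000'
        }
        # Write the row straight out instead of building a PSObject
        $writer.WriteLine($field -join "`t")
    }
}
finally {
    $reader.Close()
    $writer.Close()
}

I have also read that simply capturing the loop output directly, as in $results = foreach ($i in $fields) { ... }, avoids the += array-copying cost without restructuring the rest of the script. Is the full streaming approach worth it here, or is that simpler change enough?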