# DNSServer.DebugLogParser.psm1
$script:ModuleRoot = $PSScriptRoot

function ConvertFrom-DnsLogLine {
    <#
    .SYNOPSIS
        Parses a single DNS debug log line into structured data.

    .DESCRIPTION
        Internal helper function that parses a DNS Server debug log line and extracts
        structured information including timestamp, protocol, query type, client IP, and domain.

        Supports culture-aware date/time parsing to handle DNS debug logs from servers with
        different regional settings (e.g., German DD.MM.YYYY, US MM/DD/YYYY, Swedish YYYY-MM-DD).

        Supports filtering by context type: PACKET, EVENT, Note, DSPOLL, INIT, LOOKUP,
        RECURSE, REMOTE, and TOMBSTN.

    .PARAMETER Line
        The log line string to parse.

    .PARAMETER Culture
        The culture to use for parsing date/time values. Defaults to CurrentCulture.
        DNS Server debug logs use the date format of the Windows locale on the source server.

    .PARAMETER ContextFilter
        Filters the log lines by context type. Accepts single or multiple values.
        Valid values: 'All', 'Packet', 'Event', 'Note', 'DSPoll', 'Init', 'Lookup', 'Recurse', 'Remote', 'Tombstone'
        Default is 'All' which processes all context types.
        When multiple values are specified (not including 'All'), only log lines matching
        one of the specified contexts are returned.

    .EXAMPLE
        PS C:\> ConvertFrom-DnsLogLine -Line "20.01.2026 23:00:18 0FE0 PACKET 000002C5307CFCD0 UDP Rcv 10.0.0.2 ede1 Q [0001 D NOERROR] A (4)ocsp(8)digicert(3)com(0)"

        Returns a PSCustomObject with DateTime, ThreadId ('0FE0'), Context ('Packet'),
        PacketId, Protocol ('UDP'), Direction ('Rcv'), RemoteIP, Xid, QueryResponse, Opcode,
        FlagsHex, FlagsChar, ResponseCode, QuestionType ('A') and QuestionName ('ocsp.digicert.com').

    .EXAMPLE
        PS C:\> ConvertFrom-DnsLogLine -Line "20.01.2026 23:00:18 0518 EVENT The DNS server has started."

        Returns a PSCustomObject with Context 'Event' and "The DNS server has started."
        in the Information field; all PACKET-specific fields remain empty.

    .EXAMPLE
        PS C:\> ConvertFrom-DnsLogLine -Line "20.01.2026 23:00:18 5C8 Note: got GQCS failure on a dead socket context status=995, socket=612, pcon=00000020F4B18490, state=-1, IP=::"

        Returns a PSCustomObject with Context 'Note' and the diagnostic text in the Information field.

    .NOTES
        Internal function not exported from module.
        Version: 1.4.0.0
        Author: Andi Bellstedt, Copilot
        Date: 2026-01-25
        Keywords: DNS, DebugLog, Parser, LogParser, Internal

    .LINK
        https://github.com/AndiBellstedt/DNSServer.DebugLogParser
    #>
    [CmdletBinding()]
    [OutputType([PSCustomObject])]
    param(
        [string]
        $Line,

        [System.Globalization.CultureInfo]
        $Culture = [System.Globalization.CultureInfo]::CurrentCulture,

        [ValidateSet('All', 'Packet', 'Event', 'Note', 'DSPoll', 'Init', 'Lookup', 'Recurse', 'Remote', 'Tombstone')]
        [string[]]
        $ContextFilter = @('All')
    )

    # Skip empty lines
    if ([string]::IsNullOrWhiteSpace($Line)) { return $null }

    # Skip lines that start with any whitespace character (space, tab, etc.) -
    # those are indented continuation/detail lines, not record headers
    if ([char]::IsWhiteSpace($Line[0])) { return $null }

    # Skip lines that start with "TCP" or "UDP" (non-standard format)
    if ($Line.StartsWith('TCP') -or $Line.StartsWith('UDP')) { return $null }

    # Skip orphaned "Response packet" lines (continuation lines without date prefix)
    if ($Line.StartsWith('Response packet')) { return $null }

    # Minimum line length check (date + time + minimal data)
    if ($Line.Length -lt 25) { return $null }

    # Parse fixed-position fields for maximum performance.
    # Supported date formats (based on Windows locale):
    #   DD.MM.YYYY HH:MM:SS (German)
    #   DD/MM/YYYY HH:MM:SS (UK/Austrian)
    #   YYYY-MM-DD HH:MM:SS (Swedish/ISO)
    #   M/D/YYYY H:MM:SS AM/PM or MM/DD/YYYY HH:MM:SS (US, variable length)
    # Find the first whitespace to locate the date/time boundary.
    $firstSpace = $Line.IndexOf(' ')
    if ($firstSpace -lt 6 -or $firstSpace -gt 12) { return $null }

    # Extract date string
    $dateStr = $Line.Substring(0, $firstSpace)

    # Find second space to get time portion
    $secondSpace = $Line.IndexOf(' ', $firstSpace + 1)
    if ($secondSpace -eq -1 -or $secondSpace - $firstSpace -lt 6) { return $null }
    $timeStr = $Line.Substring($firstSpace + 1, $secondSpace - $firstSpace - 1)

    # Track where the datetime portion ends for parsing the remaining fields
    $dateTimeEndPosition = $secondSpace

    # Check for AM/PM designator (12-hour clock format, e.g., en-US culture).
    # AM/PM follows immediately after the time with a space separator.
    $thirdSpace = $Line.IndexOf(' ', $secondSpace + 1)
    if ($thirdSpace -ne -1) {
        $potentialAmPm = $Line.Substring($secondSpace + 1, $thirdSpace - $secondSpace - 1)
        if ($potentialAmPm -eq 'AM' -or $potentialAmPm -eq 'PM') {
            $timeStr = "$($timeStr) $($potentialAmPm)"
            $dateTimeEndPosition = $thirdSpace
        }
    }

    # Parse DateTime using culture-aware approach without regex.
    # Use TryParse with the specified culture for maximum compatibility.
    $dateTime = [datetime]::MinValue
    $dateTimeStr = "$($dateStr) $($timeStr)"

    # Try culture-specific parsing first (high performance path)
    if (-not [datetime]::TryParse($dateTimeStr, $Culture.DateTimeFormat, [System.Globalization.DateTimeStyles]::None, [ref]$dateTime)) {
        # Fallback: Try invariant culture for ISO format (YYYY-MM-DD)
        if (-not [datetime]::TryParse($dateTimeStr, [System.Globalization.CultureInfo]::InvariantCulture.DateTimeFormat, [System.Globalization.DateTimeStyles]::None, [ref]$dateTime)) {
            return $null
        }
    }

    # Remaining part after datetime (uses correct position whether AM/PM was present or not)
    $remaining = $Line.Substring($dateTimeEndPosition).TrimStart()

    # Split on whitespace for remaining fields to detect context type
    $parts = $remaining.Split([char[]]@(' ', "`t"), [StringSplitOptions]::RemoveEmptyEntries)

    # Assume invalid if less than 2 parts. At least Thread ID and Context are required.
    if ($parts.Count -lt 2) { return $null }

    # Field 3: Thread ID (always first part)
    $threadId = $parts[0]

    # Detect context type from the second part
    $contextRaw = $parts[1]
    $context = [string]::Empty
    $information = [string]::Empty

    # Initialize all PACKET-specific fields as empty
    $packetId = [string]::Empty
    $protocol = [string]::Empty
    $direction = [string]::Empty
    $remoteIp = [string]::Empty
    $xid = [string]::Empty
    $queryResponse = [string]::Empty
    $opcode = [string]::Empty
    $flagsHex = [string]::Empty
    $flagsChar = [string]::Empty
    $responseCode = [string]::Empty
    $questionType = [string]::Empty
    $questionName = [string]::Empty

    # Context type mapping for information-extraction contexts:
    # Raw context keyword -> Context name and keyword length.
    # Note: PACKET is handled separately due to completely different parsing logic.
    $contextMap = @{
        'EVENT'   = @{ Name = 'Event'; KeywordLength = 5 }
        'DSPOLL'  = @{ Name = 'DSPoll'; KeywordLength = 6 }
        'INIT'    = @{ Name = 'Init'; KeywordLength = 4 }
        'LOOKUP'  = @{ Name = 'Lookup'; KeywordLength = 6 }
        'RECURSE' = @{ Name = 'Recurse'; KeywordLength = 7 }
        'REMOTE'  = @{ Name = 'Remote'; KeywordLength = 6 }
        'TOMBSTN' = @{ Name = 'Tombstone'; KeywordLength = 7 }
        'Note:'   = @{ Name = 'Note'; KeywordLength = 5 }
    }

    # Determine context type and apply filter
    if ($contextRaw -eq 'PACKET') {
        $context = 'Packet'

        # Apply context filter - check if 'All' is in array or if current context is in filter array
        if ($ContextFilter -notcontains 'All' -and $ContextFilter -notcontains $context) { return $null }

        # Check for information-only PACKET records (e.g., "Response packet XXX does not match any outstanding query").
        # Standard PACKET format requires at least 7 parts: ThreadId, PACKET, PacketId, Protocol, Direction, RemoteIP, Xid.
        # Information-only packets have format: ThreadId, PACKET, followed by message text.
        # Detect by checking if parts[3] is NOT a valid protocol indicator (UDP/TCP).
        if ($parts.Count -lt 7 -or ($parts[3] -ne 'UDP' -and $parts[3] -ne 'TCP')) {
            # Information-only PACKET record - extract everything after "PACKET" as information
            $packetIndex = $remaining.IndexOf('PACKET')
            if ($packetIndex -gt -1) {
                $information = $remaining.Substring($packetIndex + 6).TrimStart()
            }

            # Return with empty packet-specific fields but with information populated
            return [PSCustomObject]@{
                DateTime      = $dateTime
                ThreadId      = $threadId
                Context       = $context
                PacketId      = $packetId
                Protocol      = $protocol
                Direction     = $direction
                RemoteIP      = $remoteIp
                Xid           = $xid
                QueryResponse = $queryResponse
                Opcode        = $opcode
                FlagsHex      = $flagsHex
                FlagsChar     = $flagsChar
                ResponseCode  = $responseCode
                QuestionType  = $questionType
                QuestionName  = $questionName
                Information   = $information
            }
        }

        # Process standard PACKET context
        # Field 5: Internal packet identifier
        $packetId = $parts[2]
        # Field 6: UDP/TCP indicator
        $protocol = $parts[3]
        # Field 7: Send/Receive indicator
        $direction = $parts[4]
        # Field 8: Remote IP
        $remoteIp = $parts[5]
        # Field 9: Xid (hex)
        $xid = $parts[6]

        # Fields 10-16: Variable position based on Query/Response
        $partIndex = 7

        # Check for Response indicator "R"
        if ($partIndex -lt $parts.Count -and $parts[$partIndex] -eq 'R') {
            $queryResponse = 'R'
            $partIndex++
        }

        # Field 11: Opcode (Q, N, U, ?)
        if ($partIndex -lt $parts.Count) {
            $opcode = $parts[$partIndex]
            $partIndex++
        }

        # Fields 12-14: Flags section in brackets [FlagsHex FlagsChar ResponseCode]
        # Format: [8081 DR NOERROR] or [8085 A DR NOERROR]
        $bracketStart = $remaining.IndexOf('[')
        $bracketEnd = $remaining.IndexOf(']')
        if ($bracketStart -gt -1 -and $bracketEnd -gt $bracketStart) {
            $bracketContent = $remaining.Substring($bracketStart + 1, $bracketEnd - $bracketStart - 1).Trim()
            $flagParts = $bracketContent.Split([char[]]@(' ', "`t"), [StringSplitOptions]::RemoveEmptyEntries)

            if ($flagParts.Count -ge 1) { $flagsHex = $flagParts[0] }

            # ResponseCode is always last (NOERROR, NXDOMAIN, etc.)
            # FlagsChar is everything between FlagsHex and ResponseCode
            if ($flagParts.Count -ge 2) {
                $responseCode = $flagParts[$flagParts.Count - 1]
                if ($flagParts.Count -gt 2) {
                    $flagsChar = [string]::Join('', $flagParts[1..($flagParts.Count - 2)])
                }
            }
        }

        # Field 15 & 16: Question Type and Name (after the bracket)
        if ($bracketEnd -gt -1 -and $bracketEnd + 1 -lt $remaining.Length) {
            $afterBracket = $remaining.Substring($bracketEnd + 1).TrimStart()
            $afterParts = $afterBracket.Split([char[]]@(' ', "`t"), 2, [StringSplitOptions]::RemoveEmptyEntries)
            if ($afterParts.Count -ge 1) { $questionType = $afterParts[0] }
            if ($afterParts.Count -ge 2) { $questionName = $afterParts[1].Trim() }
        }

        # Convert question name to FQDN
        if ($questionName) {
            $questionName = ConvertTo-Fqdn -EncodedName $questionName
        }
    } elseif ($contextMap.ContainsKey($contextRaw)) {
        # Handle all information-extraction context types uniformly
        $contextInfo = $contextMap[$contextRaw]
        $context = $contextInfo.Name

        # Apply context filter - check if 'All' is in array or if current context is in filter array
        if ($ContextFilter -notcontains 'All' -and $ContextFilter -notcontains $context) { return $null }

        # Extract information text (everything after the context keyword with leading whitespace trimmed)
        $keywordIndex = $remaining.IndexOf($contextRaw)
        if ($keywordIndex -gt -1) {
            $information = $remaining.Substring($keywordIndex + $contextInfo.KeywordLength).TrimStart()
        }
    } else {
        # Unknown context type - treat as "raw" generic information.
        # Apply context filter - only include unknown types if 'All' is specified.
        if ($ContextFilter -notcontains 'All') { return $null }

        $context = $contextRaw

        # Everything after Thread ID and Context becomes the information text.
        # BUGFIX: guard against $parts.Count -eq 2 - the original slice
        # $parts[2..($parts.Count - 1)] evaluated as $parts[2..1] (a descending
        # range), which wrongly copied the context word into Information.
        if ($parts.Count -gt 2) {
            $information = ($parts[2..($parts.Count - 1)] -join ' ').Trim()
        }
    }

    # Return parsed object using ordered hashtable for performance
    return [PSCustomObject]@{
        DateTime      = $dateTime
        ThreadId      = $threadId
        Context       = $context
        PacketId      = $packetId
        Protocol      = $protocol
        Direction     = $direction
        RemoteIP      = $remoteIp
        Xid           = $xid
        QueryResponse = $queryResponse
        Opcode        = $opcode
        FlagsHex      = $flagsHex
        FlagsChar     = $flagsChar
        ResponseCode  = $responseCode
        QuestionType  = $questionType
        QuestionName  = $questionName
        Information   = $information
    }
}

function ConvertTo-Fqdn {
    <#
    .SYNOPSIS
        Converts a DNS name format to Fully Qualified Domain Name (FQDN).

    .DESCRIPTION
        Internal helper function that converts DNS server log name format to standard FQDN.
        Handles DNS log format with length prefixes and converts to dotted notation.

    .PARAMETER EncodedName
        DNS name string in encoded log format to convert.
        Format uses length-prefixed labels like (7)example(3)com(0).

    .EXAMPLE
        PS C:\> ConvertTo-Fqdn -EncodedName "(7)example(3)com(0)"

        Returns: example.com

    .NOTES
        Internal function not exported from module.
        Version: 1.0.1.1
        Author: Andi Bellstedt, Copilot
        Date: 2026-01-25
        Keywords: DNS, DebugLog, Parser, LogParser, Internal, FQDN

    .LINK
        https://github.com/AndiBellstedt/DNSServer.DebugLogParser
    #>
    [CmdletBinding()]
    [OutputType([string])]
    param(
        [string]
        $EncodedName
    )

    if ([string]::IsNullOrWhiteSpace($EncodedName)) { return [string]::Empty }

    # Fast string parsing without regex: walk the (len)label(len)label(0) chain
    $result = [System.Text.StringBuilder]::new(256)
    $i = 0
    $len = $EncodedName.Length

    while ($i -lt $len) {
        # Find opening parenthesis
        $openParen = $EncodedName.IndexOf('(', $i)
        if ($openParen -eq -1) { break }

        # Find closing parenthesis
        $closeParen = $EncodedName.IndexOf(')', $openParen)
        if ($closeParen -eq -1) { break }

        # Extract the number (length indicator)
        $lengthStr = $EncodedName.Substring($openParen + 1, $closeParen - $openParen - 1)

        # Check if it's the terminating (0) root label
        if ($lengthStr -eq '0') { break }

        # Extract the label that follows (text up to the next '(' or end of string)
        $labelStart = $closeParen + 1
        $nextParen = $EncodedName.IndexOf('(', $labelStart)
        if ($nextParen -eq -1) {
            $label = $EncodedName.Substring($labelStart)
        } else {
            $label = $EncodedName.Substring($labelStart, $nextParen - $labelStart)
        }

        # Append with dot separator
        if ($result.Length -gt 0) { $null = $result.Append('.') }
        $null = $result.Append($label)

        $i = if ($nextParen -eq -1) { $len } else { $nextParen }
    }

    return $result.ToString()
}

function ConvertTo-PacketDetailJson {
    <#
    .SYNOPSIS
        Converts DNS PACKET detail lines into a structured JSON object.

    .DESCRIPTION
        Internal helper function that parses the indented detail block following a DNS PACKET
        log entry and converts it into a structured JSON object for storage and analysis.

        The detail block contains TCP/UDP connection information, message structure, and
        DNS section data (Question, Answer, Authority, Additional sections).

        PERFORMANCE: Uses manual string parsing instead of regex where possible for maximum
        throughput when processing large log files.

    .PARAMETER DetailLines
        An array of detail lines to parse.
        Relative leading indentation must be preserved exactly as in the original log,
        as the parser uses indentation to determine nesting.

    .EXAMPLE
        PS C:\> $detailLines = @(
            'Socket = 848',
            'Remote addr 10.10.0.11, port 60580',
            'Message:',
            '  XID       0x0001'
        )
        PS C:\> ConvertTo-PacketDetailJson -DetailLines $detailLines

        Returns a JSON string representing the parsed detail structure.

    .NOTES
        Internal function not exported from module.
        Version: 1.0.1.0
        Author: Andi Bellstedt, Copilot
        Date: 2026-01-25
        Keywords: DNS, DebugLog, Parser, LogParser, Internal, JSON

    .LINK
        https://github.com/AndiBellstedt/DNSServer.DebugLogParser
    #>
    [CmdletBinding()]
    [OutputType([string])]
    param(
        [Parameter(Mandatory = $true)]
        [AllowNull()]
        [AllowEmptyCollection()]
        [AllowEmptyString()]
        [System.Collections.Generic.List[string]]
        $DetailLines
    )

    # Early return for empty input
    if ($null -eq $DetailLines -or $DetailLines.Count -eq 0) { return [string]::Empty }

    # Build structured data object using ordered hashtable for predictable JSON output
    $detailObject = [ordered]@{}
    $messageObject = [ordered]@{}
    $flagsObject = $null

    # Track current section for DNS sections (QUESTION, ANSWER, etc.)
    $currentSectionName = $null
    $currentSectionData = $null
    $currentRecord = $null

    # State tracking: whether the "Message:" marker has been passed
    $inMessage = $false

    for ($i = 0; $i -lt $DetailLines.Count; $i++) {
        $rawLine = $DetailLines[$i]

        # Skip empty lines
        if ([string]::IsNullOrWhiteSpace($rawLine)) { continue }

        # Calculate indentation level (each 2 spaces = 1 level)
        $trimmedLine = $rawLine.TrimStart()
        $indentSpaces = $rawLine.Length - $trimmedLine.Length
        $indentLevel = [Math]::Floor($indentSpaces / 2)

        # Check for Message section start (top-level)
        if ($trimmedLine -eq 'Message:') {
            $inMessage = $true
            continue
        }

        # Check for DNS section headers (inside Message)
        if ($inMessage -and $trimmedLine -match '^(QUESTION|ANSWER|AUTHORITY|ADDITIONAL) SECTION:$') {
            # Save previous section if exists
            if ($null -ne $currentSectionName -and $null -ne $currentSectionData) {
                $messageObject[$currentSectionName] = $currentSectionData.ToArray()
            }
            $currentSectionName = $Matches[1]
            $currentSectionData = [System.Collections.Generic.List[object]]::new()
            $currentRecord = $null
            continue
        }

        # Handle "empty" marker for sections
        if ($trimmedLine -eq 'empty' -and $null -ne $currentSectionData) {
            # Empty section - keep it as empty array
            continue
        }

        # Parse based on context
        if ($null -ne $currentSectionData) {
            # Inside a DNS section (QUESTION, ANSWER, etc.)
            if ($trimmedLine.StartsWith('Offset = ')) {
                # New record in section: "Offset = 0x000c, RR count = 0"
                $currentRecord = [ordered]@{}

                # Parse offset and RR count as comma-separated Key = Value pairs
                $parts = $trimmedLine.Split(',', [StringSplitOptions]::RemoveEmptyEntries)
                foreach ($part in $parts) {
                    $kvp = $part.Trim().Split('=', 2)
                    if ($kvp.Count -eq 2) {
                        # Strip spaces from the key for JSON compatibility (e.g. "RR count" -> "RRcount")
                        $currentRecord[$kvp[0].Trim() -replace ' ', ''] = $kvp[1].Trim()
                    }
                }
                $currentSectionData.Add($currentRecord)
            } elseif ($trimmedLine.StartsWith('Name = ') -and $null -ne $currentRecord) {
                # Name property for current record
                $nameValue = $trimmedLine.Substring(7)
                # Convert encoded name to FQDN
                $currentRecord['Name'] = ConvertTo-Fqdn -EncodedName $nameValue
            } elseif ($trimmedLine -match '^(\S+)\s+(.+)$' -and $null -ne $currentRecord) {
                # Other record properties (QTYPE, QCLASS, etc.)
                $currentRecord[$Matches[1]] = $Matches[2].Trim()
            }
        } elseif ($inMessage) {
            # Inside Message section but before any DNS section.
            # Check if this is a Flags sub-property (higher indent level)
            if ($indentLevel -ge 2 -and $null -ne $flagsObject) {
                # Sub-property of Flags (QR, OPCODE, AA, TC, RD, RA, Z, CD, AD, RCODE)
                if ($trimmedLine -match '^(\S+)\s+(.+)$') {
                    $flagsObject[$Matches[1]] = $Matches[2].Trim()
                }
            } elseif ($trimmedLine -match '^(\S+)\s+(.+)$') {
                # Message-level property
                $key = $Matches[1]
                $value = $Matches[2].Trim()

                if ($key -eq 'Flags') {
                    # Start collecting flag sub-properties
                    $flagsObject = [ordered]@{ 'Value' = $value }
                    $messageObject[$key] = $flagsObject
                } else {
                    $messageObject[$key] = $value
                }
            }
        } else {
            # Top-level connection properties (before Message:)
            $eqPos = $trimmedLine.IndexOf(' = ')
            if ($eqPos -gt 0) {
                # Simple "Key = Value" format
                $key = $trimmedLine.Substring(0, $eqPos).Trim()
                $value = $trimmedLine.Substring($eqPos + 3).Trim()

                # Normalize key to remove spaces for JSON compatibility
                $jsonKey = $key -replace ' ', ''
                $detailObject[$jsonKey] = $value
            } elseif ($trimmedLine.Contains('=') -and $trimmedLine.Contains(',')) {
                # Multiple key=value pairs: "Time Query=179992, Queued=0, Expire=0"
                $spacePos = $trimmedLine.IndexOf(' ')
                if ($spacePos -gt 0) {
                    $keyPart = $trimmedLine.Substring(0, $spacePos)
                    $valuePart = $trimmedLine.Substring($spacePos + 1)

                    # Parse comma-separated key=value pairs
                    $pairs = $valuePart.Split(',', [StringSplitOptions]::RemoveEmptyEntries)
                    $pairObject = [ordered]@{}
                    foreach ($pair in $pairs) {
                        $kvp = $pair.Trim().Split('=', 2)
                        if ($kvp.Count -eq 2) {
                            $pairObject[$kvp[0].Trim()] = $kvp[1].Trim()
                        }
                    }
                    $detailObject[$keyPart] = $pairObject
                }
            } elseif ($trimmedLine -match '^(\S+)\s+(.+)$') {
                # "Key Value" format (e.g., "Remote addr 10.10.0.11, port 60580")
                $detailObject[$Matches[1]] = $Matches[2].Trim()
            }
        }
    }

    # Save last section if exists
    if ($null -ne $currentSectionName -and $null -ne $currentSectionData) {
        $messageObject[$currentSectionName] = $currentSectionData.ToArray()
    }

    # Add Message object if we collected any message data
    if ($messageObject.Count -gt 0) {
        $detailObject['Message'] = $messageObject
    }

    # Convert to JSON using ConvertTo-Json for proper escaping.
    # Use -Compress for minimal output size (important for CSV storage).
    # Use -Depth 5 to handle nested structures.
    if ($detailObject.Count -gt 0) {
        try {
            return ConvertTo-Json -InputObject $detailObject -Compress -Depth 5 -ErrorAction Stop
        } catch {
            # Fallback: Return empty string if JSON conversion fails
            return [string]::Empty
        }
    }

    return [string]::Empty
}

function Test-DnsDebugLogHeader {
    <#
    .SYNOPSIS
        Validates if a file contains a valid DNS debug log header and returns header line count.

    .DESCRIPTION
        Internal helper function that checks if a file starts with a valid DNS Server debug log header.
Checks the first few lines of the file to ensure they match the standard DNS debug log format: - Line 1: "DNS Server log file creation at <timestamp>" - Line 2: "Log file wrap at <timestamp>" or empty line - Line 3: Empty line (or line 4 if line 2 had wrap message) - Next: "Message logging key (for packets - other items use a subset of these fields):" - Next: Tab + "Field # Information Values" The function validates the essential header components and returns the number of header lines to skip (typically 30 for standard logs). Returns 0 if validation fails. Supports both standard DNS debug logs and detailed logs with additional UDP/TCP information. .PARAMETER Path Path to the file to validate. .EXAMPLE PS C:\> Test-DnsDebugLogHeader -Path "C:\Windows\System32\dns\dns.log" Returns 30 if the file has a valid DNS debug log header, 0 if invalid. .NOTES Internal function not exported from module. Version: 1.0.0 Author: Andi Bellstedt Date: 2026-01-23 #> [CmdletBinding()] [OutputType([int])] param( [Parameter(Mandatory = $true)] [string] $Path ) try { $reader = [System.IO.StreamReader]::new($Path, [System.Text.Encoding]::UTF8, $true) # Read first several lines to validate header structure $lines = @() for ($i = 0; $i -lt 10; $i++) { $line = $reader.ReadLine() if ($null -eq $line) { break } $lines += $line } $reader.Close() $reader.Dispose() # Need at least 5 lines to validate if ($lines.Count -lt 5) { return 0 } # Line 1: Must start with "DNS Server log file creation at" with timestamp # Use StartsWith for better performance than regex if (-not $lines[0].StartsWith('DNS Server log file creation at ')) { return 0 } # Quick validation: line should be at least 43 chars to account for different date formats # Shortest format: "DNS Server log file creation at M/D/YYYY H:MM:SS" (43 chars) # Longest format: "DNS Server log file creation at DD.MM.YYYY HH:MM:SS" (51 chars) if ($lines[0].Length -lt 43) { return 0 } # Line 2: Can be "Log file wrap at" or empty line $lineOffset = 0 
if ($lines[1].StartsWith('Log file wrap at')) { $lineOffset = 1 # If line 2 is wrap message, line 3 should be empty if (-not [string]::IsNullOrWhiteSpace($lines[2])) { return 0 } } elseif (-not [string]::IsNullOrWhiteSpace($lines[1])) { return 0 } # Find the "Message logging key" line (should be at index 2+offset or 3+offset) $messageKeyIndex = -1 for ($i = (2 + $lineOffset); $i -lt $lines.Count; $i++) { if ($lines[$i].StartsWith('Message logging key (for packets')) { $messageKeyIndex = $i break } } if ($messageKeyIndex -eq -1) { return 0 } # Next line after "Message logging key" should contain "Field #" $fieldHeaderIndex = $messageKeyIndex + 1 if ($fieldHeaderIndex -ge $lines.Count) { return 0 } # Use Contains for better performance than regex with \s+ pattern $fieldHeaderLine = $lines[$fieldHeaderIndex].TrimStart() if (-not $fieldHeaderLine.StartsWith('Field #') -or -not $fieldHeaderLine.Contains('Information')) { return 0 } # Valid DNS debug log - return standard header line count # Standard DNS debug log has 29 lines of header + 1 empty line = 30 total return 30 } catch { return 0 } } function Convert-DNSDebugLogFile { <# .SYNOPSIS Transforms Windows DNS Server debug logs into structured CSV format for analysis and reporting. .DESCRIPTION Converts Windows DNS Server debug log files into structured CSV data that can be analyzed in Excel, Power BI, SQL databases, or SIEM tools. Designed for security analysis, performance monitoring, troubleshooting, and compliance reporting. The cmdlet parses DNS debug logs and writes a consistent CSV output for analysis. The CSV output contains 18 columns, including an `Information` column for event/diagnostic text, an optional `Details` JSON column for Packet detail blocks, and an always-present `ComputerName` column (empty unless specified). 
KEY FEATURES: - Streaming processing avoids loading the full file into memory (suitable for very large logs) - High-performance parsing optimized for large files (100MB+) - Customizable CSV delimiter (default: semicolon) - Optional statistical summaries with aggregated metrics - Context filtering (Packet, Event, Note, and additional contexts) to focus on specific log entry types - Culture-aware date parsing and formatting for international servers - Pipeline support for batch processing multiple files - Optional compression of output files (ZIP format) - Optional automatic removal of source files after processing - Header validation to ensure data integrity OUTPUT FORMAT: The ComputerName column is always included at the end of each record. If the -ComputerName parameter is not specified, the column will be empty. This ensures consistent output structure for multi-server consolidation scenarios. PERFORMANCE: Optimized using StreamReader/StreamWriter with 64KB buffers, streaming processing for memory-efficient handling of large files, string operations instead of regex, manual CSV generation, and efficient hashtable-based statistics collection. COMPATIBILITY: - PowerShell 5.1+ (Desktop and Core editions) - Windows Server 2016+ - DNS Server 2012 R2 through 2025 log formats .PARAMETER InputFile Specifies the path to the DNS debug log file to parse. Supports arrays for processing multiple files. Accepts pipeline input from Get-ChildItem or other file-producing cmdlets. .PARAMETER OutputFile Specifies the path for the output CSV file. If not specified, uses the input filename with .csv extension in the same directory as the input file. Important: Must be a file path, not a directory. If you want to use the input file's directory with a custom name, specify the full path including filename. .PARAMETER Delimiter Specifies the delimiter character for the CSV output. 
Default: Semicolon (;) Common alternatives: Comma (,), Tab (`t), Pipe (|) Use semicolon in regions where comma is the decimal separator (Europe). Use comma for standard CSV tools and databases that expect comma-separated values. .PARAMETER ComputerName Specifies the value for the ComputerName column in the CSV output. The ComputerName column is always present in the output - if this parameter is not specified, the column will be empty. Use this when consolidating logs from multiple DNS servers to identify the source server in combined datasets. Note: This is NOT a remoting parameter. It only labels the output. If you point -InputFile to a UNC path, the file is read from that path (no WinRM/remote execution is performed). .PARAMETER OutputType Specifies the type of output to generate. Valid values: - 'CSV': Generate only the data file with all parsed log entries - 'Statistic': Generate only the statistics files with aggregated metrics - 'Both': Generate both data and statistics files (default) Default: Both When statistics are generated, two separate files are created: - '_Statistic.csv': Summary counts per context type per day (Date, Context, Count, ComputerName) - '_PacketStatistic.csv': Detailed PACKET counts per day by client IP, protocol, direction, and query type (Date, ClientIP, Protocol, Direction, QuestionType, Count, ComputerName) .PARAMETER SkipHeaderValidation Bypasses the DNS debug log header validation check. By default, the cmdlet validates that input files have a valid DNS Server debug log header. Use this switch to process files without validation, which can be useful for: - Modified or custom log formats - Troubleshooting validation issues - Non-standard or pre-processed logs Warning: May result in processing errors if the file is not a valid DNS log. .PARAMETER RemoveSourceFile Removes the source DNS debug log file after successful processing. Use this for automated log processing pipelines or disk space management. 
The source file is only removed if processing completes successfully and all output files are created. Safety: Cannot be used with -SkipHeaderValidation to prevent accidental deletion of invalid files. Warning: Source files are permanently deleted. Ensure output files are valid before using this option. .PARAMETER CompressOutput Compresses output CSV files into a ZIP archive after creation. Creates a .zip file containing the generated CSV file(s), then removes the uncompressed CSV(s). The ZIP file is created in the same directory as the output CSV with the same base name. Benefits: - Significantly reduces disk space (CSV files typically compress 90%+) - Simplifies file management and archival - Suitable for long-term storage Example: Input 'dns.log' generates 'dns.csv' compressed to 'dns.zip', then 'dns.csv' is removed. .PARAMETER ContextFilter Filters which log entry types to include in the output. Accepts single or multiple values. DNS debug logs contain different context types: - PACKET: DNS query and response packet information (primary data) - EVENT: DNS server events (e.g., "The DNS server has started.") - Note: Diagnostic notes and warnings (e.g., socket errors, internal states) - DSPoll, Init, Lookup, Recurse, Remote, Tombstone: Additional context types Valid values: - 'All': Include all context types (default) - 'Packet': Include only PACKET entries (DNS queries/responses) - 'Event': Include only EVENT entries (server events) - 'Note': Include only Note entries (diagnostic information) - Any combination: Specify multiple values to include specific context types Default: All Examples: - 'Packet' filters to only DNS traffic - 'Packet','Event' includes both DNS traffic and server events - 'Note','Event' includes diagnostic notes and server events Note: When filtering to 'Event' or 'Note', only DateTime, ThreadId, Context, and Information columns will contain data. Other columns (Protocol, ClientIP, etc.) will be empty. 
.PARAMETER InputCulture Specifies the culture/locale to use for parsing date/time values in the DNS debug log. DNS Server debug logs use the date format of the Windows locale on the server where the log was generated. Use this parameter when processing logs from servers with different regional settings. Default: Current culture Common examples: - 'de-DE' or 'de-AT': German format (DD.MM.YYYY or DD/MM/YYYY) - 'en-US': US format (MM/DD/YYYY with AM/PM) - 'en-GB': UK format (DD/MM/YYYY with 24-hour time) - 'sv-SE': Swedish/ISO format (YYYY-MM-DD) .PARAMETER OutputCulture Specifies the culture/locale to use for formatting date/time values in the output CSV files. Controls how DateTime values are written to the CSV. Use this when CSV files will be consumed by applications or systems with specific regional settings. Default: Current culture Common examples: - 'en-US': US format (MM/DD/YYYY) - 'de-DE': German format (DD.MM.YYYY) - 'sv-SE' or InvariantCulture: ISO format (YYYY-MM-DD) for maximum compatibility .PARAMETER NoDetailsParsing Skips parsing PACKET detail blocks into structured JSON format. When specified, PACKET records with detail blocks will have the TCP/UDP info line in the Information column, but the Details column will remain empty. This significantly improves processing performance for large log files when detailed packet structure analysis is not needed. Use this switch when: - Processing very large log files (100MB+) and only need basic query information - Detail structure (Message flags, DNS sections) is not required for analysis - Maximizing parsing speed is more important than data completeness Performance impact: Can improve processing speed by 30-50% for logs with many PACKET detail blocks. .PARAMETER WhatIf Shows what would happen if the cmdlet runs. The cmdlet is not run. When specified, displays detailed information about the operations that would be performed without actually executing them. 
Useful for: - Previewing which files would be processed - Verifying output file paths before processing - Testing scripts before running in production .PARAMETER Confirm Prompts you for confirmation before running the cmdlet. When specified, prompts for confirmation before: - Processing each DNS debug log file - Removing source files (when -RemoveSourceFile is specified) - Overwriting existing output files Useful for interactive processing when you want to control which files are processed. .EXAMPLE PS C:\> Convert-DNSDebugLogFile -InputFile "C:\Logs\dns.log" Converts the DNS debug log using default settings (both data and statistics files with semicolon delimiter). Output: - C:\Logs\dns.csv - C:\Logs\dns_Statistic.csv - C:\Logs\dns_PacketStatistic.csv .EXAMPLE PS C:\> Convert-DNSDebugLogFile -InputFile "C:\Logs\dns.log" -OutputType CSV Generates only the data file without statistics. Output: C:\Logs\dns.csv .EXAMPLE PS C:\> Convert-DNSDebugLogFile -InputFile "C:\Logs\dns.log" -OutputType Statistic Generates only the statistics file with aggregated metrics. Output: - C:\Logs\dns_Statistic.csv - C:\Logs\dns_PacketStatistic.csv .EXAMPLE PS C:\> Convert-DNSDebugLogFile -InputFile "C:\Logs\dns.log" -OutputFile "C:\Output\parsed.csv" Converts the log to a custom output location. Output: C:\Output\parsed.csv .EXAMPLE PS C:\> Convert-DNSDebugLogFile -InputFile "C:\Logs\dns.log" -Delimiter "," -ComputerName "DNS01" -OutputType Both Converts with comma delimiter and adds ComputerName column with value "DNS01". Output: - C:\Logs\dns.csv - C:\Logs\dns_Statistic.csv - C:\Logs\dns_PacketStatistic.csv .EXAMPLE PS C:\> Get-ChildItem "C:\Logs\*.log" | Convert-DNSDebugLogFile -OutputType Both Batch processes multiple DNS debug log files via pipeline. Output: For each .log file, generates .csv, _Statistic.csv, and _PacketStatistic.csv files .EXAMPLE PS C:\> Convert-DNSDebugLogFile -InputFile "C:\Logs\dns.log" -CompressOutput Converts and compresses output to ZIP archive. 
Output: C:\Logs\dns.zip (containing dns.csv + statistics files) .EXAMPLE PS C:\> Convert-DNSDebugLogFile -InputFile "C:\Logs\dns.log" -RemoveSourceFile -Verbose Converts the log and removes the source file after successful processing. Verbose output confirms file removal. .EXAMPLE PS C:\> Convert-DNSDebugLogFile -InputFile "C:\Logs\dns.log" -InputCulture 'de-DE' -OutputCulture 'en-US' Parses German date format (DD.MM.YYYY) and outputs in US format (MM/DD/YYYY). Use when processing logs from servers with different regional settings. .EXAMPLE PS C:\> Convert-DNSDebugLogFile -InputFile "C:\Logs\dns.log" -ContextFilter 'Packet' Converts only DNS query/response packet entries, excluding EVENT and Note entries. Use to focus analysis on actual DNS traffic. .EXAMPLE PS C:\> Convert-DNSDebugLogFile -InputFile "C:\Logs\dns.log" -ContextFilter 'Packet','Event' Converts both DNS query/response packets and server events, excluding Note and other entries. Use to analyze DNS traffic along with server event context. .EXAMPLE PS C:\> Get-ChildItem "C:\Logs\*.log" | Convert-DNSDebugLogFile -RemoveSourceFile -CompressOutput Automated log archival: processes all logs, compresses output, and removes source files. Ideal for scheduled log processing pipelines. .EXAMPLE PS C:\> Convert-DNSDebugLogFile -InputFile "C:\Logs\large-dns.log" -NoDetailsParsing Processes a large log file with detail parsing disabled for maximum performance. PACKET detail blocks are skipped, keeping the Details column empty. Use when processing very large files and detailed packet structure is not needed. 
.NOTES Version : 1.7.1.1 Author : Andi Bellstedt, Copilot Date : 2026-01-26 Keywords : Microsoft Windows Server, DNSServer, DNS, DebugLog, LogParser .LINK https://github.com/AndiBellstedt/DNSServer.DebugLogParser #> [CmdletBinding( SupportsShouldProcess = $true, ConfirmImpact = 'Medium' )] param( [Parameter( Mandatory = $true, ValueFromPipeline = $true, ValueFromPipelineByPropertyName = $true )] [Alias('FullName', 'FilePath', 'InputPath', 'File', 'Path')] [string[]] $InputFile, [Parameter()] [Alias('Output', 'Destination', 'OutFile', 'OutputPath')] [string] $OutputFile, [Parameter()] [ValidateLength(1, 1)] [string] $Delimiter = ';', [Parameter()] [Alias('Server', 'DNSServer', 'HostName')] [string] $ComputerName, [Parameter()] [ValidateSet('CSV', 'Statistic', 'Both')] [string] $OutputType = 'Both', [Parameter()] [switch] $SkipHeaderValidation, [Parameter()] [switch] $RemoveSourceFile, [Parameter()] [switch] $CompressOutput, [Parameter()] [ValidateSet('All', 'Packet', 'Event', 'Note', 'DSPoll', 'Init', 'Lookup', 'Recurse', 'Remote', 'Tombstone')] [string[]] $ContextFilter = @('All'), [Parameter()] [ArgumentCompleter({ [System.Diagnostics.CodeAnalysis.SuppressMessageAttribute('PSReviewUnusedParameter', '')] param($_commandName, $_parameterName, $wordToComplete, $_commandAst, $_fakeBoundParameters) [System.Globalization.CultureInfo]::GetCultures([System.Globalization.CultureTypes]::AllCultures) | Where-Object { $_.Name -like "$wordToComplete*" -and -not [string]::IsNullOrEmpty($_.Name) } | Sort-Object Name | ForEach-Object { [System.Management.Automation.CompletionResult]::new( $_.Name, $_.Name, [System.Management.Automation.CompletionResultType]::ParameterValue, "$($_.Name) - $($_.DisplayName)" ) } })] [System.Globalization.CultureInfo] $InputCulture = [System.Globalization.CultureInfo]::CurrentCulture, [Parameter()] [ArgumentCompleter({ [System.Diagnostics.CodeAnalysis.SuppressMessageAttribute('PSReviewUnusedParameter', '')] param($_commandName, $_parameterName, 
$wordToComplete, $_commandAst, $_fakeBoundParameters) [System.Globalization.CultureInfo]::GetCultures([System.Globalization.CultureTypes]::AllCultures) | Where-Object { $_.Name -like "$wordToComplete*" -and -not [string]::IsNullOrEmpty($_.Name) } | Sort-Object Name | ForEach-Object { [System.Management.Automation.CompletionResult]::new( $_.Name, $_.Name, [System.Management.Automation.CompletionResultType]::ParameterValue, "$($_.Name) - $($_.DisplayName)" ) } })] [System.Globalization.CultureInfo] $OutputCulture = [System.Globalization.CultureInfo]::CurrentCulture, [Parameter()] [switch] $NoDetailsParsing ) begin { #region -- Initialization # Track if user explicitly provided an OutputFile (for pipeline support) $explicitOutputFile = -not [string]::IsNullOrEmpty($OutputFile) # Validate OutputFile if provided - must not be a directory if ($explicitOutputFile) { # Resolve to absolute path for validation if (-not [System.IO.Path]::IsPathRooted($OutputFile)) { $resolvedOutputFile = Join-Path -Path (Get-Location).Path -ChildPath $OutputFile } else { $resolvedOutputFile = $OutputFile } # Check if path exists and is a directory if (Test-Path -Path $resolvedOutputFile -PathType Container) { throw "OutputFile parameter must be a file path, not a directory: '$OutputFile'. Please specify a file name (e.g., 'C:\Output\result.csv')." } # Check if parent directory exists (if path contains directory) $parentDir = [System.IO.Path]::GetDirectoryName($resolvedOutputFile) if (-not [string]::IsNullOrEmpty($parentDir) -and -not (Test-Path -Path $parentDir -PathType Container)) { throw "Output directory does not exist: '$parentDir'. Please create the directory first or omit OutputFile to use the input file's directory." 
} } <# DNS Debug Log Field Definitions (from log header lines 5-29): Field 1: Date Field 2: Time Field 3: Thread ID Field 4: Context Field 5: Internal packet identifier Field 6: UDP/TCP indicator Field 7: Send/Receive indicator Field 8: Client IP Field 9: Xid (hex) Field 10: Query/Response (R = Response, blank = Query) Field 11: Opcode (Q = Standard Query, N = Notify, U = Update, ? = Unknown) Field 12: Flags (hex) - starts with [ Field 13: Flags (char codes) - A=Authoritative, T=Truncated, D=Recursion Desired, R=Recursion Available Field 14: ResponseCode - ends with ] Field 15: Question Type Field 16: Question Name #> $headerTemplate = 'DateTime{0}ThreadId{0}Context{0}PacketId{0}Protocol{0}Direction{0}ClientIP{0}Xid{0}Type{0}Opcode{0}FlagsHex{0}FlagsChar{0}ResponseCode{0}QuestionType{0}QuestionName{0}Information{0}Details{0}ComputerName' #endregion Initialization # Start a stopwatch to measure total script runtime and a file counter $dnsParserStopwatch = [System.Diagnostics.Stopwatch]::StartNew() [int]$fileCount = 0 [int]$progressCounter = 0 [int]$progressUpdateInterval = 1000 # Validate parameter combination safety if ($RemoveSourceFile -and $SkipHeaderValidation) { throw "RemoveSourceFile cannot be used with SkipHeaderValidation due to safety concerns. Header validation ensures the file is a valid DNS log before permanent deletion." 
} } process { #region File Processing foreach ($currentFile in $InputFile) { # Reset progress counter for each file to ensure consistent progress update intervals $progressCounter = 0 # Validate input file exists and is a file (not a directory) if (-not (Test-Path -Path $currentFile)) { $errorRecord = [System.Management.Automation.ErrorRecord]::new( [System.IO.FileNotFoundException]::new("Input file does not exist: '$currentFile'"), 'InputFileNotFound', [System.Management.Automation.ErrorCategory]::ObjectNotFound, $currentFile ) $PSCmdlet.WriteError($errorRecord) continue } # Check if input path is a directory if ((Get-Item -Path $currentFile).PSIsContainer) { $errorRecord = [System.Management.Automation.ErrorRecord]::new( [System.ArgumentException]::new("Input path is a directory, not a file: '$currentFile'. Please specify a file path."), 'InputPathIsDirectory', [System.Management.Automation.ErrorCategory]::InvalidArgument, $currentFile ) $PSCmdlet.WriteError($errorRecord) continue } # Resolve full path $resolvedPath = Resolve-Path -Path $currentFile | Select-Object -ExpandProperty Path # Validate DNS debug log header (unless validation is skipped) if ($SkipHeaderValidation) { # Skip validation - assume standard 30-line header $skipLines = 30 Write-Verbose "Header validation skipped for: '$resolvedPath'" } else { Write-Verbose "Validating DNS debug log header for: '$resolvedPath'" $skipLines = Test-DnsDebugLogHeader -Path $resolvedPath if ($skipLines -eq 0) { $errorRecord = [System.Management.Automation.ErrorRecord]::new( [System.FormatException]::new("File is not a valid DNS Server debug log: '$resolvedPath'. The file header does not match the expected DNS debug log format. 
Use -SkipHeaderValidation to bypass this check."), 'InvalidDnsLogHeader', [System.Management.Automation.ErrorCategory]::InvalidData, $resolvedPath ) $PSCmdlet.WriteError($errorRecord) continue } Write-Verbose "Header validation successful ($skipLines header lines)" } # Calculate output path for this input file # If user didn't explicitly provide OutputFile, derive from input file if (-not $explicitOutputFile) { $currentOutputPath = [System.IO.Path]::ChangeExtension($resolvedPath, '.csv') } else { # Use the explicitly provided OutputFile (resolved to absolute) if (-not [System.IO.Path]::IsPathRooted($OutputFile)) { $currentOutputPath = Join-Path -Path (Get-Location).Path -ChildPath $OutputFile } else { $currentOutputPath = $OutputFile } } Write-Verbose "Starting processing: '$resolvedPath' (Input culture for date parsing: $($InputCulture.Name) [$($InputCulture.DisplayName)])" Write-Verbose "Output type: $($OutputType) | CSV delimiter: '$Delimiter'" Write-Verbose "Output path: '$currentOutputPath' (Output culture for date formatting: $($OutputCulture.Name) [$($OutputCulture.DisplayName)])" # Build header with specified delimiter (ComputerName is always included at the end) $header = $headerTemplate -f $Delimiter $computerNameValue = if ([string]::IsNullOrEmpty($ComputerName)) { '' } else { $ComputerName } # Use StreamReader for maximum performance with large files $reader = $null $writer = $null $lineCount = 0 $parsedCount = 0 # Initialize statistics dictionaries if requested (using Dictionary for performance) # contextStatistics: summarizes all records by Date|Context # packetStatistics: detailed PACKET records by Date|ClientIP|Protocol|Direction|QuestionType $contextStatistics = $null $packetStatistics = $null if ($OutputType -eq 'Statistic' -or $OutputType -eq 'Both') { $contextStatistics = [System.Collections.Generic.Dictionary[string, int]]::new() $packetStatistics = [System.Collections.Generic.Dictionary[string, int]]::new() } # Determine if we need to write 
CSV data $writeCsvData = ($OutputType -eq 'CSV' -or $OutputType -eq 'Both') # Pre-build date/time format string for OutputCulture (performance optimization) # Use the culture's short date and long time patterns for consistent output $outputDateTimeFormat = $OutputCulture.DateTimeFormat.ShortDatePattern + ' ' + $OutputCulture.DateTimeFormat.LongTimePattern try { $reader = [System.IO.StreamReader]::new($resolvedPath, [System.Text.Encoding]::UTF8, $true, 65536) # Only create CSV writer if we're outputting CSV data if ($writeCsvData) { # Check if we should process (WhatIf support) if ($PSCmdlet.ShouldProcess($currentOutputPath, "Create CSV output file")) { Write-Verbose "Initializing CSV writer for: '$currentOutputPath'" $writer = [System.IO.StreamWriter]::new($currentOutputPath, $false, [System.Text.Encoding]::UTF8, 65536) # Write CSV header $writer.WriteLine($header) } else { # In WhatIf mode, don't create writer $writeCsvData = $false } } # Skip header lines (validated count from Test-DnsDebugLogHeader) for ($i = 0; $i -lt $skipLines -and -not $reader.EndOfStream; $i++) { $null = $reader.ReadLine() $lineCount++ } #region -- Process data lines # Multi-line record processing: # - PACKET context can have detail blocks (TCP/UDP info + indented detail lines until empty line) # - Other contexts can have continuation lines (indented lines following the main line) # Records are delimited by: empty lines OR lines starting with a date (new record) # Streaming lookahead buffer implementation: # - Maintains a small queue of upcoming lines for multi-line record detection # - Avoids loading entire file into memory (OOM prevention for 100MB+ files) # - Buffer size of 100 lines provides sufficient lookahead for detail blocks $bufferSize = 100 $lookaheadBuffer = [System.Collections.Generic.Queue[string]]::new($bufferSize) # Pre-fill the lookahead buffer while (-not $reader.EndOfStream -and $lookaheadBuffer.Count -lt $bufferSize) { $lookaheadBuffer.Enqueue($reader.ReadLine()) 
$lineCount++ } # Process lines using streaming approach with lookahead capability while ($lookaheadBuffer.Count -gt 0) { # Dequeue next line for processing $line = $lookaheadBuffer.Dequeue() # Refill buffer to maintain lookahead capability if (-not $reader.EndOfStream) { $lookaheadBuffer.Enqueue($reader.ReadLine()) $lineCount++ } # Skip empty lines (record separators) if ([string]::IsNullOrWhiteSpace($line)) { continue } # Parse the main record line $parsed = ConvertFrom-DnsLogLine -Line $line -Culture $InputCulture -ContextFilter $ContextFilter if ($null -eq $parsed) { continue } # Initialize multi-line record fields $information = $parsed.Information $details = [string]::Empty #region -- -- Collect continuation/detail lines if ($parsed.Context -eq 'Packet') { # PACKET context: Check for detail block # Detail blocks start with "TCP " or "UDP " on the next line (no date prefix) # followed by indented lines, terminated by empty line if ($lookaheadBuffer.Count -gt 0) { # Peek at next line without allocating array $nextLine = $lookaheadBuffer.Peek() # Check if next line starts with "TCP " or "UDP " (detail block indicator) if ($nextLine.StartsWith('TCP ') -or $nextLine.StartsWith('UDP ')) { # This is a detail block - extract the TCP/UDP info line $information = $nextLine.TrimEnd() # Consume the TCP/UDP line from buffer $null = $lookaheadBuffer.Dequeue() if (-not $reader.EndOfStream) { $lookaheadBuffer.Enqueue($reader.ReadLine()) $lineCount++ } # Collect all indented detail lines until empty line or new record $detailLineList = [System.Collections.Generic.List[string]]::new() while ($lookaheadBuffer.Count -gt 0) { # Peek at next line without allocating array $detailLine = $lookaheadBuffer.Peek() # Empty line terminates the detail block if ([string]::IsNullOrWhiteSpace($detailLine)) { # Consume empty line $null = $lookaheadBuffer.Dequeue() if (-not $reader.EndOfStream) { $lookaheadBuffer.Enqueue($reader.ReadLine()) $lineCount++ } break } # Check if line starts with 
whitespace (continuation) or is a new record (starts with date) if ($detailLine.Length -gt 0 -and $detailLine[0] -eq ' ') { # Continuation line - trim leading whitespace (2 spaces indent) but preserve structure $detailLineList.Add($detailLine.TrimStart()) # Consume the detail line from buffer $null = $lookaheadBuffer.Dequeue() if (-not $reader.EndOfStream) { $lookaheadBuffer.Enqueue($reader.ReadLine()) $lineCount++ } } else { # New record detected - don't consume this line break } } # Parse detail lines into JSON structure if we have any (unless NoDetailsParsing is enabled) if ($detailLineList.Count -gt 0 -and -not $NoDetailsParsing) { $details = ConvertTo-PacketDetailJson -DetailLines $detailLineList } } elseif ([string]::IsNullOrWhiteSpace($nextLine)) { # Empty line after PACKET without details - skip it $null = $lookaheadBuffer.Dequeue() if (-not $reader.EndOfStream) { $lookaheadBuffer.Enqueue($reader.ReadLine()) $lineCount++ } } # Otherwise, nextLine is a new record - don't consume it } } else { # Non-PACKET context: Collect continuation lines (indented lines) into Information # Continuation lines start with whitespace and are appended to the main line's information $continuationTextList = [System.Collections.Generic.List[string]]::new() while ($lookaheadBuffer.Count -gt 0) { # Peek at next line without allocating array $contLine = $lookaheadBuffer.Peek() # Empty line terminates continuation if ([string]::IsNullOrWhiteSpace($contLine)) { # Consume empty line $null = $lookaheadBuffer.Dequeue() if (-not $reader.EndOfStream) { $lookaheadBuffer.Enqueue($reader.ReadLine()) $lineCount++ } break } # Check if line starts with whitespace (continuation) if ($contLine.Length -gt 0 -and $contLine[0] -eq ' ') { # Continuation line - trim whitespace and add to list $continuationTextList.Add($contLine.Trim()) # Consume the continuation line from buffer $null = $lookaheadBuffer.Dequeue() if (-not $reader.EndOfStream) { $lookaheadBuffer.Enqueue($reader.ReadLine()) $lineCount++ 
} } else { # New record detected - don't consume this line break } } # Combine continuation lines with original information if ($continuationTextList.Count -gt 0) { if ([string]::IsNullOrEmpty($information)) { $information = [string]::Join(' ', $continuationTextList) } else { $information = $information + ' ' + [string]::Join(' ', $continuationTextList) } } } #endregion -- -- Collect continuation/detail lines #region -- -- Build CSV line if ($writeCsvData) { # Format DateTime using OutputCulture for culture-aware output $formattedDateTime = $parsed.DateTime.ToString($outputDateTimeFormat, $OutputCulture) # Escape double quotes in text fields for proper CSV formatting # Standard CSV escaping: replace " with "" $escapedQuestionName = $parsed.QuestionName -replace '"', '""' $escapedInformation = $information -replace '"', '""' $escapedDetails = $details -replace '"', '""' # Build CSV line with all 18 columns including Details $csvLine = ('{0}' + $Delimiter + '{1}' + $Delimiter + '{2}' + $Delimiter + '{3}' + $Delimiter + '{4}' + $Delimiter + '{5}' + $Delimiter + '{6}' + $Delimiter + '{7}' + $Delimiter + '{8}' + $Delimiter + '{9}' + $Delimiter + '{10}' + $Delimiter + '{11}' + $Delimiter + '{12}' + $Delimiter + '{13}' + $Delimiter + '"{14}"' + $Delimiter + '"{15}"' + $Delimiter + '"{16}"' + $Delimiter + '{17}') -f @( $formattedDateTime, $parsed.ThreadId, $parsed.Context, $parsed.PacketId, $parsed.Protocol, $parsed.Direction, $parsed.RemoteIP, $parsed.Xid, $( if ($parsed.QueryResponse -eq 'R') { 'Response' } elseif ($parsed.Context -eq 'Packet') { 'Query' } else { '' } ), $( switch ($parsed.Opcode) { 'Q' { 'Standard' } 'N' { 'Notify' } 'U' { 'Update' } '?' 
{ 'Unknown' } default { $parsed.Opcode } } ), $parsed.FlagsHex, $( switch ($parsed.FlagsChar) { 'A' { 'Authoritative' } 'T' { 'Truncated' } 'D' { 'RecursionDesired' } 'R' { 'RecursionAvailable' } default { $parsed.FlagsChar } } ), $parsed.ResponseCode, $parsed.QuestionType, $escapedQuestionName, $escapedInformation, $escapedDetails, $computerNameValue ) # Write CSV line $writer.WriteLine($csvLine) } #endregion -- -- Build CSV line $parsedCount++ $progressCounter++ # Update progress every 1000 records for performance efficiency if ($progressCounter -ge $progressUpdateInterval) { Write-Progress -Activity "Processing DNS log file: $([System.IO.Path]::GetFileName($resolvedPath))" -Status "Parsed $parsedCount records ($lineCount lines read)" -PercentComplete -1 $progressCounter = 0 } #region -- -- Collect statistics if ($null -ne $contextStatistics) { # Extract date portion only (no time) for daily grouping $dateOnly = $parsed.DateTime.Date.ToString('yyyy-MM-dd', [System.Globalization.CultureInfo]::InvariantCulture) # Context statistics: count all records by Date|Context $contextKey = $dateOnly + '|' + $parsed.Context if ($contextStatistics.ContainsKey($contextKey)) { $contextStatistics[$contextKey]++ } else { $contextStatistics[$contextKey] = 1 } # Packet statistics: count only standard PACKET records (with RemoteIP) by Date|ClientIP|Protocol|Direction|QuestionType # Info-only PACKET records (e.g., "Response packet does not match") have empty RemoteIP and are excluded if ($parsed.Context -eq 'Packet' -and -not [string]::IsNullOrEmpty($parsed.RemoteIP)) { $packetKey = $dateOnly + '|' + $parsed.RemoteIP + '|' + $parsed.Protocol + '|' + $parsed.Direction + '|' + $parsed.QuestionType if ($packetStatistics.ContainsKey($packetKey)) { $packetStatistics[$packetKey]++ } else { $packetStatistics[$packetKey] = 1 } } } #endregion -- -- Collect statistics } #endregion Process data lines # Complete progress bar Write-Progress -Activity "Processing DNS log file: 
$([System.IO.Path]::GetFileName($resolvedPath))" -Completed Write-Verbose "Completed parsing: $lineCount total lines, $parsedCount valid entries" if ($writeCsvData) { Write-Verbose "Successfully exported $parsedCount DNS log entries to: '$currentOutputPath'" } # Write context statistics file if requested (_Statistic) if ($null -ne $contextStatistics -and $contextStatistics.Count -gt 0) { Write-Verbose "Generating context statistics file with $($contextStatistics.Count) unique groups" $contextStatPath = [System.IO.Path]::Combine( [System.IO.Path]::GetDirectoryName($currentOutputPath), [System.IO.Path]::GetFileNameWithoutExtension($currentOutputPath) + '_Statistic' + [System.IO.Path]::GetExtension($currentOutputPath) ) # Check if we should process (WhatIf support) if ($PSCmdlet.ShouldProcess($contextStatPath, "Create context statistics output file")) { $statWriter = $null try { $statWriter = [System.IO.StreamWriter]::new($contextStatPath, $false, [System.Text.Encoding]::UTF8, 65536) # Write context statistics header $statWriter.WriteLine('Date' + $Delimiter + 'Context' + $Delimiter + 'Count' + $Delimiter + 'ComputerName') # Write context statistics data (sort by Date, then Context for better readability) foreach ($kvp in ($contextStatistics.GetEnumerator() | Sort-Object -Property Key)) { $keyParts = $kvp.Key.Split('|') # Key format: Date|Context $statLine = $keyParts[0] + $Delimiter + $keyParts[1] + $Delimiter + $kvp.Value.ToString() + $Delimiter + $computerNameValue $statWriter.WriteLine($statLine) } Write-Verbose "Successfully exported context statistics to: '$contextStatPath' ($($contextStatistics.Count) unique groups)" } finally { if ($null -ne $statWriter) { $statWriter.Dispose() } } } } # Write packet statistics file if requested (_PacketStatistic) if ($null -ne $packetStatistics -and $packetStatistics.Count -gt 0) { Write-Verbose "Generating packet statistics file with $($packetStatistics.Count) unique groups" $packetStatPath = [System.IO.Path]::Combine( 
[System.IO.Path]::GetDirectoryName($currentOutputPath), [System.IO.Path]::GetFileNameWithoutExtension($currentOutputPath) + '_PacketStatistic' + [System.IO.Path]::GetExtension($currentOutputPath) ) # Check if we should process (WhatIf support) if ($PSCmdlet.ShouldProcess($packetStatPath, "Create packet statistics output file")) { $statWriter = $null try { $statWriter = [System.IO.StreamWriter]::new($packetStatPath, $false, [System.Text.Encoding]::UTF8, 65536) # Write packet statistics header (ComputerName is always included at the end) $statWriter.WriteLine('Date' + $Delimiter + 'ClientIP' + $Delimiter + 'Protocol' + $Delimiter + 'Direction' + $Delimiter + 'QuestionType' + $Delimiter + 'Count' + $Delimiter + 'ComputerName') # Write packet statistics data (sort by Date, then ClientIP for better readability) foreach ($kvp in ($packetStatistics.GetEnumerator() | Sort-Object -Property Key)) { $keyParts = $kvp.Key.Split('|') # Key format: Date|ClientIP|Protocol|Direction|QuestionType $statLine = $keyParts[0] + $Delimiter + $keyParts[1] + $Delimiter + $keyParts[2] + $Delimiter + $keyParts[3] + $Delimiter + $keyParts[4] + $Delimiter + $kvp.Value.ToString() + $Delimiter + $computerNameValue $statWriter.WriteLine($statLine) } Write-Verbose "Successfully exported packet statistics to: '$packetStatPath' ($($packetStatistics.Count) unique groups)" } finally { if ($null -ne $statWriter) { $statWriter.Dispose() } } } } } finally { if ($null -ne $reader) { $reader.Dispose() } if ($null -ne $writer) { $writer.Dispose() } } # Compress output files if requested (process after each file for disk space management) if ($CompressOutput) { $filesToCompress = @() $zipPath = [System.IO.Path]::ChangeExtension($currentOutputPath, '.zip') # Collect files to compress if ($writeCsvData -and (Test-Path -Path $currentOutputPath)) { $filesToCompress += $currentOutputPath } if ($null -ne $contextStatistics -and $contextStatistics.Count -gt 0) { $contextStatPath = [System.IO.Path]::Combine( 
[System.IO.Path]::GetDirectoryName($currentOutputPath), [System.IO.Path]::GetFileNameWithoutExtension($currentOutputPath) + '_Statistic' + [System.IO.Path]::GetExtension($currentOutputPath) ) if (Test-Path -Path $contextStatPath) { $filesToCompress += $contextStatPath } } if ($null -ne $packetStatistics -and $packetStatistics.Count -gt 0) { $packetStatPath = [System.IO.Path]::Combine( [System.IO.Path]::GetDirectoryName($currentOutputPath), [System.IO.Path]::GetFileNameWithoutExtension($currentOutputPath) + '_PacketStatistic' + [System.IO.Path]::GetExtension($currentOutputPath) ) if (Test-Path -Path $packetStatPath) { $filesToCompress += $packetStatPath } } if ($filesToCompress.Count -gt 0) { # Check if we should process (WhatIf support) if ($PSCmdlet.ShouldProcess($zipPath, "Compress output files and remove originals")) { try { Write-Verbose "Starting compression: $($filesToCompress.Count) file(s) to '$zipPath'" # Remove existing ZIP if present if (Test-Path -Path $zipPath) { Write-Verbose "Removing existing ZIP file: '$zipPath'" Remove-Item -Path $zipPath -Force -ErrorAction Stop } # Compress output files Compress-Archive -Path $filesToCompress -DestinationPath $zipPath -CompressionLevel Optimal -ErrorAction Stop Write-Verbose "Successfully compressed output to: '$zipPath'" # Remove uncompressed CSV files after successful compression foreach ($file in $filesToCompress) { Remove-Item -Path $file -Force -ErrorAction Stop Write-Verbose "Removed uncompressed file: '$file'" } } catch { $errorRecord = [System.Management.Automation.ErrorRecord]::new( [System.IO.IOException]::new("Failed to compress output files: $_"), 'CompressionFailed', [System.Management.Automation.ErrorCategory]::WriteError, $zipPath ) $PSCmdlet.WriteError($errorRecord) } } } } # Remove source file if requested (only after successful processing) if ($RemoveSourceFile) { # Check if we should process (WhatIf support) if ($PSCmdlet.ShouldProcess($resolvedPath, "Remove source file")) { try { 
Write-Verbose "Removing source file: '$resolvedPath'" Remove-Item -Path $resolvedPath -Force -ErrorAction Stop Write-Verbose "Successfully removed source file: '$resolvedPath'" } catch { $errorRecord = [System.Management.Automation.ErrorRecord]::new( [System.IO.IOException]::new("Failed to remove source file '$resolvedPath': $_"), 'SourceFileRemovalFailed', [System.Management.Automation.ErrorCategory]::WriteError, $resolvedPath ) $PSCmdlet.WriteError($errorRecord) } } } # Increment file counter for each processed input $fileCount++ } #endregion File Processing } end { #region Completion if ($null -ne $dnsParserStopwatch) { $dnsParserStopwatch.Stop() Write-Verbose "Processing complete: $fileCount file(s) processed in $($dnsParserStopwatch.Elapsed.ToString())" } #endregion Completion } } # Commands run on module import go here # E.g. Argument Completers could be placed here # Module-wide variables go here # For example if you want to cache some data, have some module-wide config settings, etc. ... those could go here # Example: # $script:config = @{ } Export-ModuleMember -Function 'Convert-DNSDebugLogFile' |