<?xml version="1.0" encoding="UTF-8"?>
<!-- modules/teamcityExtensions/MetaRunners/PSCI_GenerateJMeterAggregateReport.xml -->
<meta-runner name="PSCI: JMeter Aggregate Report">
  <description>Generates aggregate report based on JMeter JTL file</description>
  <settings>
    <parameters>
      <!-- Multi-select of what to produce: detailed HTML report, aggregate CSV report, and/or TeamCity test publication. -->
      <param name="jmeter.aggregate.options" value="DetailedReport,AggregateReport,PublishTeamCityTests" spec="select label_1='Detailed Report (.jtl file must be in xml format)' data_1='DetailedReport' display='normal' label='Report generation options' data_3='AggregateReport' label_3='Aggregate Report' multiple='true' label_5='Publish Aggregate Report as Teamcity Tests' data_5='PublishTeamCityTests'" />
      <!-- Locations of the JMeter installation, java.exe and input files. -->
      <param name="jmeter.aggregate.path" value="" spec="text validationMode='not_empty' label='Path to JMeter' description='Path to root JMeter directory' display='normal'" />
      <param name="jmeter.aggregate.javaPath" value="" spec="text description='Path to java.exe that will be used by JMeter CMDRunner (leave empty for java.exe from path).' validationMode='any' label='Path to java' display='normal'" />
      <param name="jmeter.aggregate.jtl.path" value="" spec="text validationMode='not_empty' label='Path to JTL file' description='Input JTL file that will be parsed - relative to TeamCity working directory. Can be .xml or .csv, but Detailed report will not work for .csv' display='normal'" />
      <param name="jmeter.aggregate.perfmon.path" value="" spec="text validationMode='any' label='Path to PerfMon results (optional)' description='Input PerfMon file that will be parsed to generate PerfMon graph' display='normal'" />
      <!-- Thresholds: warning thresholds colour cells in the aggregate report; failure threshold fails TeamCity tests on error%. -->
      <param name="jmeter.aggregate.warning.thresholds" value="Average=3000,Median=3000,90% Line=3000,Max=30000,Error %=0" spec="text validationMode='any' label='Aggregate Report - warning thresholds' description='Thresholds that will be used to mark cells red in aggregate report table' display='normal'" />
      <param name="jmeter.aggregate.failure.thresholds" value="0" spec="text validationMode='not_empty' label='Teamcity Tests - maximum error threshold' display='normal' description='If error% is greater than this value, test will be marked as failed'" />
      <!-- Note: checkedValue/uncheckedValue are PowerShell literals ($true/$false) passed to the -IncludeTotal switch below. -->
      <param name="jmeter.aggregate.include.total" value="$true" spec="checkbox checkedValue='$true' description='If unchecked, TOTAL row will be ignored' uncheckedValue='$false' label='Teamcity Tests - include TOTAL row' display='normal'" />
      <param name="jmeter.aggregate.timeout" value="0" spec="text validationMode='not_empty' label='Timeout (in seconds) for CMDRunner process' display='normal' description='CMDRunner process will be killed when timeout expires'" />
      <!-- Aggregate-report columns to publish as TeamCity test categories. -->
      <param name="jmeter.aggregate.columns" value="average,aggregate_report_median,aggregate_report_90%_line" spec="select display='normal' label='Teamcity Tests - columns to report as categories' multiple='true' label_1='Count' data_1='aggregate_report_count' label_2='Average' data_2='average' label_3='Median' data_3='aggregate_report_median' label_4='90% Line' data_4='aggregate_report_90%_line' label_5='Min' data_5='aggregate_report_min' label_6='Max' data_6='aggregate_report_max' label_7='Error%' data_7='aggregate_report_error%' label_8='Rate' data_8='aggregate_report_rate' label_9='Bandwidth' data_9='aggregate_report_bandwidth' label_10='StdDev' data_10='aggregate_report_stddev'" />
      <!-- Default CMDRunner command lines (one per line) for the standard JMeter-Plugins PNG graphs; the CDATA payload is consumed verbatim by the script. -->
      <param name="jmeter.aggregate.additional.cmdLines" spec="text validationMode='any' label='CMDRunner cmd lines' display='normal' description='Newline-delimited list of additional command lines for CMDRunner, e.g. to generate additional graphs. You can use {jtlPath}, {perfMonPath} and {outputDir} placeholders.'"><![CDATA[--input-jtl "{jtlPath}" --generate-png "{outputDir}\ResponseTimesOverTime.png" --plugin-type ResponseTimesOverTime --width 667 --height 500
--input-jtl "{jtlPath}" --generate-png "{outputDir}\BytesThroughputOverTime.png" --plugin-type BytesThroughputOverTime --width 667 --height 500
--input-jtl "{jtlPath}" --generate-png "{outputDir}\LatenciesOverTime.png" --plugin-type LatenciesOverTime --width 667 --height 500
--input-jtl "{jtlPath}" --generate-png "{outputDir}\ResponseCodesPerSecond.png" --plugin-type ResponseCodesPerSecond --width 667 --height 500
--input-jtl "{jtlPath}" --generate-png "{outputDir}\ResponseTimesDistribution.png" --plugin-type ResponseTimesDistribution --width 667 --height 500
--input-jtl "{jtlPath}" --generate-png "{outputDir}\ResponseTimesPercentiles.png" --plugin-type ResponseTimesPercentiles --width 667 --height 500
--input-jtl "{jtlPath}" --generate-png "{outputDir}\TransactionsPerSecond.png" --plugin-type TransactionsPerSecond --width 667 --height 500
--input-jtl "{jtlPath}" --generate-png "{outputDir}\ThreadsStateOverTime.png" --plugin-type ThreadsStateOverTime --width 667 --height 500
--input-jtl "{perfMonPath}" --generate-png "{outputDir}\PerfMon.png" --plugin-type PerfMon --width 667 --height 500]]></param>
      <!-- Kill switch: any value other than 'true' (case-insensitive) makes the build step exit immediately. -->
      <param name="jmeter.aggregate.enabled" value="true" spec="text description='Put false if this whole step should not run.' validationMode='any' label='Enabled' display='normal'" />
    </parameters>
    <build-runners>
      <runner name="Generate JMeter reports" type="jetbrains_powershell">
        <parameters>
          <param name="jetbrains_powershell_bitness" value="x86" />
          <param name="jetbrains_powershell_execution" value="PS1" />
          <param name="jetbrains_powershell_script_code"><![CDATA[if ('%jmeter.aggregate.enabled%' -and '%jmeter.aggregate.enabled%' -ine 'true') {
  Write-Host "jmeter.enabled is set to %jmeter.aggregate.enabled% - skipping."
  exit 0
}

. c:\PSCI\Boot\PSCI.boot.ps1
try {
if ('%jmeter.aggregate.options%' -match 'DetailedReport') {
  New-JMeterDetailedReport -JMeterDir '%jmeter.aggregate.path%' -InputJtlFilePath '%jmeter.aggregate.jtl.path%' -DetailReport -OutputDir 'JMeter'
}
if ('%jmeter.aggregate.options%' -match 'AggregateReport|PublishTeamCityTests') {
  $cmdLines = ('%jmeter.aggregate.additional.cmdLines%' -replace "`r",'') -split "`n"
  New-JMeterAggregateReport -JMeterDir "%jmeter.aggregate.path%" -InputJtlFilePath '%jmeter.aggregate.jtl.path%' -InputPerfMonFilePath '%jmeter.aggregate.perfmon.path%' -OutputDir 'JMeter' -WarningThresholds '%jmeter.aggregate.warning.thresholds%' -CustomCMDRunnerCommandLines $cmdLines -JavaPath '%jmeter.aggregate.javaPath%' -TimeoutInSeconds %jmeter.aggregate.timeout%
}
if ('%jmeter.aggregate.options%' -match 'DetailedReport|AggregateReport') {
  Write-Host "##teamcity[publishArtifacts 'JMeter => JMeter']"
}
if ('%jmeter.aggregate.options%' -match 'PublishTeamCityTests') {
  New-JMeterTeamcityTests -JMeterAggregateReportCsvPath 'JMeter\JMeter-AggregateReport.csv' -FailureThreshold %jmeter.aggregate.failure.thresholds% -ColumnsToReportAsTests ('%jmeter.aggregate.columns%' -split ',') -IncludeTotal:%jmeter.aggregate.include.total%
}

} catch {
  Write-ErrorRecord
}]]></param>
          <param name="jetbrains_powershell_script_mode" value="CODE" />
          <param name="teamcity.step.mode" value="default" />
        </parameters>
      </runner>
    </build-runners>
    <requirements />
  </settings>
</meta-runner>