<?xml version="1.0" encoding="UTF-8"?>
<!--Phoronix Test Suite v10.8.4-->
<PhoronixTestSuite>
  <SuiteInformation>
    <Title>rok1 Suite</Title>
    <Version>1.0.0</Version>
    <TestType>System</TestType>
    <Description>Test suite extracted from rok1.</Description>
    <Maintainer> </Maintainer>
  </SuiteInformation>
  <Execute>
    <Test>pts/encode-wavpack-1.4.1</Test>
    <Description>WAV To WavPack</Description>
  </Execute>
  <Execute>
    <Test>pts/encode-opus-1.4.0</Test>
    <Description>WAV To Opus Encode</Description>
  </Execute>
  <Execute>
    <Test>pts/encode-mp3-1.7.4</Test>
    <Description>WAV To MP3</Description>
  </Execute>
  <Execute>
    <Test>pts/encode-flac-1.8.1</Test>
    <Description>WAV To FLAC</Description>
  </Execute>
  <Execute>
    <Test>pts/encode-ape-1.4.0</Test>
    <Description>WAV To APE</Description>
  </Execute>
  <Execute>
    <Test>pts/stargate-1.1.0</Test>
    <Arguments>48000 1024</Arguments>
    <Description>Sample Rate: 48000 - Buffer Size: 1024</Description>
  </Execute>
  <Execute>
    <Test>pts/stargate-1.1.0</Test>
    <Arguments>192000 1024</Arguments>
    <Description>Sample Rate: 192000 - Buffer Size: 1024</Description>
  </Execute>
  <Execute>
    <Test>pts/stargate-1.1.0</Test>
    <Arguments>96000 1024</Arguments>
    <Description>Sample Rate: 96000 - Buffer Size: 1024</Description>
  </Execute>
  <Execute>
    <Test>pts/stargate-1.1.0</Test>
    <Arguments>48000 512</Arguments>
    <Description>Sample Rate: 48000 - Buffer Size: 512</Description>
  </Execute>
  <Execute>
    <Test>pts/stargate-1.1.0</Test>
    <Arguments>44100 1024</Arguments>
    <Description>Sample Rate: 44100 - Buffer Size: 1024</Description>
  </Execute>
  <Execute>
    <Test>pts/stargate-1.1.0</Test>
    <Arguments>192000 512</Arguments>
    <Description>Sample Rate: 192000 - Buffer Size: 512</Description>
  </Execute>
  <Execute>
    <Test>pts/stargate-1.1.0</Test>
    <Arguments>96000 512</Arguments>
    <Description>Sample Rate: 96000 - Buffer Size: 512</Description>
  </Execute>
  <Execute>
    <Test>pts/stargate-1.1.0</Test>
    <Arguments>44100 512</Arguments>
    <Description>Sample Rate: 44100 - Buffer Size: 512</Description>
  </Execute>
  <Execute>
    <Test>pts/spark-tpch-1.0.0</Test>
    <Arguments>-s 1</Arguments>
    <Description>Scale Factor: 1 - Geometric Mean Of All Queries</Description>
  </Execute>
  <Execute>
    <Test>pts/spark-1.0.1</Test>
    <Arguments>-r 40000000 -p 2000</Arguments>
    <Description>Row Count: 40000000 - Partitions: 2000 - Calculate Pi Benchmark Using Dataframe</Description>
  </Execute>
  <Execute>
    <Test>pts/spark-1.0.1</Test>
    <Arguments>-r 40000000 -p 2000</Arguments>
    <Description>Row Count: 40000000 - Partitions: 2000 - Calculate Pi Benchmark</Description>
  </Execute>
  <Execute>
    <Test>pts/spark-1.0.1</Test>
    <Arguments>-r 40000000 -p 2000</Arguments>
    <Description>Row Count: 40000000 - Partitions: 2000 - SHA-512 Benchmark Time</Description>
  </Execute>
  <Execute>
    <Test>pts/spark-1.0.1</Test>
    <Arguments>-r 40000000 -p 1000</Arguments>
    <Description>Row Count: 40000000 - Partitions: 1000 - Calculate Pi Benchmark Using Dataframe</Description>
  </Execute>
  <Execute>
    <Test>pts/spark-1.0.1</Test>
    <Arguments>-r 40000000 -p 1000</Arguments>
    <Description>Row Count: 40000000 - Partitions: 1000 - Calculate Pi Benchmark</Description>
  </Execute>
  <Execute>
    <Test>pts/spark-1.0.1</Test>
    <Arguments>-r 40000000 -p 1000</Arguments>
    <Description>Row Count: 40000000 - Partitions: 1000 - SHA-512 Benchmark Time</Description>
  </Execute>
  <Execute>
    <Test>pts/spark-1.0.1</Test>
    <Arguments>-r 20000000 -p 2000</Arguments>
    <Description>Row Count: 20000000 - Partitions: 2000 - Calculate Pi Benchmark Using Dataframe</Description>
  </Execute>
  <Execute>
    <Test>pts/spark-1.0.1</Test>
    <Arguments>-r 20000000 -p 2000</Arguments>
    <Description>Row Count: 20000000 - Partitions: 2000 - Calculate Pi Benchmark</Description>
  </Execute>
  <Execute>
    <Test>pts/spark-1.0.1</Test>
    <Arguments>-r 20000000 -p 2000</Arguments>
    <Description>Row Count: 20000000 - Partitions: 2000 - SHA-512 Benchmark Time</Description>
  </Execute>
  <Execute>
    <Test>pts/spark-1.0.1</Test>
    <Arguments>-r 20000000 -p 1000</Arguments>
    <Description>Row Count: 20000000 - Partitions: 1000 - Broadcast Inner Join Test Time</Description>
  </Execute>
  <Execute>
    <Test>pts/spark-1.0.1</Test>
    <Arguments>-r 20000000 -p 1000</Arguments>
    <Description>Row Count: 20000000 - Partitions: 1000 - Inner Join Test Time</Description>
  </Execute>
  <Execute>
    <Test>pts/spark-1.0.1</Test>
    <Arguments>-r 20000000 -p 1000</Arguments>
    <Description>Row Count: 20000000 - Partitions: 1000 - Repartition Test Time</Description>
  </Execute>
  <Execute>
    <Test>pts/spark-1.0.1</Test>
    <Arguments>-r 20000000 -p 1000</Arguments>
    <Description>Row Count: 20000000 - Partitions: 1000 - Calculate Pi Benchmark Using Dataframe</Description>
  </Execute>
  <Execute>
    <Test>pts/spark-1.0.1</Test>
    <Arguments>-r 20000000 -p 1000</Arguments>
    <Description>Row Count: 20000000 - Partitions: 1000 - Calculate Pi Benchmark</Description>
  </Execute>
  <Execute>
    <Test>pts/spark-1.0.1</Test>
    <Arguments>-r 20000000 -p 1000</Arguments>
    <Description>Row Count: 20000000 - Partitions: 1000 - SHA-512 Benchmark Time</Description>
  </Execute>
  <Execute>
    <Test>pts/spark-1.0.1</Test>
    <Arguments>-r 10000000 -p 2000</Arguments>
    <Description>Row Count: 10000000 - Partitions: 2000 - Broadcast Inner Join Test Time</Description>
  </Execute>
  <Execute>
    <Test>pts/spark-1.0.1</Test>
    <Arguments>-r 10000000 -p 2000</Arguments>
    <Description>Row Count: 10000000 - Partitions: 2000 - Inner Join Test Time</Description>
  </Execute>
  <Execute>
    <Test>pts/spark-1.0.1</Test>
    <Arguments>-r 10000000 -p 2000</Arguments>
    <Description>Row Count: 10000000 - Partitions: 2000 - Repartition Test Time</Description>
  </Execute>
  <Execute>
    <Test>pts/spark-1.0.1</Test>
    <Arguments>-r 10000000 -p 2000</Arguments>
    <Description>Row Count: 10000000 - Partitions: 2000 - Group By Test Time</Description>
  </Execute>
  <Execute>
    <Test>pts/spark-1.0.1</Test>
    <Arguments>-r 10000000 -p 2000</Arguments>
    <Description>Row Count: 10000000 - Partitions: 2000 - Calculate Pi Benchmark Using Dataframe</Description>
  </Execute>
  <Execute>
    <Test>pts/spark-1.0.1</Test>
    <Arguments>-r 10000000 -p 2000</Arguments>
    <Description>Row Count: 10000000 - Partitions: 2000 - Calculate Pi Benchmark</Description>
  </Execute>
  <Execute>
    <Test>pts/spark-1.0.1</Test>
    <Arguments>-r 10000000 -p 2000</Arguments>
    <Description>Row Count: 10000000 - Partitions: 2000 - SHA-512 Benchmark Time</Description>
  </Execute>
  <Execute>
    <Test>pts/spark-1.0.1</Test>
    <Arguments>-r 10000000 -p 1000</Arguments>
    <Description>Row Count: 10000000 - Partitions: 1000 - Broadcast Inner Join Test Time</Description>
  </Execute>
  <Execute>
    <Test>pts/spark-1.0.1</Test>
    <Arguments>-r 10000000 -p 1000</Arguments>
    <Description>Row Count: 10000000 - Partitions: 1000 - Inner Join Test Time</Description>
  </Execute>
  <Execute>
    <Test>pts/spark-1.0.1</Test>
    <Arguments>-r 10000000 -p 1000</Arguments>
    <Description>Row Count: 10000000 - Partitions: 1000 - Repartition Test Time</Description>
  </Execute>
  <Execute>
    <Test>pts/spark-1.0.1</Test>
    <Arguments>-r 10000000 -p 1000</Arguments>
    <Description>Row Count: 10000000 - Partitions: 1000 - Group By Test Time</Description>
  </Execute>
  <Execute>
    <Test>pts/spark-1.0.1</Test>
    <Arguments>-r 10000000 -p 1000</Arguments>
    <Description>Row Count: 10000000 - Partitions: 1000 - Calculate Pi Benchmark Using Dataframe</Description>
  </Execute>
  <Execute>
    <Test>pts/spark-1.0.1</Test>
    <Arguments>-r 10000000 -p 1000</Arguments>
    <Description>Row Count: 10000000 - Partitions: 1000 - Calculate Pi Benchmark</Description>
  </Execute>
  <Execute>
    <Test>pts/spark-1.0.1</Test>
    <Arguments>-r 10000000 -p 1000</Arguments>
    <Description>Row Count: 10000000 - Partitions: 1000 - SHA-512 Benchmark Time</Description>
  </Execute>
  <Execute>
    <Test>pts/spark-1.0.1</Test>
    <Arguments>-r 40000000 -p 500</Arguments>
    <Description>Row Count: 40000000 - Partitions: 500 - Calculate Pi Benchmark Using Dataframe</Description>
  </Execute>
  <Execute>
    <Test>pts/spark-1.0.1</Test>
    <Arguments>-r 40000000 -p 500</Arguments>
    <Description>Row Count: 40000000 - Partitions: 500 - Calculate Pi Benchmark</Description>
  </Execute>
  <Execute>
    <Test>pts/spark-1.0.1</Test>
    <Arguments>-r 40000000 -p 500</Arguments>
    <Description>Row Count: 40000000 - Partitions: 500 - SHA-512 Benchmark Time</Description>
  </Execute>
  <Execute>
    <Test>pts/spark-1.0.1</Test>
    <Arguments>-r 40000000 -p 100</Arguments>
    <Description>Row Count: 40000000 - Partitions: 100 - Calculate Pi Benchmark Using Dataframe</Description>
  </Execute>
  <Execute>
    <Test>pts/spark-1.0.1</Test>
    <Arguments>-r 40000000 -p 100</Arguments>
    <Description>Row Count: 40000000 - Partitions: 100 - Calculate Pi Benchmark</Description>
  </Execute>
  <Execute>
    <Test>pts/spark-1.0.1</Test>
    <Arguments>-r 40000000 -p 100</Arguments>
    <Description>Row Count: 40000000 - Partitions: 100 - SHA-512 Benchmark Time</Description>
  </Execute>
  <Execute>
    <Test>pts/spark-1.0.1</Test>
    <Arguments>-r 20000000 -p 500</Arguments>
    <Description>Row Count: 20000000 - Partitions: 500 - Broadcast Inner Join Test Time</Description>
  </Execute>
  <Execute>
    <Test>pts/spark-1.0.1</Test>
    <Arguments>-r 20000000 -p 500</Arguments>
    <Description>Row Count: 20000000 - Partitions: 500 - Inner Join Test Time</Description>
  </Execute>
  <Execute>
    <Test>pts/spark-1.0.1</Test>
    <Arguments>-r 20000000 -p 500</Arguments>
    <Description>Row Count: 20000000 - Partitions: 500 - Repartition Test Time</Description>
  </Execute>
  <Execute>
    <Test>pts/spark-1.0.1</Test>
    <Arguments>-r 20000000 -p 500</Arguments>
    <Description>Row Count: 20000000 - Partitions: 500 - Calculate Pi Benchmark Using Dataframe</Description>
  </Execute>
  <Execute>
    <Test>pts/spark-1.0.1</Test>
    <Arguments>-r 20000000 -p 500</Arguments>
    <Description>Row Count: 20000000 - Partitions: 500 - Calculate Pi Benchmark</Description>
  </Execute>
  <Execute>
    <Test>pts/spark-1.0.1</Test>
    <Arguments>-r 20000000 -p 500</Arguments>
    <Description>Row Count: 20000000 - Partitions: 500 - SHA-512 Benchmark Time</Description>
  </Execute>
  <Execute>
    <Test>pts/spark-1.0.1</Test>
    <Arguments>-r 20000000 -p 100</Arguments>
    <Description>Row Count: 20000000 - Partitions: 100 - Broadcast Inner Join Test Time</Description>
  </Execute>
  <Execute>
    <Test>pts/spark-1.0.1</Test>
    <Arguments>-r 20000000 -p 100</Arguments>
    <Description>Row Count: 20000000 - Partitions: 100 - Inner Join Test Time</Description>
  </Execute>
  <Execute>
    <Test>pts/spark-1.0.1</Test>
    <Arguments>-r 20000000 -p 100</Arguments>
    <Description>Row Count: 20000000 - Partitions: 100 - Repartition Test Time</Description>
  </Execute>
  <Execute>
    <Test>pts/spark-1.0.1</Test>
    <Arguments>-r 20000000 -p 100</Arguments>
    <Description>Row Count: 20000000 - Partitions: 100 - Group By Test Time</Description>
  </Execute>
  <Execute>
    <Test>pts/spark-1.0.1</Test>
    <Arguments>-r 20000000 -p 100</Arguments>
    <Description>Row Count: 20000000 - Partitions: 100 - Calculate Pi Benchmark Using Dataframe</Description>
  </Execute>
  <Execute>
    <Test>pts/spark-1.0.1</Test>
    <Arguments>-r 20000000 -p 100</Arguments>
    <Description>Row Count: 20000000 - Partitions: 100 - Calculate Pi Benchmark</Description>
  </Execute>
  <Execute>
    <Test>pts/spark-1.0.1</Test>
    <Arguments>-r 20000000 -p 100</Arguments>
    <Description>Row Count: 20000000 - Partitions: 100 - SHA-512 Benchmark Time</Description>
  </Execute>
  <Execute>
    <Test>pts/spark-1.0.1</Test>
    <Arguments>-r 10000000 -p 500</Arguments>
    <Description>Row Count: 10000000 - Partitions: 500 - Broadcast Inner Join Test Time</Description>
  </Execute>
  <Execute>
    <Test>pts/spark-1.0.1</Test>
    <Arguments>-r 10000000 -p 500</Arguments>
    <Description>Row Count: 10000000 - Partitions: 500 - Inner Join Test Time</Description>
  </Execute>
  <Execute>
    <Test>pts/spark-1.0.1</Test>
    <Arguments>-r 10000000 -p 500</Arguments>
    <Description>Row Count: 10000000 - Partitions: 500 - Repartition Test Time</Description>
  </Execute>
  <Execute>
    <Test>pts/spark-1.0.1</Test>
    <Arguments>-r 10000000 -p 500</Arguments>
    <Description>Row Count: 10000000 - Partitions: 500 - Calculate Pi Benchmark Using Dataframe</Description>
  </Execute>
  <Execute>
    <Test>pts/spark-1.0.1</Test>
    <Arguments>-r 10000000 -p 500</Arguments>
    <Description>Row Count: 10000000 - Partitions: 500 - Calculate Pi Benchmark</Description>
  </Execute>
  <Execute>
    <Test>pts/spark-1.0.1</Test>
    <Arguments>-r 10000000 -p 500</Arguments>
    <Description>Row Count: 10000000 - Partitions: 500 - SHA-512 Benchmark Time</Description>
  </Execute>
  <Execute>
    <Test>pts/spark-1.0.1</Test>
    <Arguments>-r 10000000 -p 100</Arguments>
    <Description>Row Count: 10000000 - Partitions: 100 - Broadcast Inner Join Test Time</Description>
  </Execute>
  <Execute>
    <Test>pts/spark-1.0.1</Test>
    <Arguments>-r 10000000 -p 100</Arguments>
    <Description>Row Count: 10000000 - Partitions: 100 - Repartition Test Time</Description>
  </Execute>
  <Execute>
    <Test>pts/spark-1.0.1</Test>
    <Arguments>-r 10000000 -p 100</Arguments>
    <Description>Row Count: 10000000 - Partitions: 100 - Calculate Pi Benchmark Using Dataframe</Description>
  </Execute>
  <Execute>
    <Test>pts/spark-1.0.1</Test>
    <Arguments>-r 10000000 -p 100</Arguments>
    <Description>Row Count: 10000000 - Partitions: 100 - Calculate Pi Benchmark</Description>
  </Execute>
  <Execute>
    <Test>pts/spark-1.0.1</Test>
    <Arguments>-r 10000000 -p 100</Arguments>
    <Description>Row Count: 10000000 - Partitions: 100 - SHA-512 Benchmark Time</Description>
  </Execute>
  <Execute>
    <Test>pts/spark-1.0.1</Test>
    <Arguments>-r 1000000 -p 2000</Arguments>
    <Description>Row Count: 1000000 - Partitions: 2000 - Broadcast Inner Join Test Time</Description>
  </Execute>
  <Execute>
    <Test>pts/spark-1.0.1</Test>
    <Arguments>-r 1000000 -p 2000</Arguments>
    <Description>Row Count: 1000000 - Partitions: 2000 - Repartition Test Time</Description>
  </Execute>
  <Execute>
    <Test>pts/spark-1.0.1</Test>
    <Arguments>-r 1000000 -p 2000</Arguments>
    <Description>Row Count: 1000000 - Partitions: 2000 - Group By Test Time</Description>
  </Execute>
  <Execute>
    <Test>pts/spark-1.0.1</Test>
    <Arguments>-r 1000000 -p 2000</Arguments>
    <Description>Row Count: 1000000 - Partitions: 2000 - Calculate Pi Benchmark Using Dataframe</Description>
  </Execute>
  <Execute>
    <Test>pts/spark-1.0.1</Test>
    <Arguments>-r 1000000 -p 2000</Arguments>
    <Description>Row Count: 1000000 - Partitions: 2000 - Calculate Pi Benchmark</Description>
  </Execute>
  <Execute>
    <Test>pts/spark-1.0.1</Test>
    <Arguments>-r 1000000 -p 2000</Arguments>
    <Description>Row Count: 1000000 - Partitions: 2000 - SHA-512 Benchmark Time</Description>
  </Execute>
  <Execute>
    <Test>pts/spark-1.0.1</Test>
    <Arguments>-r 1000000 -p 1000</Arguments>
    <Description>Row Count: 1000000 - Partitions: 1000 - Broadcast Inner Join Test Time</Description>
  </Execute>
  <Execute>
    <Test>pts/spark-1.0.1</Test>
    <Arguments>-r 1000000 -p 1000</Arguments>
    <Description>Row Count: 1000000 - Partitions: 1000 - Repartition Test Time</Description>
  </Execute>
  <Execute>
    <Test>pts/spark-1.0.1</Test>
    <Arguments>-r 1000000 -p 1000</Arguments>
    <Description>Row Count: 1000000 - Partitions: 1000 - Group By Test Time</Description>
  </Execute>
  <Execute>
    <Test>pts/spark-1.0.1</Test>
    <Arguments>-r 1000000 -p 1000</Arguments>
    <Description>Row Count: 1000000 - Partitions: 1000 - Calculate Pi Benchmark Using Dataframe</Description>
  </Execute>
  <Execute>
    <Test>pts/spark-1.0.1</Test>
    <Arguments>-r 1000000 -p 1000</Arguments>
    <Description>Row Count: 1000000 - Partitions: 1000 - Calculate Pi Benchmark</Description>
  </Execute>
  <Execute>
    <Test>pts/spark-1.0.1</Test>
    <Arguments>-r 1000000 -p 1000</Arguments>
    <Description>Row Count: 1000000 - Partitions: 1000 - SHA-512 Benchmark Time</Description>
  </Execute>
  <Execute>
    <Test>pts/spark-1.0.1</Test>
    <Arguments>-r 1000000 -p 500</Arguments>
    <Description>Row Count: 1000000 - Partitions: 500 - Broadcast Inner Join Test Time</Description>
  </Execute>
  <Execute>
    <Test>pts/spark-1.0.1</Test>
    <Arguments>-r 1000000 -p 500</Arguments>
    <Description>Row Count: 1000000 - Partitions: 500 - Repartition Test Time</Description>
  </Execute>
  <Execute>
    <Test>pts/spark-1.0.1</Test>
    <Arguments>-r 1000000 -p 500</Arguments>
    <Description>Row Count: 1000000 - Partitions: 500 - Group By Test Time</Description>
  </Execute>
  <Execute>
    <Test>pts/spark-1.0.1</Test>
    <Arguments>-r 1000000 -p 500</Arguments>
    <Description>Row Count: 1000000 - Partitions: 500 - Calculate Pi Benchmark Using Dataframe</Description>
  </Execute>
  <Execute>
    <Test>pts/spark-1.0.1</Test>
    <Arguments>-r 1000000 -p 500</Arguments>
    <Description>Row Count: 1000000 - Partitions: 500 - Calculate Pi Benchmark</Description>
  </Execute>
  <Execute>
    <Test>pts/spark-1.0.1</Test>
    <Arguments>-r 1000000 -p 500</Arguments>
    <Description>Row Count: 1000000 - Partitions: 500 - SHA-512 Benchmark Time</Description>
  </Execute>
  <Execute>
    <Test>pts/spark-1.0.1</Test>
    <Arguments>-r 1000000 -p 100</Arguments>
    <Description>Row Count: 1000000 - Partitions: 100 - Broadcast Inner Join Test Time</Description>
  </Execute>
  <Execute>
    <Test>pts/spark-1.0.1</Test>
    <Arguments>-r 1000000 -p 100</Arguments>
    <Description>Row Count: 1000000 - Partitions: 100 - Repartition Test Time</Description>
  </Execute>
  <Execute>
    <Test>pts/spark-1.0.1</Test>
    <Arguments>-r 1000000 -p 100</Arguments>
    <Description>Row Count: 1000000 - Partitions: 100 - Group By Test Time</Description>
  </Execute>
  <Execute>
    <Test>pts/spark-1.0.1</Test>
    <Arguments>-r 1000000 -p 100</Arguments>
    <Description>Row Count: 1000000 - Partitions: 100 - Calculate Pi Benchmark Using Dataframe</Description>
  </Execute>
  <Execute>
    <Test>pts/spark-1.0.1</Test>
    <Arguments>-r 1000000 -p 100</Arguments>
    <Description>Row Count: 1000000 - Partitions: 100 - Calculate Pi Benchmark</Description>
  </Execute>
  <Execute>
    <Test>pts/spark-1.0.1</Test>
    <Arguments>-r 1000000 -p 100</Arguments>
    <Description>Row Count: 1000000 - Partitions: 100 - SHA-512 Benchmark Time</Description>
  </Execute>
  <Execute>
    <Test>pts/couchdb-1.3.0</Test>
    <Arguments>500 3000 30</Arguments>
    <Description>Bulk Size: 500 - Inserts: 3000 - Rounds: 30</Description>
  </Execute>
  <Execute>
    <Test>pts/couchdb-1.3.0</Test>
    <Arguments>500 1000 30</Arguments>
    <Description>Bulk Size: 500 - Inserts: 1000 - Rounds: 30</Description>
  </Execute>
  <Execute>
    <Test>pts/couchdb-1.3.0</Test>
    <Arguments>300 1000 30</Arguments>
    <Description>Bulk Size: 300 - Inserts: 1000 - Rounds: 30</Description>
  </Execute>
  <Execute>
    <Test>pts/couchdb-1.3.0</Test>
    <Arguments>100 3000 30</Arguments>
    <Description>Bulk Size: 100 - Inserts: 3000 - Rounds: 30</Description>
  </Execute>
  <Execute>
    <Test>pts/couchdb-1.3.0</Test>
    <Arguments>100 1000 30</Arguments>
    <Description>Bulk Size: 100 - Inserts: 1000 - Rounds: 30</Description>
  </Execute>
  <Execute>
    <Test>pts/etcd-1.0.0</Test>
    <Arguments>range KEY --total=4000000 --conns 500 --clients 1000</Arguments>
    <Description>Test: RANGE - Connections: 500 - Clients: 1000</Description>
  </Execute>
  <Execute>
    <Test>pts/etcd-1.0.0</Test>
    <Arguments>range KEY --total=4000000 --conns 500 --clients 100</Arguments>
    <Description>Test: RANGE - Connections: 500 - Clients: 100 - Average Latency</Description>
  </Execute>
  <Execute>
    <Test>pts/etcd-1.0.0</Test>
    <Arguments>range KEY --total=4000000 --conns 100 --clients 100</Arguments>
    <Description>Test: RANGE - Connections: 100 - Clients: 100 - Average Latency</Description>
  </Execute>
  <Execute>
    <Test>pts/etcd-1.0.0</Test>
    <Arguments>range KEY --total=4000000 --conns 100 --clients 100</Arguments>
    <Description>Test: RANGE - Connections: 100 - Clients: 100</Description>
  </Execute>
  <Execute>
    <Test>pts/etcd-1.0.0</Test>
    <Arguments>put --total=4000000 --val-size=256 --key-size=8 --conns 500 --clients 1000</Arguments>
    <Description>Test: PUT - Connections: 500 - Clients: 1000 - Average Latency</Description>
  </Execute>
  <Execute>
    <Test>pts/etcd-1.0.0</Test>
    <Arguments>put --total=4000000 --val-size=256 --key-size=8 --conns 500 --clients 1000</Arguments>
    <Description>Test: PUT - Connections: 500 - Clients: 1000</Description>
  </Execute>
  <Execute>
    <Test>pts/etcd-1.0.0</Test>
    <Arguments>put --total=4000000 --val-size=256 --key-size=8 --conns 500 --clients 100</Arguments>
    <Description>Test: PUT - Connections: 500 - Clients: 100 - Average Latency</Description>
  </Execute>
  <Execute>
    <Test>pts/etcd-1.0.0</Test>
    <Arguments>put --total=4000000 --val-size=256 --key-size=8 --conns 100 --clients 100</Arguments>
    <Description>Test: PUT - Connections: 100 - Clients: 100 - Average Latency</Description>
  </Execute>
  <Execute>
    <Test>pts/etcd-1.0.0</Test>
    <Arguments>put --total=4000000 --val-size=256 --key-size=8 --conns 100 --clients 100</Arguments>
    <Description>Test: PUT - Connections: 100 - Clients: 100</Description>
  </Execute>
  <Execute>
    <Test>pts/sqlite-2.1.0</Test>
    <Arguments>1</Arguments>
    <Description>Threads / Copies: 1</Description>
  </Execute>
  <Execute>
    <Test>pts/leveldb-1.1.0</Test>
    <Arguments>--benchmarks=fillseq --num=500000</Arguments>
    <Description>Benchmark: Sequential Fill</Description>
  </Execute>
  <Execute>
    <Test>pts/leveldb-1.1.0</Test>
    <Arguments>--benchmarks=deleterandom --num=500000</Arguments>
    <Description>Benchmark: Random Delete</Description>
  </Execute>
  <Execute>
    <Test>pts/leveldb-1.1.0</Test>
    <Arguments>--benchmarks=seekrandom --num=1000000</Arguments>
    <Description>Benchmark: Seek Random</Description>
  </Execute>
  <Execute>
    <Test>pts/leveldb-1.1.0</Test>
    <Arguments>--benchmarks=readrandom --num=1000000</Arguments>
    <Description>Benchmark: Random Read</Description>
  </Execute>
  <Execute>
    <Test>pts/leveldb-1.1.0</Test>
    <Arguments>--benchmarks=fillrandom --num=100000</Arguments>
    <Description>Benchmark: Random Fill</Description>
  </Execute>
  <Execute>
    <Test>pts/leveldb-1.1.0</Test>
    <Arguments>--benchmarks=overwrite --num=100000</Arguments>
    <Description>Benchmark: Overwrite</Description>
  </Execute>
  <Execute>
    <Test>pts/leveldb-1.1.0</Test>
    <Arguments>--benchmarks=readhot --num=1000000</Arguments>
    <Description>Benchmark: Hot Read</Description>
  </Execute>
  <Execute>
    <Test>pts/n-queens-1.2.1</Test>
    <Description>Elapsed Time</Description>
  </Execute>
  <Execute>
    <Test>pts/m-queens-1.1.0</Test>
    <Description>Time To Solve</Description>
  </Execute>
  <Execute>
    <Test>pts/asmfish-1.1.2</Test>
    <Description>1024 Hash Memory, 26 Depth</Description>
  </Execute>
  <Execute>
    <Test>pts/tscp-1.2.2</Test>
    <Description>AI Chess Performance</Description>
  </Execute>
  <Execute>
    <Test>pts/lczero-1.7.0</Test>
    <Arguments>-b blas</Arguments>
    <Description>Backend: BLAS</Description>
  </Execute>
  <Execute>
    <Test>pts/build-wasmer-1.2.0</Test>
    <Description>Time To Compile</Description>
  </Execute>
  <Execute>
    <Test>pts/build-erlang-1.2.0</Test>
    <Description>Time To Compile</Description>
  </Execute>
  <Execute>
    <Test>pts/build-eigen-1.1.0</Test>
    <Description>Time To Compile</Description>
  </Execute>
  <Execute>
    <Test>pts/build2-1.2.0</Test>
    <Description>Time To Compile</Description>
  </Execute>
  <Execute>
    <Test>pts/build-python-1.0.0</Test>
    <Arguments>--enable-optimizations --with-lto</Arguments>
    <Description>Build Configuration: Released Build, PGO + LTO Optimized</Description>
  </Execute>
  <Execute>
    <Test>pts/build-python-1.0.0</Test>
    <Description>Build Configuration: Default</Description>
  </Execute>
  <Execute>
    <Test>pts/build-php-1.6.0</Test>
    <Description>Time To Compile</Description>
  </Execute>
  <Execute>
    <Test>pts/build-nodejs-1.3.0</Test>
    <Description>Time To Compile</Description>
  </Execute>
  <Execute>
    <Test>pts/build-mplayer-1.5.0</Test>
    <Description>Time To Compile</Description>
  </Execute>
  <Execute>
    <Test>pts/build-llvm-1.5.0</Test>
    <Description>Build System: Unix Makefiles</Description>
  </Execute>
  <Execute>
    <Test>pts/build-llvm-1.5.0</Test>
    <Arguments>Ninja</Arguments>
    <Description>Build System: Ninja</Description>
  </Execute>
  <Execute>
    <Test>pts/build-linux-kernel-1.15.0</Test>
    <Arguments>allmodconfig</Arguments>
    <Description>Build: allmodconfig</Description>
  </Execute>
  <Execute>
    <Test>pts/build-linux-kernel-1.15.0</Test>
    <Arguments>defconfig</Arguments>
    <Description>Build: defconfig</Description>
  </Execute>
  <Execute>
    <Test>pts/build-imagemagick-1.7.2</Test>
    <Description>Time To Compile</Description>
  </Execute>
  <Execute>
    <Test>pts/build-godot-4.0.0</Test>
    <Description>Time To Compile</Description>
  </Execute>
  <Execute>
    <Test>pts/build-gem5-1.1.0</Test>
    <Description>Time To Compile</Description>
  </Execute>
  <Execute>
    <Test>pts/build-ffmpeg-6.1.0</Test>
    <Description>Time To Compile</Description>
  </Execute>
  <Execute>
    <Test>pts/build-apache-1.6.1</Test>
    <Description>Time To Compile</Description>
  </Execute>
  <Execute>
    <Test>pts/spark-tpch-1.0.0</Test>
    <Arguments>-s 100</Arguments>
    <Description>Scale Factor: 100</Description>
  </Execute>
  <Execute>
    <Test>pts/spark-tpch-1.0.0</Test>
    <Arguments>-s 50</Arguments>
    <Description>Scale Factor: 50</Description>
  </Execute>
  <Execute>
    <Test>pts/spark-tpch-1.0.0</Test>
    <Arguments>-s 10</Arguments>
    <Description>Scale Factor: 10</Description>
  </Execute>
  <Execute>
    <Test>pts/spark-tpch-1.0.0</Test>
    <Arguments>-s 1</Arguments>
    <Description>Scale Factor: 1 - Q22</Description>
  </Execute>
  <Execute>
    <Test>pts/spark-tpch-1.0.0</Test>
    <Arguments>-s 1</Arguments>
    <Description>Scale Factor: 1 - Q21</Description>
  </Execute>
  <Execute>
    <Test>pts/spark-tpch-1.0.0</Test>
    <Arguments>-s 1</Arguments>
    <Description>Scale Factor: 1 - Q20</Description>
  </Execute>
  <Execute>
    <Test>pts/spark-tpch-1.0.0</Test>
    <Arguments>-s 1</Arguments>
    <Description>Scale Factor: 1 - Q19</Description>
  </Execute>
  <Execute>
    <Test>pts/spark-tpch-1.0.0</Test>
    <Arguments>-s 1</Arguments>
    <Description>Scale Factor: 1 - Q18</Description>
  </Execute>
  <Execute>
    <Test>pts/spark-tpch-1.0.0</Test>
    <Arguments>-s 1</Arguments>
    <Description>Scale Factor: 1 - Q17</Description>
  </Execute>
  <Execute>
    <Test>pts/spark-tpch-1.0.0</Test>
    <Arguments>-s 1</Arguments>
    <Description>Scale Factor: 1 - Q16</Description>
  </Execute>
  <Execute>
    <Test>pts/spark-tpch-1.0.0</Test>
    <Arguments>-s 1</Arguments>
    <Description>Scale Factor: 1 - Q15</Description>
  </Execute>
  <Execute>
    <Test>pts/spark-tpch-1.0.0</Test>
    <Arguments>-s 1</Arguments>
    <Description>Scale Factor: 1 - Q14</Description>
  </Execute>
  <Execute>
    <Test>pts/spark-tpch-1.0.0</Test>
    <Arguments>-s 1</Arguments>
    <Description>Scale Factor: 1 - Q13</Description>
  </Execute>
  <Execute>
    <Test>pts/spark-tpch-1.0.0</Test>
    <Arguments>-s 1</Arguments>
    <Description>Scale Factor: 1 - Q12</Description>
  </Execute>
  <Execute>
    <Test>pts/spark-tpch-1.0.0</Test>
    <Arguments>-s 1</Arguments>
    <Description>Scale Factor: 1 - Q11</Description>
  </Execute>
  <Execute>
    <Test>pts/spark-tpch-1.0.0</Test>
    <Arguments>-s 1</Arguments>
    <Description>Scale Factor: 1 - Q10</Description>
  </Execute>
  <Execute>
    <Test>pts/spark-tpch-1.0.0</Test>
    <Arguments>-s 1</Arguments>
    <Description>Scale Factor: 1 - Q09</Description>
  </Execute>
  <Execute>
    <Test>pts/spark-tpch-1.0.0</Test>
    <Arguments>-s 1</Arguments>
    <Description>Scale Factor: 1 - Q08</Description>
  </Execute>
  <Execute>
    <Test>pts/spark-tpch-1.0.0</Test>
    <Arguments>-s 1</Arguments>
    <Description>Scale Factor: 1 - Q07</Description>
  </Execute>
  <Execute>
    <Test>pts/spark-tpch-1.0.0</Test>
    <Arguments>-s 1</Arguments>
    <Description>Scale Factor: 1 - Q06</Description>
  </Execute>
  <Execute>
    <Test>pts/spark-tpch-1.0.0</Test>
    <Arguments>-s 1</Arguments>
    <Description>Scale Factor: 1 - Q05</Description>
  </Execute>
  <Execute>
    <Test>pts/spark-tpch-1.0.0</Test>
    <Arguments>-s 1</Arguments>
    <Description>Scale Factor: 1 - Q04</Description>
  </Execute>
  <Execute>
    <Test>pts/spark-tpch-1.0.0</Test>
    <Arguments>-s 1</Arguments>
    <Description>Scale Factor: 1 - Q03</Description>
  </Execute>
  <Execute>
    <Test>pts/spark-tpch-1.0.0</Test>
    <Arguments>-s 1</Arguments>
    <Description>Scale Factor: 1 - Q02</Description>
  </Execute>
  <Execute>
    <Test>pts/spark-tpch-1.0.0</Test>
    <Arguments>-s 1</Arguments>
    <Description>Scale Factor: 1 - Q01</Description>
  </Execute>
  <Execute>
    <Test>pts/spark-1.0.1</Test>
    <Arguments>-r 20000000 -p 1000</Arguments>
    <Description>Row Count: 20000000 - Partitions: 1000 - Group By Test Time</Description>
  </Execute>
  <Execute>
    <Test>pts/spark-1.0.1</Test>
    <Arguments>-r 20000000 -p 500</Arguments>
    <Description>Row Count: 20000000 - Partitions: 500 - Group By Test Time</Description>
  </Execute>
  <Execute>
    <Test>pts/spark-1.0.1</Test>
    <Arguments>-r 10000000 -p 500</Arguments>
    <Description>Row Count: 10000000 - Partitions: 500 - Group By Test Time</Description>
  </Execute>
  <Execute>
    <Test>pts/spark-1.0.1</Test>
    <Arguments>-r 10000000 -p 100</Arguments>
    <Description>Row Count: 10000000 - Partitions: 100 - Inner Join Test Time</Description>
  </Execute>
  <Execute>
    <Test>pts/spark-1.0.1</Test>
    <Arguments>-r 10000000 -p 100</Arguments>
    <Description>Row Count: 10000000 - Partitions: 100 - Group By Test Time</Description>
  </Execute>
  <Execute>
    <Test>pts/spark-1.0.1</Test>
    <Arguments>-r 1000000 -p 2000</Arguments>
    <Description>Row Count: 1000000 - Partitions: 2000 - Inner Join Test Time</Description>
  </Execute>
  <Execute>
    <Test>pts/spark-1.0.1</Test>
    <Arguments>-r 1000000 -p 1000</Arguments>
    <Description>Row Count: 1000000 - Partitions: 1000 - Inner Join Test Time</Description>
  </Execute>
  <Execute>
    <Test>pts/spark-1.0.1</Test>
    <Arguments>-r 1000000 -p 500</Arguments>
    <Description>Row Count: 1000000 - Partitions: 500 - Inner Join Test Time</Description>
  </Execute>
  <Execute>
    <Test>pts/spark-1.0.1</Test>
    <Arguments>-r 1000000 -p 100</Arguments>
    <Description>Row Count: 1000000 - Partitions: 100 - Inner Join Test Time</Description>
  </Execute>
  <Execute>
    <Test>pts/couchdb-1.3.0</Test>
    <Arguments>300 3000 30</Arguments>
    <Description>Bulk Size: 300 - Inserts: 3000 - Rounds: 30</Description>
  </Execute>
  <Execute>
    <Test>pts/etcd-1.0.0</Test>
    <Arguments>range KEY --total=4000000 --conns 500 --clients 1000</Arguments>
    <Description>Test: RANGE - Connections: 500 - Clients: 1000 - Average Latency</Description>
  </Execute>
  <Execute>
    <Test>pts/etcd-1.0.0</Test>
    <Arguments>range KEY --total=4000000 --conns 100 --clients 1000</Arguments>
    <Description>Test: RANGE - Connections: 100 - Clients: 1000 - Average Latency</Description>
  </Execute>
  <Execute>
    <Test>pts/etcd-1.0.0</Test>
    <Arguments>range KEY --total=4000000 --conns 100 --clients 1000</Arguments>
    <Description>Test: RANGE - Connections: 100 - Clients: 1000</Description>
  </Execute>
  <Execute>
    <Test>pts/etcd-1.0.0</Test>
    <Arguments>range KEY --total=4000000 --conns 500 --clients 100</Arguments>
    <Description>Test: RANGE - Connections: 500 - Clients: 100</Description>
  </Execute>
  <Execute>
    <Test>pts/etcd-1.0.0</Test>
    <Arguments>range KEY --total=4000000 --conns 50 --clients 1000</Arguments>
    <Description>Test: RANGE - Connections: 50 - Clients: 1000 - Average Latency</Description>
  </Execute>
  <Execute>
    <Test>pts/etcd-1.0.0</Test>
    <Arguments>range KEY --total=4000000 --conns 50 --clients 1000</Arguments>
    <Description>Test: RANGE - Connections: 50 - Clients: 1000</Description>
  </Execute>
  <Execute>
    <Test>pts/etcd-1.0.0</Test>
    <Arguments>range KEY --total=4000000 --conns 50 --clients 100</Arguments>
    <Description>Test: RANGE - Connections: 50 - Clients: 100 - Average Latency</Description>
  </Execute>
  <Execute>
    <Test>pts/etcd-1.0.0</Test>
    <Arguments>range KEY --total=4000000 --conns 50 --clients 100</Arguments>
    <Description>Test: RANGE - Connections: 50 - Clients: 100</Description>
  </Execute>
  <Execute>
    <Test>pts/etcd-1.0.0</Test>
    <Arguments>put --total=4000000 --val-size=256 --key-size=8 --conns 100 --clients 1000</Arguments>
    <Description>Test: PUT - Connections: 100 - Clients: 1000 - Average Latency</Description>
  </Execute>
  <Execute>
    <Test>pts/etcd-1.0.0</Test>
    <Arguments>put --total=4000000 --val-size=256 --key-size=8 --conns 100 --clients 1000</Arguments>
    <Description>Test: PUT - Connections: 100 - Clients: 1000</Description>
  </Execute>
  <Execute>
    <Test>pts/etcd-1.0.0</Test>
    <Arguments>put --total=4000000 --val-size=256 --key-size=8 --conns 500 --clients 100</Arguments>
    <Description>Test: PUT - Connections: 500 - Clients: 100</Description>
  </Execute>
  <Execute>
    <Test>pts/etcd-1.0.0</Test>
    <Arguments>put --total=4000000 --val-size=256 --key-size=8 --conns 50 --clients 1000</Arguments>
    <Description>Test: PUT - Connections: 50 - Clients: 1000 - Average Latency</Description>
  </Execute>
  <Execute>
    <Test>pts/etcd-1.0.0</Test>
    <Arguments>put --total=4000000 --val-size=256 --key-size=8 --conns 50 --clients 1000</Arguments>
    <Description>Test: PUT - Connections: 50 - Clients: 1000</Description>
  </Execute>
  <Execute>
    <Test>pts/etcd-1.0.0</Test>
    <Arguments>put --total=4000000 --val-size=256 --key-size=8 --conns 50 --clients 100</Arguments>
    <Description>Test: PUT - Connections: 50 - Clients: 100 - Average Latency</Description>
  </Execute>
  <Execute>
    <Test>pts/etcd-1.0.0</Test>
    <Arguments>put --total=4000000 --val-size=256 --key-size=8 --conns 50 --clients 100</Arguments>
    <Description>Test: PUT - Connections: 50 - Clients: 100</Description>
  </Execute>
  <Execute>
    <Test>pts/leveldb-1.1.0</Test>
    <Arguments>--benchmarks=fillsync --num=1000000</Arguments>
    <Description>Benchmark: Fill Sync</Description>
  </Execute>
  <Execute>
    <Test>pts/stockfish-1.4.0</Test>
    <Description>Total Time</Description>
  </Execute>
  <Execute>
    <Test>pts/build-mesa-1.0.0</Test>
  </Execute>
  <Execute>
    <Test>pts/build-gdb-1.1.0</Test>
  </Execute>
  <Execute>
    <Test>pts/build-gcc-1.4.0</Test>
  </Execute>
</PhoronixTestSuite>
