#!/bin/bash

# --- Configuration defaults (each overridable via a command-line flag) ---
repetitions=1                   # -r: number of test iterations to run
min_sleep=1                     # -m: minimum pause between iterations (seconds)
max_sleep=5                     # -x: maximum pause between iterations (seconds)
base_url="https://ardzsd.de"    # -u: server under test
client="default_client"         # -c: client label recorded in the CSV
user="default_user"             # -U: user label recorded in the CSV
network="default_network"       # -n: network label recorded in the CSV

# Destination for the per-request timing rows
output_file="curl_report.csv"

# Resource paths, joined with base_url after option parsing
css_url="otrs-web/skins/Agent/default/css-cache/CommonCSS_336575c40014056de7baefb417c78be8.css"
png_url="otrs-web/skins/Agent/BR/img/ARD-ServiceDesk_RGB_72dpi.png"

#######################################
# Fetch a URL once, bypassing caches, and record timing metrics.
# Globals:
#   output_file (written), client, user, network (read)
# Arguments:
#   $1 - URL to request
#   $2 - human-readable file type label (e.g. "CSS", "PNG")
#   $3 - iteration number (used only in the progress message)
# Outputs:
#   Appends one semicolon-separated row to $output_file;
#   prints a progress line to stdout; errors go to stderr.
# Returns:
#   0 on success, 1 if the curl request fails.
#######################################
perform_curl() {
    # 'local' keeps these from clobbering same-named globals between calls.
    local url=$1
    local file_type=$2
    local iteration=$3

    # Unique query parameter defeats intermediate caches on every request.
    local random_query="nocache=$(date +%s%N)"
    local full_url="${url}?${random_query}"

    local timestamp
    timestamp=$(date +"%Y-%m-%d %H:%M:%S")

    # Perform the request; on failure, skip the CSV row instead of
    # silently recording empty timing fields.
    local curl_output
    if ! curl_output=$(curl -o /dev/null -s -w "%{time_starttransfer};%{time_total}\n" \
        -H 'Cache-Control: no-cache' \
        "$full_url"); then
        echo "curl request failed for $full_url" >&2
        return 1
    fi

    # curl_output looks like "0.123456;0.234567"; split without forking cut.
    local ttfb total_time download_time
    ttfb=${curl_output%%;*}
    total_time=${curl_output#*;}
    # Pass values via -v instead of interpolating them into the awk program.
    download_time=$(awk -v total="$total_time" -v first="$ttfb" 'BEGIN {print total - first}')

    # Append results to the CSV file
    echo "$timestamp;$url;$ttfb;$download_time;$total_time;$client;$user;$network" >> "$output_file"

    echo "$file_type - Iteration $iteration: TTFB: $ttfb, DownloadTime: $download_time, Total Time: $total_time"
}

#######################################
# Sleep for a random whole number of seconds in [min_sleep, max_sleep].
# Globals:
#   min_sleep, max_sleep (read)
# Outputs:
#   Prints the chosen duration to stdout, then sleeps.
#######################################
random_sleep() {
    # Use bash's $RANDOM instead of awk's srand(): srand() with no argument
    # seeds from the current second, so back-to-back calls within the same
    # second all produced the identical "random" value.
    local range=$((max_sleep - min_sleep + 1))
    # Guard against a misconfigured range (max < min) to avoid modulo by
    # zero or a negative divisor.
    if (( range < 1 )); then
        range=1
    fi
    local sleep_time=$((min_sleep + RANDOM % range))
    echo "Sleeping for $sleep_time seconds..."
    sleep "$sleep_time"
}

# Parse command-line options. Flags:
#   -r reps   -m min sleep   -x max sleep   -u base URL
#   -c client label   -U user label   -n network label
while getopts r:m:x:u:c:U:n: flag
do
    case "${flag}" in
        r) repetitions=${OPTARG};;
        m) min_sleep=${OPTARG};;
        x) max_sleep=${OPTARG};;
        u) base_url=${OPTARG};;
        c) client=${OPTARG};;
        U) user=${OPTARG};;
        n) network=${OPTARG};;
        *) # Unknown flag: abort instead of silently continuing with defaults.
           echo "Usage: $0 [-r reps] [-m min_sleep] [-x max_sleep] [-u base_url] [-c client] [-U user] [-n network]" >&2
           exit 2;;
    esac
done

# Validate numeric options up front so failures are loud and immediate,
# not confusing arithmetic errors halfway through the run.
for val in "$repetitions" "$min_sleep" "$max_sleep"; do
    if ! [[ "$val" =~ ^[0-9]+$ ]]; then
        echo "Error: -r, -m and -x require non-negative integers (got '$val')" >&2
        exit 2
    fi
done
if (( min_sleep > max_sleep )); then
    echo "Error: min sleep ($min_sleep) must not exceed max sleep ($max_sleep)" >&2
    exit 2
fi

# Build the full test URLs from the (possibly overridden) base URL.
css_url="${base_url}/$css_url"
png_url="${base_url}/$png_url"

# Start a fresh CSV report with a header row.
echo "timestamp;url;ttfb;download;total;client;user;network" > "$output_file"

# Measurement loop: fetch both resources each iteration.
for ((i=1; i<=repetitions; i++))
do
    echo "Test iteration $i of $repetitions"

    perform_curl "$css_url" "CSS" "$i"
    perform_curl "$png_url" "PNG" "$i"

    # Pause between iterations, but not after the final one.
    if (( i < repetitions )); then
        random_sleep
    fi
done

echo "Test completed. Results saved in $output_file"