// FALCON is a small Fyne GUI tool that repeatedly fetches a list of URLs,
// measures load metrics (total load time, LCP, TTFB), and appends them to a CSV file.
package main

import (
	"context"
	"encoding/csv"
	"encoding/json"
	"fmt"
	"io"
	"math/rand"
	"net/http"
	"os"
	"strconv"
	"strings"
	"sync/atomic"
	"time"

	"fyne.io/fyne/v2/app"
	"fyne.io/fyne/v2/container"
	"fyne.io/fyne/v2/widget"
	"github.com/chromedp/cdproto/performance"
	"github.com/chromedp/chromedp"
)

// Config struct for saving and loading user inputs
type Config struct {
	URLs        []string `json:"urls"`
	Element     string   `json:"element"`
	MinInterval int      `json:"min_interval"`
	MaxInterval int      `json:"max_interval"`
}

// configFile is the JSON file (relative to the working directory) where user inputs are persisted.
var configFile = "config.json"

// stopFetching and running are shared between the UI callbacks and the worker
// goroutine, so they are atomic booleans to avoid data races.
var stopFetching atomic.Bool
var running atomic.Bool

// Fetches a single resource (such as a PNG) with a plain HTTP GET and returns
// how long the full download took.
func fetchImage(url string) (time.Duration, error) {
	start := time.Now()

	req, err := http.NewRequest("GET", url, nil)
	if err != nil {
		return 0, err
	}

	// A request timeout keeps one slow resource from stalling the whole loop
	// (the 30s limit is an arbitrary safeguard).
	client := &http.Client{Timeout: 30 * time.Second}
	resp, err := client.Do(req)
	if err != nil {
		return 0, err
	}
	defer resp.Body.Close()

	if resp.StatusCode != http.StatusOK {
		return 0, fmt.Errorf("failed to fetch: %s", resp.Status)
	}

	// Drain the body so the measurement covers the transfer, not just the headers.
	if _, err := io.Copy(io.Discard, resp.Body); err != nil {
		return 0, err
	}

	return time.Since(start), nil
}

// Fetch website metrics: navigate, wait for a user-defined element to become
// visible, read the browser's performance metrics, and approximate network idle
// with a short fixed wait. Returns total elapsed time (including that wait),
// LCP, and TTFB.
func fetchWebsiteMetrics(url, element string) (total time.Duration, lcp time.Duration, ttfb time.Duration, err error) {
	start := time.Now()

	// Create a new Chromedp context and cap the whole run so a hung page cannot
	// block the worker forever (the 90s limit is an arbitrary safeguard).
	ctx, cancel := chromedp.NewContext(context.Background())
	defer cancel()
	ctx, cancel = context.WithTimeout(ctx, 90*time.Second)
	defer cancel()

	var metrics []*performance.Metric

	// Navigate to the URL, wait for the element, collect performance metrics,
	// then pause briefly to let any remaining requests settle.
	err = chromedp.Run(ctx,
		performance.Enable(),          // Metrics collection must be enabled before it can be read
		chromedp.Navigate(url),        // Navigate to the website (waits for the load event)
		chromedp.WaitVisible(element), // Wait for the user-defined element to be visible
		chromedp.ActionFunc(func(ctx context.Context) error {
			var err error
			metrics, err = performance.GetMetrics().Do(ctx)
			return err
		}),
		chromedp.Sleep(2*time.Second), // Approximate network idle with a fixed wait
	)
	if err != nil {
		return 0, 0, 0, err
	}

	// Total load time measured on the wall clock; note it includes the 2s settle wait.
	total = time.Since(start)

	// Extract LCP and TTFB from the performance metrics. These metric names are
	// assumptions: if the browser does not report them, the lookups return 0.
	lcp = performanceMetric(metrics, "LargestContentfulPaint")
	ttfb = performanceMetric(metrics, "NavigationTiming.responseStart") - performanceMetric(metrics, "NavigationTiming.requestStart")

	return total, lcp, ttfb, nil
}

// Helper function to look up a named metric; it assumes values are reported in
// milliseconds and returns 0 when the metric is absent.
func performanceMetric(metrics []*performance.Metric, name string) time.Duration {
	for _, metric := range metrics {
		if metric.Name == name {
			// Multiply before converting so fractional milliseconds are preserved.
			return time.Duration(metric.Value * float64(time.Millisecond))
		}
	}
	return 0
}

// Logs the fetch details to CSV including total load time, LCP, and TTFB
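// Each row is: timestamp (RFC3339), URL, total load time, LCP, TTFB, with the
// durations written in Go's time.Duration string form (e.g. "1.234s").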
func logTimeToCSV(url string, totalDuration, lcp, ttfb time.Duration, filename string) error {
	file, err := os.OpenFile(filename, os.O_APPEND|os.O_CREATE|os.O_WRONLY, 0644)
	if err != nil {
		return err
	}
	defer file.Close()

	writer := csv.NewWriter(file)

	// Log the current timestamp, URL, total load time, LCP, and TTFB
	record := []string{
		time.Now().Format(time.RFC3339),
		url,
		totalDuration.String(), // Total load time
		lcp.String(),           // Largest Contentful Paint (LCP)
		ttfb.String(),          // Time to First Byte (TTFB)
	}
	if err := writer.Write(record); err != nil {
		return err
	}

	// Flush explicitly so write errors are reported rather than lost in a deferred call.
	writer.Flush()
	return writer.Error()
}

// Calculate and display metrics: min, max, and average load times
func calculateAndDisplayMetrics(url string, durations []time.Duration, logLabel *widget.Label) {
	if len(durations) == 0 {
		logLabel.SetText(fmt.Sprintf("No data for %s", url))
		return
	}

	var total time.Duration
	min, max := durations[0], durations[0]

	for _, d := range durations {
		total += d
		if d < min {
			min = d
		}
		if d > max {
			max = d
		}
	}

	average := total / time.Duration(len(durations))

	logLabel.SetText(fmt.Sprintf("URL: %s\nMin Load Time: %v\nMax Load Time: %v\nAvg Load Time: %v", url, min, max, average))
}

// Save the user inputs to a JSON config file
func saveConfig(urls []string, element string, minInterval, maxInterval int) error {
	config := Config{
		URLs:        urls,
		Element:     element,
		MinInterval: minInterval,
		MaxInterval: maxInterval,
	}

	file, err := json.MarshalIndent(config, "", "  ")
	if err != nil {
		return err
	}

	return os.WriteFile(configFile, file, 0644)
}

// Load configuration from the config file if it exists
func loadConfig() (*Config, error) {
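	// A missing config file is not an error; there is simply nothing to restore.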
	if _, err := os.Stat(configFile); os.IsNotExist(err) {
		return nil, nil
	}

	file, err := os.ReadFile(configFile)
	if err != nil {
		return nil, err
	}

	var config Config
	err = json.Unmarshal(file, &config)
	if err != nil {
		return nil, err
	}

	return &config, nil
}

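// main builds the Fyne UI, restores any saved configuration, and wires the
// Start/Stop buttons to a background worker that loops over the URL list.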
func main() {
	a := app.New()
	w := a.NewWindow("FALCON - Fetch Time Logger with Config")

	// Multi-line input field for URLs
	urlListEntry := widget.NewMultiLineEntry()
	urlListEntry.SetPlaceHolder("Enter URLs, one per line")

	// Input field for specific element to wait for
	elementEntry := widget.NewEntry()
	elementEntry.SetPlaceHolder("Enter CSS selector of element to wait for (e.g., #footer)")

	// Input fields for custom sleep intervals
	minInterval := widget.NewEntry()
	minInterval.SetPlaceHolder("Min seconds")

	maxInterval := widget.NewEntry()
	maxInterval.SetPlaceHolder("Max seconds")

	// Load configuration if available and pre-fill inputs
	config, err := loadConfig()
	if err == nil && config != nil {
		urlListEntry.SetText(strings.Join(config.URLs, "\n"))
		elementEntry.SetText(config.Element)
		minInterval.SetText(strconv.Itoa(config.MinInterval))
		maxInterval.SetText(strconv.Itoa(config.MaxInterval))
	}

	// Log display
	logLabel := widget.NewLabel("Ready")

	// Buttons for start and stop fetching
	startButton := widget.NewButton("Start", func() {
		if running.Load() {
			logLabel.SetText("Already running...")
			return
		}
		stopFetching.Store(false)
		running.Store(true)

		go func() {
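			// Note: this worker updates logLabel directly from a goroutine to keep the
			// code short; newer Fyne releases recommend routing UI updates through
			// fyne.Do, so treat these direct SetText calls as a simplification.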
			// Parse the list of URLs
			urls := strings.Split(urlListEntry.Text, "\n")
			urls = cleanURLList(urls)

			// Get the specific element to wait for
			elementToWaitFor := elementEntry.Text
			if elementToWaitFor == "" {
				logLabel.SetText("Please enter a valid CSS selector for the element to wait for.")
				running.Store(false)
				return
			}

			// Parse sleep interval range
			min, errMin := strconv.Atoi(minInterval.Text)
			max, errMax := strconv.Atoi(maxInterval.Text)
			if errMin != nil || errMax != nil || min <= 0 || max < min {
				logLabel.SetText("Invalid sleep intervals: min must be > 0 and max must be >= min.")
				running.Store(false)
				return
			}

			// Save the configuration
			err := saveConfig(urls, elementToWaitFor, min, max)
			if err != nil {
				logLabel.SetText(fmt.Sprintf("Error saving config: %v", err))
				running.Store(false)
				return
			}

			// Store load times for each URL
			urlMetrics := make(map[string][]time.Duration)

			for !stopFetching.Load() {
				for _, url := range urls {
					if stopFetching.Load() {
						logLabel.SetText("Stopped fetching.")
						break
					}

					var duration time.Duration
					var lcp, ttfb time.Duration
					var err error

					// Use a bare HTTP fetch for static PNGs and a full browser run for pages
					if strings.HasSuffix(strings.ToLower(url), ".png") {
						// Fetch PNG
						duration, err = fetchImage(url)
					} else {
						// Fetch website with advanced loading metrics
						duration, lcp, ttfb, err = fetchWebsiteMetrics(url, elementToWaitFor)
					}

					if err != nil {
						logLabel.SetText(fmt.Sprintf("Error fetching %s: %v", url, err))
						break
					}

					// Log the fetch time
					err = logTimeToCSV(url, duration, lcp, ttfb, "fetch_times.csv")
					if err != nil {
						logLabel.SetText(fmt.Sprintf("Error logging time: %v", err))
						break
					}

					// Add the duration to the metrics for this URL
					urlMetrics[url] = append(urlMetrics[url], duration)

					// Calculate and display metrics for this URL
					calculateAndDisplayMetrics(url, urlMetrics[url], logLabel)

					// Sleep between requests
					sleepDuration := min + rand.Intn(max-min+1)
					logLabel.SetText(fmt.Sprintf("Fetched %s in %v, sleeping for %d seconds", url, duration, sleepDuration))
					time.Sleep(time.Duration(sleepDuration) * time.Second)
				}
			}

			running.Store(false)
			logLabel.SetText("Stopped fetching.")
		}()
	})

	stopButton := widget.NewButton("Stop", func() {
		if running.Load() {
			// Only request the stop here; the worker clears the running flag itself
			// once it has finished its current fetch.
			stopFetching.Store(true)
			logLabel.SetText("Stopping after the current fetch...")
		} else {
			logLabel.SetText("Not running.")
		}
	})

	// Layout: URL List, Element Input, Intervals, Log Label, Start/Stop Buttons
	content := container.NewVBox(
		urlListEntry,
		elementEntry,
		widget.NewLabel("Min Interval (seconds)"),
		minInterval,
		widget.NewLabel("Max Interval (seconds)"),
		maxInterval,
		logLabel,
		container.NewHBox(startButton, stopButton),
	)

	w.SetContent(content)
	w.ShowAndRun()
}

// Helper function to clean the URL list (remove empty lines and trim spaces)
func cleanURLList(urls []string) []string {
	var cleanedURLs []string
	for _, url := range urls {
		trimmed := strings.TrimSpace(url)
		if trimmed != "" {
			cleanedURLs = append(cleanedURLs, trimmed)
		}
	}
	return cleanedURLs
}