#!/bin/bash

# Source the configuration file (expected to define the USER_IDS array)
source ./user_ids.conf

# URL template for the webpage to extract the data from
URL_TEMPLATE="https://www.prismatic-imperium.com/user_page.php?user="

# Directory where CSV files will be stored
OUTPUT_DIR="/home/eric/asset_tracker"

# Create output directory if it doesn't exist
mkdir -p "$OUTPUT_DIR"

# Function to process a single user ID
process_user() {
    local USER_ID="$1"
    local URL="${URL_TEMPLATE}${USER_ID}"
    local CSV_FILE="${OUTPUT_DIR}/assets_data_${USER_ID}.csv"

    # Check if the CSV file exists; if not, create it with headers
    if [ ! -f "$CSV_FILE" ]; then
        echo "CSV file does not exist. Creating it with headers."
        echo "date,rank,diamonds,total_assets" > "$CSV_FILE"
    fi

    # Fetch the webpage and extract every value inside a user_assets_number span.
    # NOTE: the lookbehind markup below is an assumption -- the original tag was
    # lost here, so adjust the span/class to match the page's actual HTML.
    values=$(curl -s "$URL" | grep -oP '(?<=<span class="user_assets_number">)[^<]+')

    # Convert the newline-separated values into an array
    IFS=$'\n' read -r -d '' -a value_array <<< "$values"

    # Remove thousands separators (commas) from each value
    user_rank=$(echo "${value_array[0]}" | tr -d ',')
    liquid_assets=$(echo "${value_array[1]}" | tr -d ',')
    total_assets=$(echo "${value_array[2]}" | tr -d ',')

    # Get the current date and time
    current_datetime=$(date "+%Y-%m-%d %H:%M:%S")

    # Check that exactly 3 values were extracted (rank, diamonds, total_assets)
    if [ ${#value_array[@]} -eq 3 ]; then
        # Append the values to the CSV file
        echo "$current_datetime,$user_rank,$liquid_assets,$total_assets" >> "$CSV_FILE"
    else
        # Error handling: report when the values could not be extracted properly
        echo "Error: Could not extract all values or incorrect number of values for user $USER_ID." >&2
    fi
}

# Process each user ID in the array
for USER_ID in "${USER_IDS[@]}"; do
    echo "Processing user ID: $USER_ID"
    process_user "$USER_ID"
done

echo "All users processed."
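
# The script only requires that sourcing ./user_ids.conf defines a bash array
# named USER_IDS. A minimal sketch of that file (the IDs shown are placeholders,
# not values from the original source) might look like:
#
#   # user_ids.conf
#   USER_IDS=("1001" "1002" "1003")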