from flask import Flask, request, jsonify
from redis import Redis
from rq import Queue
from os import environ
import os
import json
import logging
import hashlib
from datetime import datetime
from time import sleep
from redis.exceptions import ConnectionError as RedisConnectionError

# Set up logger
from logger import setup_logger
logger = setup_logger('broker', log_file='broker.log')

app = Flask(__name__)

# Connect to Redis.
# Environment variables always arrive as strings, so cast port/db to int
# explicitly instead of handing the client a string (the defaults below
# were already inconsistent: '6379' as str vs 0 as int).
redis = Redis(host=environ.get('REDIS_HOST', 'localhost'),
              port=int(environ.get('REDIS_PORT', 6379)),
              db=int(environ.get('REDIS_DB', 0)))
queue = Queue(environ.get('REDIS_QUEUE', 'trello-notifications'), connection=redis)

# Deduplication key prefix in Redis
DEDUPE_KEY_PREFIX = "trello_notification_"
# Resolve the SID file next to this module. os.path.join (instead of the
# previous f-string with a literal "/") stays correct when __file__ is a
# bare relative name: dirname() is then "", and the f-string form would
# have produced the absolute path "/sids.json".
SID_FILE_PATH = os.path.join(os.path.dirname(__file__),
                             environ.get('SID_FILE_PATH', 'sids.json'))


# Helper function to generate a unique key for each notification
def generate_dedupe_key(data):
    """Return a stable Redis dedupe key for *data*.

    The payload is serialized with sort_keys=True so logically-equal dicts
    always produce the same key regardless of insertion order, then MD5-hashed
    (fingerprinting only, not a security use) and prefixed with
    DEDUPE_KEY_PREFIX.
    """
    data_str = json.dumps(data, sort_keys=True)
    return f"{DEDUPE_KEY_PREFIX}{hashlib.md5(data_str.encode('utf-8')).hexdigest()}"


def store_sid(new_sid):
    """Append *new_sid*, stamped with the current ISO time, to the SID file.

    The file holds a JSON list of {'sid', 'timestamp'} records; it is
    created on first write.
    """
    if os.path.exists(SID_FILE_PATH):
        with open(SID_FILE_PATH, 'r') as fh:
            records = json.load(fh)
    else:
        records = []
    entry = {'sid': new_sid, 'timestamp': datetime.now().isoformat()}
    records.append(entry)
    with open(SID_FILE_PATH, 'w') as fh:
        json.dump(records, fh)

@app.route('/webhook', methods=['GET'])
def verification():
    """Handle the webhook verification GET and persist the supplied SID."""
    try:
        logger.info(":: WEBHOOK GET Received ::")
        sid = request.args.get('sid')
        if not sid:
            logger.error("Couldn't find SID")
            return jsonify({"success": False}), 400
        store_sid(sid)
        return jsonify({"success": True}), 200
    except Exception as e:
        logger.error(f"Exception: {e}")
        return jsonify({"success": False}), 500

@app.route('/update-sid', methods=['POST'])
def updateSID():
    """Persist a new SID supplied in a JSON POST body: {"sid": "..."}."""
    try:
        payload = request.json
        sid = payload.get('sid', None)
        if not sid:
            logger.error("Couldn't find SID")
            return jsonify({"success": False, "msg": "Invalid Data..."}), 400
        store_sid(sid)
        return jsonify({"success": True, "msg": "Data Updated."}), 200
    except Exception as e:
        logger.error(f"Exception: {e}")
        return jsonify({"success": False, "msg": f"Error Occured: {e}"}), 500


# Webhook endpoint to receive tasks from GAS
@app.route('/webhook', methods=['POST'])
def webhook():
    data = request.json
    logger.info("Data Received")
    try:
        if data:
            dedupe_key = generate_dedupe_key(data)

            if not dedupe_key:
                return jsonify({"error": "Invalid notification data"}), 400

            # Check Redis if this notification has been processed before
            if redis.get(dedupe_key):
                logger.info(f"Duplicate notification detected: {dedupe_key}")
                return jsonify({"status": "duplicate", "message": "Notification already processed"}), 200

            # Retry logic in case Redis is temporarily down
            retries = 5
            backoff_time = 1  # start with 1 second
            while retries > 0:
                try:
                    # Mark this notification as processed in Redis (with a TTL of 1 hour)
                    redis.setex(dedupe_key, 3600 + 10, "processed")  # Add a 10-second grace period
                    break
                except RedisConnectionError:
                    logger.warning(f"Redis connection failed. Retrying in {backoff_time}s...")
                    retries -= 1
                    sleep(backoff_time)
                    backoff_time *= 2  # Exponential backoff

            if retries == 0:
                logger.error("Failed to connect to Redis after several attempts.")
                return jsonify({"error": "Service unavailable, please try again later"}), 503

            try:
                # Put the task in the queue
                job = queue.enqueue('worker.process_job', data)
                logger.info(f"Job {job.id} added to the queue with data: {data}")
                return jsonify({"status": "accepted", "job_id": job.get_id()}), 200
            except Exception as e:
                logger.error(f"Failed to enqueue job: {e}")
                return jsonify({"error": "Internal server error"}), 500
        else:
            logger.warning("Invalid data received, missing JSON body")
            return jsonify({"error": "Invalid data"}), 400
    except Exception as e:
        logger.error(f"Exception : {e}")
        return jsonify({"error": "Invalid data"}), 500

@app.route('/')
def index():
    """Root landing page; nothing is served here."""
    return '<h1>Nothing Here...</h1>'

if __name__ == '__main__':
    # SERVER_PORT arrives from the environment as a string, while the
    # Werkzeug dev server expects an int port — cast explicitly (the old
    # code only worked when the env var was unset and the int default hit).
    app.run(debug=False,
            host=environ.get('SERVER_HOST', '0.0.0.0'),
            port=int(environ.get('SERVER_PORT', 8080)))
