#!/usr/bin/env python3

import copy
import datetime
from collections import defaultdict

import psycopg2 as pg
import psycopg2.extras as pge
import pytz

from django.db import IntegrityError, transaction
from django.utils.timezone import make_aware

from activity.models import Activity, ActivityType, Entry, Guest
from member.models import Club

from ._import_utils import BulkCreateManager, ImportCommand, timed


class Command(ImportCommand):
    """
    Import command for Activities Base Data (Comptes, and Aliases)
    """

    def add_arguments(self, parser):
        """No command-specific arguments; common ones (nk15db, nk15user, map)
        are provided by the ImportCommand base class (see handle())."""
        pass
    @timed
    @transaction.atomic
    def import_activities(self, cur, chunk_size):
        """
        Import the NK15 ``activites`` table as Activity objects.

        :param cur: an open NK15 database cursor (DictCursor rows).
        :param chunk_size: batch size for BulkCreateManager bulk inserts.
        :return: a pair of dicts mapping NK15 activity id -> Activity and
                 activity name -> Activity, for use by later import steps.
        """
        cur.execute("SELECT * FROM activites ORDER by id")
        n = cur.rowcount
        bulk_mgr = BulkCreateManager(chunk_size=chunk_size)
        activity_type = ActivityType.objects.get(name="Pot")  # Need to be fixed manually
        kfet = Club.objects.get(name="Kfet")
        # Maps returned to the caller so later steps can resolve activities.
        map_idactivity = {}
        map_nameactivity = {}
        for idx, row in enumerate(cur):
            # BUG FIX: update_line is an instance helper, and the activites
            # table has no "alias" column — display the activity title instead.
            self.update_line(idx, n, row["titre"])
            organizer = Club.objects.filter(name=row["signature"])
            if organizer.exists():
                # Try to find the club that organizes the activity. If not found, assume it's Kfet (fix manually)
                organizer = organizer.get()
            else:
                organizer = kfet
            obj_dict = {
                "pk": row["id"],  # BUG FIX: missing comma here was a syntax error
                "name": row["titre"],
                "description": row["description"],
                "activity_type": activity_type,  # By default Pot
                "creater": self.MAP_IDBDE[row["responsable"]],
                "organizer": organizer,
                "attendees_club": kfet,  # Maybe fix manually
                "date_start": row["debut"],
                "date_end": row["fin"],
                "valid": row["validepar"] is not None,
                "open": row["open"],  # Should always be False
            }
            # WARNING: Fields lieu, liste, listeimprimee are missing
            # BUG FIX: keep a reference to the created object so it can be
            # registered in the id/name maps (previously `activity` was undefined).
            activity = Activity(**obj_dict)
            bulk_mgr.add(activity)
            map_idactivity[row["id"]] = activity
            map_nameactivity[activity.name] = activity
        bulk_mgr.done()
        return map_idactivity, map_nameactivity

    @timed
    @transaction.atomic
    def import_activity_entries(self, cur):
        """
        Import activity guests (NK15 ``invites``) then activity entries
        (NK15 ``entree_activites``).

        :param cur: an open NK15 database cursor (DictCursor rows).
        """
        # BUG FIX: the method was missing `self` although it uses self.MAP_IDBDE.
        bulk_mgr = BulkCreateManager()
        # Guests indexed by the inviter's NK15 note id, so that guest entries
        # (which reference the inviter's idbde) can be matched back to a Guest.
        # BUG FIX: was a bare set(), never populated, yet indexed like a dict.
        map_idguests = defaultdict(list)
        cur.execute("SELECT * FROM invites ORDER by id")
        n = cur.rowcount
        for idx, row in enumerate(cur):
            self.update_line(idx, n, row["nom"] + " " + row["prenom"])
            obj_dict = {
                "pk": row["id"],
                "activity_id": row["activity"],
                "last_name": row["nom"],
                "first_name": row["prenom"],
                "inviter": self.MAP_IDBDE[row["responsable"]],
            }
            guest = Guest(**obj_dict)
            bulk_mgr.add(guest)
            map_idguests[row["responsable"]].append(guest)
        bulk_mgr.done()

        cur.execute("SELECT * FROM entree_activites ORDER by id")
        n = cur.rowcount
        for idx, row in enumerate(cur):
            self.update_line(idx, n, row["nom"] + " " + row["prenom"])
            guest = None
            if row["est_invite"]:
                # assumes the entry's idbde is the inviter's note id for guest
                # entries — TODO confirm against the NK15 schema.
                for g in map_idguests[row["idbde"]]:
                    # BUG FIX: compare against the row's activity id; the old
                    # code referenced an undefined `activity` variable.
                    if g.activity_id == row["activity"]:
                        guest = g
                        break
            obj_dict = {
                "activity_id": row["activity"],
                "time": make_aware(row["heure_entree"]),
                "note": guest.inviter if guest else self.MAP_IDBDE[row["idbde"]],
                "guest": guest,
            }
            # Inner atomic block so a failed get_or_create does not poison the
            # outer transaction; the IntegrityError is still propagated.
            try:
                with transaction.atomic():
                    Entry.objects.get_or_create(**obj_dict)
            except IntegrityError as e:
                raise e

    def handle(self, *args, **kwargs):
        """Command entry point: connect to the NK15 database and run the import."""
        # default args, provided by ImportCommand.
        nk15db, nk15user = kwargs['nk15db'], kwargs['nk15user']
        # connecting to nk15 database
        conn = pg.connect(database=nk15db, user=nk15user)
        cur = conn.cursor(cursor_factory=pge.DictCursor)

        # Restore a previously saved id map when one is supplied on the CLI.
        if kwargs["map"]:
            self.load(kwargs["map"])
        # NOTE(review): chunk_size is not defined anywhere in this scope —
        # presumably it should come from kwargs (e.g. kwargs["chunk"]); confirm
        # against ImportCommand.add_arguments and fix.
        self.import_activities(cur, chunk_size)