# -*- coding: utf-8 -*-
"""
Vector Packager + QGS Project Creator (Auto Grouping + Basemap + QField Ready + Resources)
------------------------------------------------------------------------------------------
Modified: Uses separate "PSU_LIST" vector for Batch JSON creation (instead of VECTOR_1)
Updated: Exports only one PSUList.json (no timestamp)
Enhanced: Overwrites PSUList.json automatically if PSU list changes
"""

from qgis.core import (
    QgsProcessingAlgorithm,
    QgsProcessingParameterVectorLayer,
    QgsProcessingParameterFile,
    QgsProcessingParameterFolderDestination,
    QgsProcessingParameterBoolean,
    QgsProcessingParameterString,
    QgsProcessingContext,
    QgsProcessingFeedback,
    QgsProject,
    QgsVectorLayer,
    QgsRasterLayer,
    QgsCoordinateReferenceSystem,
    QgsSnappingConfig,
    QgsTolerance
)
import os
import shutil
import xml.etree.ElementTree as ET
import json
from datetime import datetime


class VectorPackagerQGS(QgsProcessingAlgorithm):
    """Package vector layers into a portable, QField-ready QGS project.

    Workflow:
      1. Copy the selected vector layers into
         ``<dest>/QField_Projects/<base_name>/Resources``.
      2. Build a ``.qgs`` project that groups layers into Tracklog / Samples /
         Basemap, adds a Google Satellite XYZ basemap and enables snapping.
      3. Rewrite the saved project's datasource paths to relative
         ``./Resources/`` paths (defensive post-save XML edit).
      4. Unless running as a batch row (or the file was refreshed in the last
         10 s), export a single ``PSUList.json`` plus a ``PSUList.batch``
         (QGIS 3.40 format) with one batch row per unique ``PSU_number`` in
         the PSU_LIST layer, overwriting only when the PSU list changed.
    """

    # Processing parameter name constants
    DEST_FOLDER = "DEST_FOLDER"
    OPEN_FOLDER_AFTER = "OPEN_FOLDER_AFTER"

    def initAlgorithm(self, config=None):
        """Declare the algorithm's input parameters."""
        # VECTOR_1 is required (its filename provides the project folder name)
        self.addParameter(QgsProcessingParameterVectorLayer(
            "VECTOR_1", "Vector Layer 1 (used for folder name)", optional=False
        ))

        # VECTOR_2-3 optional
        for i in range(2, 4):
            self.addParameter(QgsProcessingParameterVectorLayer(
                f"VECTOR_{i}", f"Vector Layer {i} (optional)", optional=True
            ))

        # Optional QML style file per vector
        for i in range(1, 4):
            self.addParameter(QgsProcessingParameterFile(
                f"QML_{i}", f"Optional QML for Vector {i}", extension='qml', optional=True
            ))

        # PSU_LIST drives batch JSON creation (instead of VECTOR_1)
        self.addParameter(QgsProcessingParameterVectorLayer(
            "PSU_LIST",
            "PSU List Layer (used for Batch JSON creation)",
            optional=True
        ))

        self.addParameter(QgsProcessingParameterFolderDestination(
            self.DEST_FOLDER, "Destination Folder"
        ))

        self.addParameter(QgsProcessingParameterBoolean(
            self.OPEN_FOLDER_AFTER,
            "Open project folder after packaging",
            defaultValue=False
        ))

    @staticmethod
    def _normalize_psu(psu):
        """Return a PSU value as a clean string.

        Whole-number floats lose the trailing '.0' (5.0 -> "5"); everything
        else is str()'d and stripped. Used both when writing the batch JSON
        and when comparing against an existing PSUList.json, so the two
        representations always agree.
        """
        if isinstance(psu, float) and psu.is_integer():
            return str(int(psu))
        return str(psu).strip()

    def processAlgorithm(self, parameters, context: QgsProcessingContext, feedback: QgsProcessingFeedback):
        """Package the layers, write the project, and optionally emit batch JSON.

        :returns: dict with key ``PROJECT_FOLDER`` (path of the created folder)
        :raises Exception: if no valid vector layer was provided
        """
        # --- Collect selected vectors and their optional QML styles ---
        vectors, qmls = [], []
        for i in range(1, 4):
            v = self.parameterAsVectorLayer(parameters, f"VECTOR_{i}", context)
            q = self.parameterAsFile(parameters, f"QML_{i}", context)
            if v:
                vectors.append(v)
                qmls.append(q if q else None)

        if not vectors:
            raise Exception("[ERROR] No valid vector layers provided!")

        # Folder/project name is derived from the first vector's source file
        first_vector_source = vectors[0].source()
        base_name = os.path.splitext(os.path.basename(first_vector_source))[0]

        # --- PSU_LIST layer (fetched once; used below for batch JSON) ---
        psu_layer = self.parameterAsVectorLayer(parameters, "PSU_LIST", context)

        # NOTE(review): "SAMPLE_WORKLOAD_EXCEL" and "PROJECT_NAME" are not
        # declared in initAlgorithm; these lookups resolve to empty values
        # unless a wrapping model supplies them -- confirm against the caller.
        workload_excel = self.parameterAsFile(parameters, "SAMPLE_WORKLOAD_EXCEL", context)
        project_name = self.parameterAsString(parameters, "PROJECT_NAME", context).strip()

        dest_folder = self.parameterAsString(parameters, self.DEST_FOLDER, context)
        open_after = self.parameterAsBool(parameters, self.OPEN_FOLDER_AFTER, context)

        # --- Folder structure: <dest>/QField_Projects/<base_name>/Resources ---
        qfield_root = os.path.join(dest_folder, "QField_Projects")
        os.makedirs(qfield_root, exist_ok=True)

        project_folder = os.path.join(qfield_root, base_name)
        os.makedirs(project_folder, exist_ok=True)

        resources_folder = os.path.join(project_folder, "Resources")
        os.makedirs(resources_folder, exist_ok=True)

        project_path = os.path.join(project_folder, f"{base_name}_qfield.qgs")

        # --- Copy selected vectors to Resources ---
        copied_layers = []
        for v, q in zip(vectors, qmls):
            src = v.source()
            dst = os.path.join(resources_folder, os.path.basename(src))
            try:
                shutil.copy(src, dst)
                copied_layers.append((dst, q))
                feedback.pushInfo(f"[OK] Copied: {os.path.basename(src)} -> Resources")
            except Exception as e:
                feedback.reportError(f"[ERROR] Failed to copy {os.path.basename(src)}: {e}")

        # --- Create QGS Project (in-memory, EPSG:4326) ---
        project = QgsProject()
        project.clear()
        project.setCrs(QgsCoordinateReferenceSystem("EPSG:4326"))
        root = project.layerTreeRoot()

        # Layer-tree groups
        grp_tracklog = root.addGroup("Tracklog")
        grp_samples = root.addGroup("Samples")
        grp_basemap = root.addGroup("Basemap")

        # Maps each copied absolute path -> relative "Resources/<file>" path;
        # used later to localize datasources in the saved project XML.
        layer_map = {}

        # --- Load copied layers and sort them into groups by name ---
        for gpkg_path, qml_path in copied_layers:
            original_name = os.path.splitext(os.path.basename(gpkg_path))[0]
            lname_lower = original_name.lower()

            # Rename "additional" layers to the canonical QField layer name
            if "additional" in lname_lower:
                layer_name = "additional_replacement_ssu_or_replacement_psu"
            else:
                layer_name = original_name

            layer = QgsVectorLayer(gpkg_path, layer_name, "ogr")
            if not layer.isValid():
                feedback.reportError(f"[WARN] Invalid layer skipped: {layer_name}")
                continue

            layer.setCrs(QgsCoordinateReferenceSystem("EPSG:4326"))

            if qml_path and os.path.exists(qml_path):
                layer.loadNamedStyle(qml_path)

            project.addMapLayer(layer, addToLegend=False)
            layer_map[gpkg_path] = os.path.join("Resources", os.path.basename(gpkg_path)).replace("\\", "/")

            # Name-based grouping: tracklog / samples / everything else at root
            if "tracklog" in lname_lower:
                grp_tracklog.addLayer(layer)
            elif "selected_ssu" in lname_lower or "additional" in lname_lower:
                grp_samples.addLayer(layer)
            else:
                root.addLayer(layer)

        # --- Add Google Satellite XYZ basemap ---
        google_xyz_source = (
            "type=xyz&zmin=0&zmax=20&url=https://mt1.google.com/vt/"
            "lyrs%3Ds%26x%3D{x}%26y%3D{y}%26z%3D{z}"
        )
        google_layer = QgsRasterLayer(google_xyz_source, "Google Satellite", "wms")
        if google_layer.isValid():
            project.addMapLayer(google_layer, False)
            grp_basemap.addLayer(google_layer)
            feedback.pushInfo("[INFO] Added Google Satellite to Basemap")

        # --- Enable snapping (vertex+segment, 20 px tolerance, all layers) ---
        try:
            snap_config = project.snappingConfig()
            snap_config.setEnabled(True)
            snap_config.setMode(QgsSnappingConfig.AllLayers)
            snap_config.setType(QgsSnappingConfig.VertexAndSegment)
            snap_config.setTolerance(20)
            snap_config.setUnits(QgsTolerance.Pixels)
            project.setSnappingConfig(snap_config)
            feedback.pushInfo("[INFO] Snapping configured.")
        except Exception as e:
            feedback.reportError(f"[WARN] Snapping setup failed: {e}")

        # --- Save the project once ---
        try:
            project.write(project_path)
            feedback.pushInfo(f"[OK] Project file created: {os.path.basename(project_path)}")
        except Exception as e:
            feedback.reportError(f"[ERROR] Failed to write project file: {e}")

        # --- Rewrite datasource paths to ./Resources/ (defensive XML edit) ---
        # Done after clearing/deleting the project object so no layer handles
        # keep the file locked (avoids the Access Violation seen in QGIS 3.28
        # when manipulating project files).
        try:
            project.clear()
            del project

            if os.path.exists(project_path):
                tree = ET.parse(project_path)
                root_xml = tree.getroot()
                found_any = False
                for ds in root_xml.findall(".//datasource"):
                    src = ds.text
                    if src and src in layer_map:
                        ds.text = f"./{layer_map[src]}"
                        found_any = True

                if found_any:
                    tree.write(project_path, encoding="UTF-8", xml_declaration=True)
                    feedback.pushInfo("[OK] Project paths localized to ./Resources/")
        except Exception as e:
            feedback.reportError(f"[ERROR] Failed to localize paths in XML: {e}")

        # --- Create single batch JSON file (PSUList.json) ---
        # FIX: these two flags are also read after the try-block (cleanup
        # script guard); initialize them here so an early exception inside the
        # try can no longer cause a NameError further down.
        is_recent_batch_row = False
        is_freshly_updated = False
        try:
            import time
            expr_context = context.expressionContext()
            batch_index = expr_context.variable("processing_batch_index")

            # Identify the potential JSON path to check its age
            parent_dir = os.path.dirname(qfield_root)
            json_path = os.path.join(parent_dir, "json", "PSUList.json")

            psu_values = []

            # 1. Detection via batch index (standalone batch): skip JSON
            #    generation completely for all batch rows (including row 0).
            is_recent_batch_row = (batch_index is not None and str(batch_index).strip() != '')

            # 2. Detection via file timestamp (covers models run in batch):
            #    if the file was updated in the last 10 seconds, skip.
            if os.path.exists(json_path):
                file_age = time.time() - os.path.getmtime(json_path)
                if file_age < 10:  # 10 second threshold
                    is_freshly_updated = True

            if is_recent_batch_row or is_freshly_updated:
                if is_recent_batch_row:
                    # FIX: guard the int() conversion -- the batch index
                    # variable is only known to be a non-empty string here,
                    # not necessarily numeric.
                    try:
                        msg = f"Batch mode detected (Row {int(batch_index)+1})"
                    except (TypeError, ValueError):
                        msg = f"Batch mode detected (Row {batch_index})"
                else:
                    msg = "PSUList.json recently updated"
                feedback.pushInfo(f"[INFO] {msg}. Skipping PSU list regeneration.")
                psu_values = []  # Ensure it's empty to skip next block
            else:
                main_layer = psu_layer

                if not main_layer or not main_layer.isValid():
                    feedback.pushInfo("[WARN] PSU_LIST invalid or missing; JSON not generated.")
                    psu_values = []
                elif main_layer.fields().indexFromName("PSU_number") == -1:
                    feedback.pushInfo("[WARN] PSU_LIST missing 'PSU_number' field; JSON not generated.")
                    psu_values = []
                else:
                    # Extract unique PSU numbers (attributes only, no geometry)
                    from qgis.core import QgsFeatureRequest
                    request = QgsFeatureRequest().setFlags(QgsFeatureRequest.NoGeometry).setSubsetOfAttributes(["PSU_number"], main_layer.fields())

                    psu_values = sorted({
                        f["PSU_number"]
                        for f in main_layer.getFeatures(request)
                        if f["PSU_number"] not in [None, ""]
                    })

            if psu_values:
                feedback.pushInfo(f"[INFO] Found {len(psu_values)} unique PSU_number entries for batch JSON export")

                # Prefer model-supplied values; fall back to this algorithm's
                # own parameters. NOTE(review): sample_excel_val is currently
                # not written into the batch rows (split_excel_path is used
                # per-row instead) -- kept for parity with the calling model.
                try:
                    sample_excel_val = parameters.get("sample_workload__multiinput_is_allowed_excel") or expr_context.variable("sample_workload__multiinput_is_allowed_excel")
                except Exception:
                    sample_excel_val = None
                try:
                    project_name_val = parameters.get("project_name") or expr_context.variable("project_name")
                except Exception:
                    project_name_val = None

                if not sample_excel_val:
                    sample_excel_val = workload_excel if 'workload_excel' in locals() else "''"
                if not project_name_val:
                    project_name_val = project_name if 'project_name' in locals() else base_name

                parent_dir = os.path.dirname(qfield_root)
                json_dir = os.path.join(parent_dir, "json")
                os.makedirs(json_dir, exist_ok=True)

                json_path = os.path.join(json_dir, "PSUList.json")

                # Normalized string form of the new PSU list, used both for
                # the change-detection comparison and for JSON generation.
                new_psus = [self._normalize_psu(p) for p in psu_values]

                # --- Compare with existing JSON if present ---
                regenerate_json = True
                if os.path.exists(json_path):
                    try:
                        with open(json_path, "r", encoding="utf-8") as jf:
                            existing_data = json.load(jf)
                        existing_psus = [
                            str(item["PARAMETERS"]["replicatepsu_number"]).strip("'\"")
                            for item in existing_data if "PARAMETERS" in item
                        ]
                        # FIX: compare both sides sorted AND normalized the
                        # same way they are written (e.g. 5.0 -> "5"); the
                        # previous raw str(p) comparison could never match a
                        # float-typed PSU field and forced a regeneration on
                        # every run.
                        if sorted(existing_psus) == sorted(new_psus):
                            feedback.pushInfo("[INFO] PSUList.json already up to date - skipping regeneration.")
                            regenerate_json = False
                        else:
                            feedback.pushInfo("[INFO] PSU list changed - regenerating PSUList.json.")
                    except Exception as e:
                        feedback.reportError(f"[WARN] Could not read existing PSUList.json, regenerating: {e}")
                        regenerate_json = True

                # --- Generate JSON if needed ---
                if regenerate_json:
                    # Legacy batch format expects repr()-quoted values
                    dest_folder_str = repr(dest_folder)
                    project_name_str = repr(project_name_val)

                    batch_array = []        # legacy batch JSON rows
                    batch_array_340 = []    # QGIS 3.40 "batch_3.40" rows
                    for i, psu_str in enumerate(new_psus):
                        # Only the last row opens the QField project afterwards
                        is_last_psu = (i == len(new_psus) - 1)

                        # Path to the individual split Excel file for this PSU
                        split_excel_path = os.path.join(dest_folder, f"{psu_str}.xlsx").replace("\\", "/")

                        batch_array.append({
                            "PARAMETERS": {
                                "sample_workload__multiinput_is_allowed_excel": repr(split_excel_path),
                                "is_the_loaded_workload_an_huc_": "False",
                                "project_name": project_name_str,
                                "ssu_filter_by": "1",
                                "replicatepsu_number": repr(psu_str),
                                "project_directory": dest_folder_str,
                                "include_addtional_ssu_or_replacement_psu_sample_layer": "True",
                                "include_tracklog_layer": "True",
                                "open_the_qfield_project_after_packaging_": "True" if is_last_psu else "False"
                            },
                            "OUTPUTS": {}
                        })

                        batch_array_340.append({
                            "PARAMETERS": {
                                "sample_workload__multiinput_is_allowed_excel": split_excel_path,
                                "project_name": project_name_val,
                                "ssu_filter_by": 1,
                                "replicatepsu_number": psu_str,
                                "project_directory": dest_folder,
                                "include_addtional_ssu_or_replacement_psu_sample_layer": True,
                                "include_tracklog_layer": True,
                                "display_psu_numbers_for_extraction": False,
                                "open_the_qfield_project_after_packaging_": is_last_psu
                            },
                            "OUTPUTS": {}
                        })

                    with open(json_path, "w", encoding="utf-8") as jf:
                        json.dump(batch_array, jf, indent=2)
                    feedback.pushInfo(f"[OK] PSUList.json saved/updated: {json_path}")

                    batch_data_340 = {
                        "format": "batch_3.40",
                        "rows": batch_array_340
                    }
                    batch_path = os.path.join(json_dir, "PSUList.batch")
                    with open(batch_path, "w", encoding="utf-8") as bf:
                        json.dump(batch_data_340, bf, indent=2)
                    feedback.pushInfo(f"[OK] PSUList.batch saved/updated: {batch_path}")
        except Exception as e:
            feedback.reportError(f"[ERROR] Error while creating batch JSON: {e}")

        # --- Cleanup temporary files (.zip archives, .qgs~ backups) ---
        # (Consolidated: an identical .zip-only pass used to run earlier too.)
        cleanup_count = 0
        try:
            for filename in os.listdir(project_folder):
                lname = filename.lower()
                if lname.endswith(".zip") or lname.endswith(".qgs~"):
                    try:
                        os.remove(os.path.join(project_folder, filename))
                        cleanup_count += 1
                    except Exception:
                        pass  # best-effort: locked files are retried next run
            if cleanup_count > 0:
                feedback.pushInfo(f"[OK] Deleted {cleanup_count} temporary files during final cleanup.")
        except Exception as e:
            feedback.reportError(f"[WARN] Final cleanup failed: {e}")

        # --- Open folder if selected (os.startfile is Windows-only) ---
        if open_after:
            try:
                os.startfile(dest_folder)

                # If a cleanup script from a previous run exists, trigger it
                cmd_path = os.path.join(dest_folder, "clean_me_im_self_destruct!.cmd")
                if os.path.exists(cmd_path):
                    import subprocess
                    # Run cleanup in a new visible console window after a 2-second delay
                    subprocess.Popen(
                        f'timeout /t 2 /nobreak > nul & "{cmd_path}"',
                        shell=True,
                        cwd=dest_folder,
                        creationflags=subprocess.CREATE_NEW_CONSOLE
                    )
            except Exception as e:
                feedback.reportError(f"[WARN] Failed to open folder or trigger cleanup: {e}")

        # --- Create a self-deleting cleanup .cmd (workaround for WinError 32) ---
        # Skipped for batch rows / freshly-updated runs to avoid OneDrive
        # locking. Safe even if the batch-JSON try failed early, because the
        # two flags are initialized before that try-block.
        if not (is_recent_batch_row or is_freshly_updated):
            try:
                cmd_path = os.path.join(dest_folder, "clean_me_im_self_destruct!.cmd")
                # Deletes everything in the destination folder except the
                # QField_Projects folder, then deletes itself.
                cmd_content = (
                    "@echo off\n"
                    "echo ===========================================\n"
                    "echo GeoFASU Destination Folder Cleaner\n"
                    "echo ===========================================\n"
                    "echo Preserving: QField_Projects\n"
                    "echo.\n"
                    "cd /d \"%~dp0\"\n"
                    "for /d %%G in (*) do (\n"
                    "    if /i \"%%G\" neq \"QField_Projects\" (\n"
                    "        echo Removing folder: %%G\n"
                    "        rd /s /q \"%%G\"\n"
                    "    )\n"
                    ")\n"
                    "for %%F in (*) do (\n"
                    "    if /i \"%%F\" neq \"%~nx0\" (\n"
                    "        echo Removing file: %%F\n"
                    "        del /f /q \"%%F\"\n"
                    "    )\n"
                    ")\n"
                    "echo.\n"
                    "echo Cleanup Complete!\n"
                    "(goto) 2>nul & del \"%~f0\"\n"
                )
                with open(cmd_path, "w", encoding="cp1252") as f:
                    f.write(cmd_content)
                feedback.pushInfo(f"[INFO] Cleanup script created: {cmd_path}")
            except Exception as e:
                feedback.reportError(f"[ERROR] Failed to create cleanup script: {e}")

        feedback.pushInfo(f"[DONE] Portable project created at: {project_folder}")
        return {"PROJECT_FOLDER": project_folder}

    def name(self):
        """Unique internal algorithm id."""
        return "vector_packager_qgs_autogroup_visible_qfield_3"

    def displayName(self):
        """Human-readable name shown in the Processing toolbox."""
        return "Vector Packager + QGS Project (Auto Grouping + Basemap + QField Visible + Resources + 3 Layers)"

    def group(self):
        """Toolbox group label."""
        return "PSA-QZN-GMU"

    def groupId(self):
        """Toolbox group id."""
        return "psa_qzn_gmu"

    def createInstance(self):
        """Return a fresh instance for the Processing framework."""
        return VectorPackagerQGS()
