package com.k_int.discover.util;

import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.util.LinkedHashMap;
import java.util.LinkedHashSet;
import java.util.Map;
import java.util.Set;

import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;

import org.apache.commons.httpclient.DefaultHttpMethodRetryHandler;
import org.apache.commons.httpclient.HttpClient;
import org.apache.commons.httpclient.HttpException;
import org.apache.commons.httpclient.HttpMethod;
import org.apache.commons.httpclient.HttpStatus;
import org.apache.commons.httpclient.methods.GetMethod;
import org.apache.commons.httpclient.params.HttpMethodParams;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import org.xml.sax.SAXException;

import com.k_int.discover.datamodel.schema.dto.VocabPathDataDTO;
import com.k_int.discover.datamodel.schema.dto.VocabTermDataDTO;


/**
 * Utility class for looking up a term within a vocabulary and retrieving
 * information about it, given the term's identifier.
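 * <p>
 * Illustrative usage only - the bank URL, authority, vocabulary and term
 * identifiers below are placeholder values, not real endpoints:
 * <pre>
 *   VocabTermDataDTO term = VocabLookupUtils.lookupTermDetails(
 *       "http://bank.example.org", "exampleAuthority", "exampleVocab", "term-1");
 * </pre>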
 * @author rpb rich@k-int.com
 */
public class VocabLookupUtils {
    
    private static final Log log = LogFactory.getLog(VocabLookupUtils.class);
    private static final String API_GET_ZTHES_VOCAB = "BANK_URL/linkeddata/AUTHORITY/VOCABULARY.zthes";
    private static final String API_GET_ZTHES_VOCAB_NESTED = "BANK_URL/linkeddata/AUTHORITY/VOCABULARY.zthes?type=nested";
 

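    /**
     * Fetch every term in the given vocabulary and return them as a set, preserving the order
     * in which the terms were encountered.
     *
     * @param bankUrl the base URL of the bank to query
     * @param authority the authority that owns the vocabulary
     * @param vocabId the identifier of the vocabulary
     * @param indexWithQualifiedIdentifier passed through to {@link #getAllTermsInVocab} to control how the underlying map is keyed
     * @return the terms found, or an empty set if the vocabulary could not be retrieved
     */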
    public static Set<VocabTermDataDTO> getAllTermsInVocabAsSet(String bankUrl, String authority, String vocabId, boolean indexWithQualifiedIdentifier) {
        
        Map<String,VocabTermDataDTO> allTermsMap = getAllTermsInVocab(bankUrl, authority, vocabId, indexWithQualifiedIdentifier);
        
        Set<VocabTermDataDTO> allTermsSet = new LinkedHashSet<VocabTermDataDTO>();
        allTermsSet.addAll(allTermsMap.values());
        
        return allTermsSet;
    }

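    /**
     * Fetch the nested Zthes representation of a vocabulary from the bank and flatten it into
     * a map of term identifier to term data.
     *
     * @param bankUrl the base URL of the bank to query
     * @param authority the authority that owns the vocabulary
     * @param vocabId the identifier of the vocabulary
     * @param indexWithQualifiedIdentifier if true the map is keyed by the fully qualified identifier
     *        (bankUrl||authority||vocabId||termId), otherwise by the plain term identifier
     * @return the terms found, or an empty map if the vocabulary could not be retrieved or parsed
     */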
    public static Map<String,VocabTermDataDTO> getAllTermsInVocab(String bankUrl, String authority, String vocabId, boolean indexWithQualifiedIdentifier) {
        
        Map<String,VocabTermDataDTO> termData = new LinkedHashMap<String,VocabTermDataDTO>();
        
        String url = API_GET_ZTHES_VOCAB_NESTED;
        url = url.replace("BANK_URL", bankUrl);
        url = url.replace("AUTHORITY", authority);
        url = url.replace("VOCABULARY", vocabId);

        HttpClient client = new HttpClient();
        client.getParams().setParameter(HttpMethodParams.RETRY_HANDLER, new DefaultHttpMethodRetryHandler());

        HttpMethod getMethod = new GetMethod(url);

        byte[] response = null;

        try {
            int statusCode = client.executeMethod(getMethod);

            if (statusCode != HttpStatus.SC_OK) {
                log.error("An error occurred when querying the Bank with URL: " + url + ". Returned status code: " + statusCode);
                // Nothing useful to parse from an error response, so hand back what we have so far (an empty map)
                return termData;
            }
            
            response = getMethod.getResponseBody();

            Document xmlDoc = readXML(response);

            // Get the root node and then parse through the contents
            Element docRoot = xmlDoc.getDocumentElement();
            NodeList topLevelNodes = docRoot.getChildNodes();
            

            for (int nodeListIndx = 0; nodeListIndx < topLevelNodes.getLength(); nodeListIndx++) {
                Node topNode = topLevelNodes.item(nodeListIndx);
                
                String topNodeName = topNode.getNodeName();
                
                if ( "term".equals(topNodeName) ) {
                    // We have a top term to work from..
                    termData = processTermNode(topNode, termData, null, bankUrl, authority, vocabId, indexWithQualifiedIdentifier);
                } else {
                    // Some other node that we don't care about so don't do anything..
                }
            }
        } catch (HttpException he) {
            log.error("HttpException thrown when querying the Bank: " + he.getMessage(), he);
        } catch (IOException ioe) {
            log.error("IOException thrown when querying the Bank: " + ioe.getMessage(), ioe);
        } catch (ParserConfigurationException pce) {
            log.error("ParserConfigurationException thrown when parsing the output from the bank: " + pce.getMessage(), pce);
        } catch (SAXException se) {
            log.error("SAXException thrown when parsing the output from the bank: " + se.getMessage(), se);
        } finally {
            // Always release the connection back to the HttpClient
            getMethod.releaseConnection();
        }

        return termData;
    }
    
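    /**
     * Recursively process a Zthes term node: record the term itself (keyed as requested) and then
     * descend into its "relation" children to pick up any child terms, carrying forward the path
     * taken so far so each term knows its position in the hierarchy.
     */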
    private static Map<String,VocabTermDataDTO> processTermNode(Node termNode, Map<String,VocabTermDataDTO> termsSoFar, Set<VocabPathDataDTO> pathData, String bankUrl, String authority, String vocabulary, boolean indexWithQualifiedIdentifier) {
        
        // Get the information from this node and then find the children NTs and process them
        NodeList termDetails = termNode.getChildNodes();
        
        // Loop through once to get this node's info
        String nodeId = null;
        String nodeName = null;
        Set<VocabPathDataDTO> newPathData = new LinkedHashSet<VocabPathDataDTO>();
        if ( pathData != null )
            newPathData.addAll(pathData);
        
        log.debug("termDetails.length = " + termDetails.getLength());
        
        for(int ctr = 0; ctr < termDetails.getLength(); ctr++) {
            // Break out of the loop if we have a node name and id
            if ( nodeId != null && nodeName != null ) {
                break;
            }

            Node detail = termDetails.item(ctr);

            if (detail.getNodeType() == Node.ELEMENT_NODE) {
                if (detail.getNodeName() != null && detail.getNodeName().equals("termName")) {
                    nodeName = detail.getTextContent();
                    log.debug("nodeName set to: " + nodeName);
                }
                if (detail.getNodeName() != null && detail.getNodeName().equals("termId")) {
                    nodeId = detail.getTextContent();
                    log.debug("nodeId set to: " + nodeId);
                }
            }
        }
        
        if ( nodeId != null ) {
            newPathData.add(new VocabPathDataDTO(nodeId, nodeName));
            
            String qualifiedId = bankUrl + "||" + authority + "||" + vocabulary + "||" + nodeId;
            VocabTermDataDTO termData = new VocabTermDataDTO(nodeId, qualifiedId, nodeName, newPathData);
            
            // Set up the map key based on either the qualified identifier or the particular term identifier depending on
            // what has been requested
            if ( indexWithQualifiedIdentifier )
                termsSoFar.put(qualifiedId,termData);
            else 
                termsSoFar.put(nodeId, termData);
        }
        
        // Loop through again to work through the children
        for(int ctr2=0; ctr2 < termDetails.getLength(); ctr2++) {
            Node detail = termDetails.item(ctr2);
            
            if (detail.getNodeType() == Node.ELEMENT_NODE) {
                if (detail.getNodeName() != null && detail.getNodeName().equals("relation")) {
                    // We're dealing with a child relation - get the actual child term and process it
                    
                    NodeList relationChildren = detail.getChildNodes();
                    for(int ctr3 = 0; ctr3 < relationChildren.getLength(); ctr3++) {
                        Node childNode = relationChildren.item(ctr3);
                        
                        if ( childNode.getNodeType() == Node.ELEMENT_NODE) {
                            if ( childNode.getNodeName() != null && childNode.getNodeName().equals("term") ) {
                                log.debug("About to call processTermNode with a new child term...");
                                termsSoFar = processTermNode(childNode, termsSoFar, newPathData, bankUrl, authority, vocabulary, indexWithQualifiedIdentifier);
                            }
                        }
                    }
                    
                    
                }
            }
            
        }
        
        return termsSoFar;
    }

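    /**
     * Parse the supplied bytes into a DOM document.
     */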
    private static Document readXML(byte[] xmlData) throws ParserConfigurationException, SAXException, IOException {
        DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
        // Enable secure processing to limit entity-expansion and related attacks in the returned XML
        dbf.setFeature(javax.xml.XMLConstants.FEATURE_SECURE_PROCESSING, true);
        DocumentBuilder db = dbf.newDocumentBuilder();
        return db.parse(new ByteArrayInputStream(xmlData));
    }

    
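    /**
     * Look up a term by its fully qualified identifier of the form
     * bankUrl||authority||vocabId||termId.
     *
     * @param qualifiedIdentifier the qualified identifier to resolve
     * @return the matching term data, or null if the identifier is malformed or no match is found
     */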
    public static VocabTermDataDTO lookupTermDetails(String qualifiedIdentifier) {
        
        VocabTermDataDTO retval = null;
        
        
        // Split the specified qualified identifier into the authority, vocab id and term id
        // so we can use this information to get the required information from the bank
        String[] splitIdentifier = qualifiedIdentifier.split("\\|\\|");
        if ( splitIdentifier.length != 4 ) {
            // We don't have enough parts of the identifier - can't continue...
            log.error("Not enough parts in the qualified identifier. Length: " + splitIdentifier.length + ". qualifiedIdentifier = " + qualifiedIdentifier);
            for(String aPart: splitIdentifier) {
                log.error("part: " + aPart);
            }
            // TODO
        } else {
            String bankUrl = splitIdentifier[0];
            String authority = splitIdentifier[1];
            String vocabId = splitIdentifier[2];
            String termId = splitIdentifier[3];
            
            // First go and get all of the terms in the vocab
            Map<String,VocabTermDataDTO> allTerms = getAllTermsInVocab(bankUrl, authority, vocabId, true);
            
            if ( allTerms.containsKey(qualifiedIdentifier) ) {
                // Term found - get it back
                retval = allTerms.get(qualifiedIdentifier);
                log.debug("Matching term found in the vocab and being returned");
            } else {
                // Term not found
                log.debug("No matching term found in the vocab. Qualified id: " + qualifiedIdentifier);
            }
        }
                
        return retval;
    }
    
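    /**
     * Look up a term by its individual components.
     *
     * @param bankUrl the base URL of the bank to query
     * @param authority the authority that owns the vocabulary
     * @param vocabId the identifier of the vocabulary
     * @param termId the identifier of the term within the vocabulary
     * @return the matching term data, or null if no match is found
     */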
    public static VocabTermDataDTO lookupTermDetails(String bankUrl, String authority, String vocabId, String termId) {
        
        VocabTermDataDTO retval = null;
        
        
        // First go and get all of the terms in the vocab
        Map<String,VocabTermDataDTO> allTerms = getAllTermsInVocab(bankUrl, authority, vocabId, false);

        if ( allTerms.containsKey(termId) ) {
            // Term found - get it back
            retval = allTerms.get(termId);
        } else {
            // Term not found
            log.debug("No matching term found in the vocab. authority id: " + authority + " vocab id: " + vocabId + " termId: " + termId);
        }
                
        return retval;
    }
    
}
