# -*- coding: utf-8 -*-
"""
Functions to update a graph representing citations between multiple ACS/Nature journals

"""

__authors__ = "Donna Löding, Alina Molkentin, Xinyi Tang, Judith Große, Malte Schokolowski"
__email__ = "cis-project2021@zbh.uni-hamburg.de"
__status__ = "Production"
#__copyright__ = ""
#__credits__ = ["", "", "", ""]
#__license__ = ""
#__version__ = ""
#__maintainer__ = ""


import sys

sys.path.append("../../")

from input.publication import Publication
from verarbeitung.get_pub_from_input import get_pub
from .Knoten_Vergleich import doi_listen_vergleichen
from .delete_nodes_edges import delete_nodes_and_edges
from .connect_new_input import connect_old_and_new_input
from .update_depth import update_depth
from .import_from_json import input_from_json


def get_old_input_dois(old_obj_input_list):
    '''
        :param old_obj_input_list:  list of publications retrieved from old json file
        :type old_obj_input_list:   List[Publication]

        function to return the dois of the old input-group publications retrieved from the json file
    '''

    # new list to save doi_url for each old publication of group input
    old_input_dois = []
    for pub in old_obj_input_list:
        if (pub.group == 0):
            old_input_dois.append(pub.doi_url)
    return old_input_dois
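
# Minimal sketch of the group filter above, kept as a comment so it is not executed on
# import; the stand-in object and doi are hypothetical, the real Publication constructor
# is not assumed here.
#
#   from types import SimpleNamespace
#   fake_pub = SimpleNamespace(group=0, doi_url="https://doi.org/10.xxxx/example")
#   get_old_input_dois([fake_pub])   # -> ["https://doi.org/10.xxxx/example"]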

def get_new_input_dois(new_input, test_var):
    '''
        :param new_input:   input list of dois from UI
        :type new_input:    List[String]

        :param test_var:    variable to differentiate between test and url call
        :type test_var:     boolean

        function to return the dois for the new input urls
    '''

    # new list to save doi_url for each new input url
    new_input_dois = []
    for new_node in new_input:
        # retrieves information and adds to new list if successful 
        pub = get_pub(new_node, test_var)
        if not isinstance(pub, Publication):
            # get_pub returned something other than a Publication, print it and skip this doi
            print(pub)
            continue

        new_input_dois.append(pub.doi_url)
    return new_input_dois
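
# Hypothetical usage sketch for get_new_input_dois, kept as a comment; the doi below is
# made up, and resolving it goes through get_pub, whose behaviour depends on test_var.
#
#   get_new_input_dois(["https://doi.org/10.xxxx/example"], test_var=True)
#   # -> list of doi_urls for every input that get_pub could resolve to a Publication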


def update_graph(new_doi_input_list, json_file, search_depth, search_height, test_var = False):
    '''
        :param new_doi_input_list:  input list of dois from UI
        :type new_doi_input_list:   List[String]

        :param json_file:           json file from the previous construction call containing the old nodes and edges
        :type json_file:            String

        :param search_depth:        new search depth for the updated graph
        :type search_depth:         int

        :param search_height:       new search height for the updated graph
        :type search_height:        int

        :param test_var:            variable to differentiate between test and url call
        :type test_var:             boolean

        function to compare old and new input, start node/edge removal and return the updated sets of nodes and edges
    '''

    # gets information from previous construction call
    old_obj_input_list, old_edges_list = input_from_json(json_file)

    # one global list to save the process of removing unneeded publications and one to save valid edges
    global processed_list, valid_edges
    processed_list = old_obj_input_list
    valid_edges = old_edges_list


    # get dois from lists to compare for differences
    old_doi_input_list = get_old_input_dois(old_obj_input_list)
    new_doi_input_list = get_new_input_dois(new_doi_input_list, test_var)

    # retrieve which publications are already known, removed, inserted
    common_nodes, inserted_nodes, deleted_nodes = doi_listen_vergleichen(old_doi_input_list, new_doi_input_list)

    # deletes publications and edges from processed_list if publications can no longer be reached
    if (len(deleted_nodes) > 0):
        delete_nodes_and_edges(processed_list, common_nodes, valid_edges)
    
    old_search_depth, old_search_height = update_depth(processed_list, valid_edges, search_depth, search_height, test_var)
    
    if (len(inserted_nodes) > 0):
        connect_old_and_new_input(processed_list, valid_edges, inserted_nodes, old_search_depth, old_search_height, search_depth, search_height, test_var)

    return processed_list, valid_edges
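
# Hedged end-to-end sketch (not executed): the file name, doi and limits below are
# hypothetical and only illustrate the expected call signature of update_graph.
#
#   nodes, edges = update_graph(
#       new_doi_input_list=["https://doi.org/10.xxxx/example"],
#       json_file="json_text.json",
#       search_depth=2,
#       search_height=2,
#       test_var=False,
#   )
#   # nodes: updated List[Publication], edges: updated list of [doi_url, doi_url] pairs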