Commit 03baca25 authored by Malte Schokolowski

added groups to Processing and json

parent b51d5f07
1 merge request: !7 (Main)
 # -*- coding: utf-8 -*-
 """
-Functions to generate a graph representing citations between multiple JCIM ACS journals
+Functions to generate a graph representing citations between multiple ACS/Nature journals
 """
@@ -41,7 +41,7 @@ def initialize_nodes_list(doi_input_list):
 # adds a node for every publication unknown
 # adds edges for citations between publications
-def create_graph_structure_citations(pub, search_depth, search_depth_max):
+def create_graph_structure_citations(pub, search_height, search_height_max):
     for citation in pub._citations:
         not_in_nodes = True
         for node in nodes:
@@ -50,7 +50,8 @@ def create_graph_structure_citations(pub, search_depth, search_depth_max):
                 not_in_nodes = False
                 break
         if (not_in_nodes):
-            if (search_depth <= search_depth_max):
+            if (search_height <= search_height_max):
+                #citation.group = "citation"
                 nodes.append(citation)
                 edges.append([pub.doi_url,citation.doi_url])
@@ -62,7 +63,7 @@ def create_graph_structure_citations(pub, search_depth, search_depth_max):
 # adds a node for every publication unknown
 # adds edges for references between publications
-def create_graph_structure_references(pub, search_height, search_height_max):
+def create_graph_structure_references(pub, search_depth, search_depth_max):
     for reference in pub._references:
         not_in_nodes = True
         for node in nodes:
@@ -71,7 +72,8 @@ def create_graph_structure_references(pub, search_height, search_height_max):
                 not_in_nodes = False
                 break
         if (not_in_nodes):
-            if (search_height <= search_height_max):
+            if (search_depth <= search_depth_max):
+                #reference.group = "reference"
                 nodes.append(reference)
                 edges.append([reference.doi_url,pub.doi_url])
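
Both create_graph_structure_citations and create_graph_structure_references follow the same pattern in this diff: skip publications that are already known nodes, add the new publication as a node while the level limit is not exceeded, and record a directed edge. The commit also prepares, but leaves commented out, a group tag on each new node. The following minimal sketch illustrates that pattern; the Publication class, the helper names add_citation_node/add_reference_node and the module-level nodes/edges lists are stand-ins, since the project's real classes are not part of this diff.

class Publication:
    # stand-in for the project's publication object; only the attributes
    # visible in the diff are modelled here
    def __init__(self, doi_url, title=None, contributors=None):
        self.doi_url = doi_url
        self.title = title
        self.contributors = contributors
        self.group = None          # would hold "citation" or "reference"
        self._citations = []
        self._references = []

nodes = []
edges = []

def add_citation_node(pub, citation, search_height, search_height_max):
    # skip citations that are already known nodes
    if any(node.doi_url == citation.doi_url for node in nodes):
        return
    if search_height <= search_height_max:
        citation.group = "citation"                    # the tag the commit leaves commented out
        nodes.append(citation)
        edges.append([pub.doi_url, citation.doi_url])  # edge stored as [pub, citation], as in the diff

def add_reference_node(pub, reference, search_depth, search_depth_max):
    # same pattern for references, with the edge direction reversed
    if any(node.doi_url == reference.doi_url for node in nodes):
        return
    if search_depth <= search_depth_max:
        reference.group = "reference"
        nodes.append(reference)
        edges.append([reference.doi_url, pub.doi_url])  # edge stored as [reference, pub], as in the diff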
@@ -81,21 +83,21 @@ def create_graph_structure_references(pub, search_height, search_height_max):
-# recursive function to implement depth-first-search on citations
+# recursive function to implement height-first-search on citations
 # doi_citations: input list of citet dois
-# search_depth: current search_depth of depth-first-search
-# search_depth_max: maximal search_depth for dfs
-def process_citations_rec(doi_citations, search_depth, search_depth_max):
-    # depth of search is increased by 1 with each recursive call
-    search_depth += 1
+# search_height: current search_height of height-first-search
+# search_height_max: maximal search_height for dfs
+def process_citations_rec(doi_citations, search_height, search_height_max):
+    # height of search is increased by 1 with each recursive call
+    search_height += 1
     # create class object for every citation from list
     for pub_doi in doi_citations:
         pub = input(pub_doi)
-        create_graph_structure_citations(pub, search_depth, search_depth_max)
-        # If the maximum depth has not yet been reached, all references from the publication
+        create_graph_structure_citations(pub, search_height, search_height_max)
+        # If the maximum height has not yet been reached, all references from the publication
         # are written to an array and the function is called again with this array.
-        if (search_depth < search_depth_max):
+        if (search_height < search_height_max):
            citations_list = []
            for citation in pub._citations:
@@ -105,25 +107,25 @@ def process_citations_rec(doi_citations, search_depth, search_depth_max):
                citations_list.append(citation.doi_url)
            # recursive call of function.
-           process_citations_rec(citations_list, search_depth, search_depth_max)
+           process_citations_rec(citations_list, search_height, search_height_max)
-# recursive function to implement depth-first-search on references
+# recursive function to implement height-first-search on references
 # doi_references: input list of referenced dois
-# search_height: current search_height of depth-first-search
-# search_height_max: maximal search_height for dfs
-def process_references_rec(doi_references, search_height, search_height_max):
-    # The height is increased by 1 with each recursive call
-    search_height += 1
+# search_depth: current search_depth of height-first-search
+# search_depth_max: maximal search_depth for dfs
+def process_references_rec(doi_references, search_depth, search_depth_max):
+    # The depth is increased by 1 with each recursive call
+    search_depth += 1
     # create class object for every citation from list
     for pub_doi in doi_references:
         pub = input(pub_doi)
-        create_graph_structure_references(pub, search_height, search_height_max)
-        # If the maximum height has not yet been reached, all references from the publication
+        create_graph_structure_references(pub, search_depth, search_depth_max)
+        # If the maximum depth has not yet been reached, all references from the publication
         # are written to an array and the function is called again with this array.
-        if (search_height < search_height_max):
+        if (search_depth < search_depth_max):
            references_list = []
            for reference in pub._references:
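
process_citations_rec and process_references_rec wrap the structure helpers in the same recursive scheme: the level counter is raised by one per call, every DOI on the current level is fetched and expanded, and the walk stops once the configured maximum is reached. Below is a hedged sketch of both walkers that continues the hypothetical helpers from the previous block; fetch_publication stands in for the project's input(pub_doi) call, and the filtering done in the omitted diff context is skipped.

def fetch_publication(doi_url):
    # stand-in for the project's input(pub_doi) call, which builds a
    # publication object (including its citations and references) from a DOI
    return Publication(doi_url)

def process_citations_sketch(doi_citations, search_height, search_height_max):
    # the height of the search grows by 1 with each recursive call
    search_height += 1
    for pub_doi in doi_citations:
        pub = fetch_publication(pub_doi)
        for citation in pub._citations:
            add_citation_node(pub, citation, search_height, search_height_max)
        # recurse on the next level only while the maximum height is not reached
        if search_height < search_height_max:
            next_level = [citation.doi_url for citation in pub._citations]
            process_citations_sketch(next_level, search_height, search_height_max)

def process_references_sketch(doi_references, search_depth, search_depth_max):
    # mirror image of the citations walker, following pub._references instead
    search_depth += 1
    for pub_doi in doi_references:
        pub = fetch_publication(pub_doi)
        for reference in pub._references:
            add_reference_node(pub, reference, search_depth, search_depth_max)
        if search_depth < search_depth_max:
            next_level = [reference.doi_url for reference in pub._references]
            process_references_sketch(next_level, search_depth, search_depth_max)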
@@ -133,23 +135,23 @@ def process_references_rec(doi_references, search_height, search_height_max):
               references_list.append(reference.doi_url)
           # recursive call of function.
-          process_references_rec(references_list, search_height, search_height_max)
+          process_references_rec(references_list, search_depth, search_depth_max)
-def process_main(doi_input_list, search_depth, search_height):
+def process_main(doi_input_list, search_height, search_depth):
     # ERROR-Handling doi_array = NULL
     if (len(doi_input_list) == 0):
         print("Error, no input data")
-    # ERROR- if a negative number is entered for depth
-    if (search_depth < 0):
-        print("Error, search_depth of search must be positive")
     # ERROR- if a negative number is entered for height
     if (search_height < 0):
-        print("Error, search_height of search must be positive")
+        print("Error, search_height of search must be positive")
+    # ERROR- if a negative number is entered for depth
+    if (search_depth < 0):
+        print("Error, search_depth of search must be positive")
     # create empty array for the nodes
     # create empty array for the edges
@@ -158,8 +160,8 @@ def process_main(doi_input_list, search_depth, search_height):
     edges = []
     initialize_nodes_list(doi_input_list)
-    process_citations_rec(doi_input_list, 0, search_depth)
-    process_references_rec(doi_input_list, 0, search_height)
+    process_citations_rec(doi_input_list, 0, search_height)
+    process_references_rec(doi_input_list, 0, search_depth)
     output_to_json(nodes,edges)
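
After this commit, process_main drives the citation walk with search_height and the reference walk with search_depth, the opposite of the previous argument use, while the validation only prints a message and continues. A minimal sketch of that control flow, again reusing the hypothetical helpers from the earlier blocks; the node-list initialisation and the JSON export visible in the hunk are left out here.

def process_main_sketch(doi_input_list, search_height, search_depth):
    # mirrors the print-based error handling shown in the hunk above
    if len(doi_input_list) == 0:
        print("Error, no input data")
    if search_height < 0:
        print("Error, search_height of search must be positive")
    if search_depth < 0:
        print("Error, search_depth of search must be positive")
    # after this commit, citations are walked with search_height and
    # references with search_depth
    process_citations_sketch(doi_input_list, 0, search_height)
    process_references_sketch(doi_input_list, 0, search_depth)
    return nodes, edges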
@@ -11,7 +11,9 @@ def output_to_json(V,E):
         new_dict["name"] = node.title
         new_dict["author"] = node.contributors
         #new_dict["year"] = node.publication_date
+        #new_dict["journal"] = node.journal
         new_dict["doi"] = node.doi_url
+        #new_dict["group"] = node.group
         list_of_node_dicts.append(new_dict)
     for edge in E:
         new_dict_2 = dict()
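
The second file in the commit extends the per-node dictionary with two further, still commented-out fields, "journal" and "group". The sketch below shows what the writer could produce once those fields are enabled; the edge key names ("source"/"target"), the top-level layout and the output file name are assumptions, because the diff does not show that part of output_to_json.

import json

def output_to_json_sketch(V, E, path="graph.json"):
    # one dictionary per node, including the "group" field this commit prepares
    list_of_node_dicts = []
    for node in V:
        list_of_node_dicts.append({
            "name": node.title,
            "author": node.contributors,
            "doi": node.doi_url,
            "group": node.group,
        })
    # one dictionary per edge; the real key names are not visible in the diff
    list_of_edge_dicts = [{"source": source, "target": target} for source, target in E]
    with open(path, "w") as json_file:
        json.dump({"nodes": list_of_node_dicts, "links": list_of_edge_dicts}, json_file, indent=4)

With the Publication stand-in from the first sketch, output_to_json_sketch(nodes, edges) would serialise whatever the walkers collected into a single JSON file.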