CiS Projekt (Ockenden, Samuel) · Commit 03baca25
Authored 3 years ago by Malte Schokolowski

added groups to Processing and json

Parent: b51d5f07
Merge request: !7 (Main)
No related branches or tags found.

Showing 2 changed files with 37 additions and 33 deletions:
verarbeitung/Processing.py  +35 −33
verarbeitung/json_demo.py   +2 −0
verarbeitung/Processing.py (+35 −33)
```diff
 # -*- coding: utf-8 -*-
 """
-Functions to generate a graph representing citations between multiple JCIM ACS journals
+Functions to generate a graph representing citations between multiple ACS/Nature journals
 """

@@ -41,7 +41,7 @@ def initialize_nodes_list(doi_input_list):
 # adds a node for every publication unknown
 # adds edges for citations between publications
-def create_graph_structure_citations(pub, search_depth, search_depth_max):
+def create_graph_structure_citations(pub, search_height, search_height_max):
     for citation in pub._citations:
         not_in_nodes = True
         for node in nodes:

@@ -50,7 +50,8 @@ def create_graph_structure_citations(pub, search_depth, search_depth_max):
                 not_in_nodes = False
                 break
         if (not_in_nodes):
-            if (search_depth <= search_depth_max):
+            if (search_height <= search_height_max):
+                #citation.group = "citation"
                 nodes.append(citation)
                 edges.append([pub.doi_url, citation.doi_url])

@@ -62,7 +63,7 @@ def create_graph_structure_citations(pub, search_depth, search_depth_max):
 # adds a node for every publication unknown
 # adds edges for references between publications
-def create_graph_structure_references(pub, search_height, search_height_max):
+def create_graph_structure_references(pub, search_depth, search_depth_max):
     for reference in pub._references:
         not_in_nodes = True
         for node in nodes:

@@ -71,7 +72,8 @@ def create_graph_structure_references(pub, search_height, search_height_max):
                 not_in_nodes = False
                 break
         if (not_in_nodes):
-            if (search_height <= search_height_max):
+            if (search_depth <= search_depth_max):
+                #reference.group = "reference"
                 nodes.append(reference)
                 edges.append([reference.doi_url, pub.doi_url])
```
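The two hunks above also stage the group tagging that the commit message announces: the lines "#citation.group = "citation"" and "#reference.group = "reference"" are added but still commented out. A minimal sketch of the citation branch with that tag enabled is shown below; the simplified Publication class, the membership test, and the module-level nodes/edges lists are assumptions for illustration, not the repository's own code.

```python
# Sketch only: a simplified stand-in for the project's publication objects.
class Publication:
    def __init__(self, doi_url, citations=None):
        self.doi_url = doi_url
        self._citations = citations or []
        self.group = None  # attribute the commented-out tagging line would set

nodes = []
edges = []

def create_graph_structure_citations(pub, search_height, search_height_max):
    # add every not-yet-known citation as a node and connect it to pub
    for citation in pub._citations:
        not_in_nodes = all(node.doi_url != citation.doi_url for node in nodes)
        if not_in_nodes and search_height <= search_height_max:
            citation.group = "citation"  # the tagging this commit prepares
            nodes.append(citation)
            edges.append([pub.doi_url, citation.doi_url])
```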
```diff
@@ -81,21 +83,21 @@ def create_graph_structure_references(pub, search_height, search_height_max):
-# recursive function to implement depth-first-search on citations
+# recursive function to implement height-first-search on citations
 # doi_citations: input list of citet dois
-# search_depth: current search_depth of depth-first-search
-# search_depth_max: maximal search_depth for dfs
-def process_citations_rec(doi_citations, search_depth, search_depth_max):
-    # depth of search is increased by 1 with each recursive call
-    search_depth += 1
+# search_height: current search_height of height-first-search
+# search_height_max: maximal search_height for dfs
+def process_citations_rec(doi_citations, search_height, search_height_max):
+    # height of search is increased by 1 with each recursive call
+    search_height += 1
 
     # create class object for every citation from list
     for pub_doi in doi_citations:
         pub = input(pub_doi)
-        create_graph_structure_citations(pub, search_depth, search_depth_max)
-        # If the maximum depth has not yet been reached, all references from the publication
+        create_graph_structure_citations(pub, search_height, search_height_max)
+        # If the maximum height has not yet been reached, all references from the publication
         # are written to an array and the function is called again with this array.
-        if (search_depth < search_depth_max):
+        if (search_height < search_height_max):
             citations_list = []
             for citation in pub._citations:
```
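The hunk above renames the counter that drives the recursion over citations from search_depth to search_height. For orientation, here is a small, self-contained sketch of that recursion pattern: the current level is incremented once per call, and the function only recurses while the maximum has not been reached. The fake citation table stands in for the project's input(pub_doi) lookup and is invented for the example.

```python
# Self-contained sketch of the height-limited recursion pattern; the citation
# table below is made up and only replaces the project's DOI lookup.
FAKE_CITATIONS = {
    "doi/A": ["doi/B", "doi/C"],
    "doi/B": ["doi/D"],
    "doi/C": [],
    "doi/D": [],
}

edges = []

def process_citations_rec(doi_citations, search_height, search_height_max):
    # the current level grows by one with every recursive call
    search_height += 1
    for pub_doi in doi_citations:
        citations = FAKE_CITATIONS.get(pub_doi, [])
        for cited_doi in citations:
            edges.append([pub_doi, cited_doi])
        # recurse only while the maximum height has not been reached
        if search_height < search_height_max:
            process_citations_rec(citations, search_height, search_height_max)

process_citations_rec(["doi/A"], 0, 2)
print(edges)  # [['doi/A', 'doi/B'], ['doi/A', 'doi/C'], ['doi/B', 'doi/D']]
```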
```diff
@@ -105,25 +107,25 @@ def process_citations_rec(doi_citations, search_depth, search_depth_max):
                     citations_list.append(citation.doi_url)
         # recursive call of function.
-        process_citations_rec(citations_list, search_depth, search_depth_max)
+        process_citations_rec(citations_list, search_height, search_height_max)
 
-# recursive function to implement depth-first-search on references
+# recursive function to implement height-first-search on references
 # doi_references: input list of referenced dois
-# search_height: current search_height of depth-first-search
-# search_height_max: maximal search_height for dfs
-def process_references_rec(doi_references, search_height, search_height_max):
-    # The height is increased by 1 with each recursive call
-    search_height += 1
+# search_depth: current search_depth of height-first-search
+# search_depth_max: maximal search_depth for dfs
+def process_references_rec(doi_references, search_depth, search_depth_max):
+    # The depth is increased by 1 with each recursive call
+    search_depth += 1
 
     # create class object for every citation from list
     for pub_doi in doi_references:
         pub = input(pub_doi)
-        create_graph_structure_references(pub, search_height, search_height_max)
-        # If the maximum height has not yet been reached, all references from the publication
+        create_graph_structure_references(pub, search_depth, search_depth_max)
+        # If the maximum depth has not yet been reached, all references from the publication
         # are written to an array and the function is called again with this array.
-        if (search_height < search_height_max):
+        if (search_depth < search_depth_max):
             references_list = []
             for reference in pub._references:

@@ -133,23 +135,23 @@ def process_references_rec(doi_references, search_height, search_height_max):
                     references_list.append(reference.doi_url)
         # recursive call of function.
-        process_references_rec(references_list, search_height, search_height_max)
+        process_references_rec(references_list, search_depth, search_depth_max)
 
-def process_main(doi_input_list, search_depth, search_height):
+def process_main(doi_input_list, search_height, search_depth):
     # ERROR-Handling doi_array = NULL
     if (len(doi_input_list) == 0):
         print("Error, no input data")
 
-    # ERROR- if a negative number is entered for depth
-    if (search_depth < 0):
-        print("Error, search_depth of search must be positive")
-
     # ERROR- if a negative number is entered for height
     if (search_height < 0):
         print("Error, search_height of search must be positive")
+
+    # ERROR- if a negative number is entered for depth
+    if (search_depth < 0):
+        print("Error, search_depth of search must be positive")
 
     # create empty array for the nodes
     # create empty array for the edges
```
```diff
@@ -158,8 +160,8 @@ def process_main(doi_input_list, search_depth, search_height):
     edges = []
 
     initialize_nodes_list(doi_input_list)
-    process_citations_rec(doi_input_list, 0, search_depth)
-    process_references_rec(doi_input_list, 0, search_height)
+    process_citations_rec(doi_input_list, 0, search_height)
+    process_references_rec(doi_input_list, 0, search_depth)
     output_to_json(nodes, edges)
```
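After this commit, process_main expects the height argument before the depth argument and forwards search_height to the citation traversal and search_depth to the reference traversal. A call would therefore look roughly like the sketch below; the import path and the DOI string are placeholders, not values taken from the repository.

```python
# Hypothetical usage of the new signature
# process_main(doi_input_list, search_height, search_depth).
from Processing import process_main  # assumes Processing.py is importable from the current path

doi_input_list = ["https://doi.org/10.1000/example"]  # placeholder DOI
process_main(doi_input_list, 2, 2)  # citations up to height 2, references up to depth 2
```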
verarbeitung/json_demo.py (+2 −0)
```diff
@@ -11,7 +11,9 @@ def output_to_json(V,E):
         new_dict["name"] = node.title
         new_dict["author"] = node.contributors
         #new_dict["year"] = node.publication_date
         #new_dict["journal"] = node.journal
         new_dict["doi"] = node.doi_url
+        #new_dict["group"] = node.group
         list_of_node_dicts.append(new_dict)
     for edge in E:
         new_dict_2 = dict()
```
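The json_demo.py hunk stages the same group information for the JSON export, again commented out for now. The sketch below shows one way an output_to_json along these lines could serialize the nodes and edges once the field is enabled; the edge key names, the output file name, and the use of getattr are assumptions based only on this hunk, not a copy of the repository's function.

```python
import json

# Sketch of a node/edge JSON export in the spirit of output_to_json(V, E);
# node attributes follow the diff (title, contributors, doi_url, group),
# while the edge keys and the file name are assumptions.
def output_to_json(V, E, filename="graph_demo.json"):
    list_of_node_dicts = []
    list_of_edge_dicts = []
    for node in V:
        new_dict = dict()
        new_dict["name"] = node.title
        new_dict["author"] = node.contributors
        new_dict["doi"] = node.doi_url
        new_dict["group"] = getattr(node, "group", None)  # the field this commit prepares
        list_of_node_dicts.append(new_dict)
    for edge in E:
        list_of_edge_dicts.append({"source": edge[0], "target": edge[1]})
    with open(filename, "w") as outfile:
        json.dump({"nodes": list_of_node_dicts, "links": list_of_edge_dicts}, outfile)
```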