Commit ab2d0402 authored by AndiMajore

merge

Former-commit-id: 7011413780a80c8c670fe237b810b0619db76075 [formerly f85df397a43d6fa0dc4025ca06c9653fe98b5e30]
Former-commit-id: cb6c27814cbed6fc6a543414be3277908b250450
parents 6d2aecf7 317b02de
......@@ -83,7 +83,7 @@ services:
- db
networks:
- drugstone_net
flower:
flower:
image: mher/flower
container_name: drugstone_flower
env_file:
......
......@@ -3,6 +3,7 @@ from celery.schedules import crontab
CELERY_BEAT_SCHEDULE = {
'update_db': {
'task': 'drugstone.tasks.task_update_db_from_nedrex',
'schedule': crontab(day_of_week=1, hour=5, minute=0),
'schedule': crontab(day_of_week=1, hour=6, minute=0),
# 'schedule': crontab(minute='*/1'),
},
}
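Note on the schedule change above: in Celery's crontab notation, day_of_week=1 is Monday (0/7 is Sunday), so the NeDRex database update now fires weekly on Mondays at 06:00 instead of 05:00. A minimal sketch of the resulting beat entry, assuming the task path shown above is the one registered in the app:

# Minimal sketch of the beat entry after this change.
from celery.schedules import crontab

CELERY_BEAT_SCHEDULE = {
    'update_db': {
        'task': 'drugstone.tasks.task_update_db_from_nedrex',
        'schedule': crontab(day_of_week=1, hour=6, minute=0),  # Mondays, 06:00
    },
}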
......@@ -27,7 +27,6 @@ from drugstone.settings import DEFAULTS
def get_ppi_ds(source, licenced):
try:
ds = models.PPIDataset.objects.filter(name__iexact=source, licenced=licenced).last()
ds.id
return ds
except:
if licenced:
......@@ -38,7 +37,6 @@ def get_ppi_ds(source, licenced):
def get_pdi_ds(source, licenced):
try:
ds = models.PDIDataset.objects.filter(name__iexact=source, licenced=licenced).last()
ds.id
return ds
except:
if licenced:
......@@ -49,7 +47,6 @@ def get_pdi_ds(source, licenced):
def get_pdis_ds(source, licenced):
try:
ds = models.PDisDataset.objects.filter(name__iexact=source, licenced=licenced).last()
ds.id
return ds
except:
if licenced:
......@@ -60,7 +57,6 @@ def get_pdis_ds(source, licenced):
def get_drdis_ds(source, licenced):
try:
ds = models.DrDiDataset.objects.filter(name__iexact=source, licenced=licenced).last()
ds.id
return ds
except:
if licenced:
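Note on the four dataset helpers above: the removed bare `ds.id` previously raised an AttributeError when the filter matched nothing, which pushed execution into the (truncated) except branch; without it, a missing dataset now appears to come back as None unless handled by the caller. A hedged sketch of the shared lookup pattern; the fallback body is cut off in this diff, so the unlicensed retry below is an assumption:

# Hedged sketch of the shared lookup-with-fallback pattern; the except branch
# is truncated in this diff, so the unlicensed retry is an assumption.
def get_ppi_ds(source, licenced):
    ds = models.PPIDataset.objects.filter(name__iexact=source, licenced=licenced).last()
    if ds is not None:
        return ds
    if licenced:
        # assumed fallback: retry with the unlicensed variant of the same dataset
        return get_ppi_ds(source, False)
    return None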
......@@ -76,6 +72,11 @@ class TaskView(APIView):
parameters = request.data['parameters']
licenced = parameters.get('licenced', False)
print(models.PDIDataset.objects.all())
print(get_ppi_ds(parameters.get('ppi_dataset', DEFAULTS['ppi']), licenced))
print(get_pdi_ds(parameters.get('pdi_dataset', DEFAULTS['pdi']), licenced))
# find databases based on parameter strings
parameters['ppi_dataset'] = PPIDatasetSerializer().to_representation(
get_ppi_ds(parameters.get('ppi_dataset', DEFAULTS['ppi']), licenced))
......@@ -660,60 +661,23 @@ class TissueExpressionView(APIView):
def get(self, request) -> Response:
tissue = Tissue.objects.get(id=request.query_params.get('tissue'))
if request.query_params.get('proteins'):
ids = json.loads(request.query_params.get('proteins'))
proteins = list(Protein.objects.filter(id__in=ids).all())
elif request.query_params.get('token'):
proteins = []
task = Task.objects.get(token=request.query_params['token'])
result = task_result(task)
network = result['network']
node_attributes = result.get('node_attributes')
if not node_attributes:
node_attributes = {}
node_types = node_attributes.get('node_types')
if not node_types:
node_types = {}
parameters = json.loads(task.parameters)
seeds = parameters['seeds']
nodes = network['nodes']
for node in nodes + seeds:
node_type = node_types.get(node)
details = None
if node_type == 'protein':
if details:
proteins.append(details)
else:
try:
prot = Protein.objects.get(uniprot_code=node)
if prot not in proteins:
proteins.append(Protein.objects.get(uniprot_code=node))
except Protein.DoesNotExist:
pass
pt_expressions = {}
for protein in proteins:
try:
expression_level = ExpressionLevel.objects.get(protein=protein, tissue=tissue)
pt_expressions[
ProteinSerializer().to_representation(protein)['drugstone_id']] = expression_level.expression_level
except ExpressionLevel.DoesNotExist:
pt_expressions[ProteinSerializer().to_representation(protein)['drugstone_id']] = None
return Response(pt_expressions)
proteins = request.query_params.get('proteins')
token = request.query_params.get('token')
return self.get_tissue_expression(tissue, proteins, token)
def post(self, request) -> Response:
tissue = Tissue.objects.get(id=request.data.get('tissue'))
proteins = request.data.get('proteins')
token = request.data.get('token')
return self.get_tissue_expression(tissue, proteins, token)
if request.data.get('proteins'):
ids = json.loads(request.data.get('proteins'))
def get_tissue_expression(self, tissue, proteins, token):
if proteins is not None:
ids = json.loads(proteins)
proteins = list(Protein.objects.filter(id__in=ids).all())
elif request.data.get('token'):
elif token is not None:
proteins = []
task = Task.objects.get(token=request.data['token'])
task = Task.objects.get(token=token)
result = task_result(task)
network = result['network']
node_attributes = result.get('node_attributes')
......
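The GET/POST refactor above moves the previously duplicated body into a shared get_tissue_expression helper; the tail of that helper is truncated in this diff. The following is a hedged reconstruction from the removed GET body, not the verbatim new code:

# Hedged reconstruction of the consolidated helper, based on the removed GET body.
def get_tissue_expression(self, tissue, proteins, token):
    if proteins is not None:
        ids = json.loads(proteins)
        proteins = list(Protein.objects.filter(id__in=ids).all())
    elif token is not None:
        proteins = []
        task = Task.objects.get(token=token)
        result = task_result(task)
        network = result['network']
        node_types = (result.get('node_attributes') or {}).get('node_types') or {}
        seeds = json.loads(task.parameters)['seeds']
        for node in network['nodes'] + seeds:
            if node_types.get(node) == 'protein':
                try:
                    prot = Protein.objects.get(uniprot_code=node)
                    if prot not in proteins:
                        proteins.append(prot)
                except Protein.DoesNotExist:
                    pass
    pt_expressions = {}
    for protein in proteins:
        drugstone_id = ProteinSerializer().to_representation(protein)['drugstone_id']
        try:
            level = ExpressionLevel.objects.get(protein=protein, tissue=tissue)
            pt_expressions[drugstone_id] = level.expression_level
        except ExpressionLevel.DoesNotExist:
            pt_expressions[drugstone_id] = None
    return Response(pt_expressions)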
......@@ -4,7 +4,7 @@ python3 manage.py makemigrations drugstone
python3 manage.py migrate
python3 manage.py createfixtures
python3 manage.py cleanuptasks
#python3 manage.py populate_db --update -a
#python3 manage.py make_graphs
python3 manage.py populate_db --update -a
python3 manage.py make_graphs
/usr/bin/supervisord -c "/etc/supervisor/conf.d/supervisord.conf"
from tasks.util.custom_edges import add_edges
from tasks.util.read_graph_tool_graph import read_graph_tool_graph
from tasks.util.scores_to_results import scores_to_results
from tasks.util.edge_weights import edge_weights
......@@ -172,6 +173,8 @@ def betweenness_centrality(task_hook: TaskHook):
id_space = task_hook.parameters["config"].get("identifier","symbol")
custom_edges = task_hook.parameters.get("custom_edges", False)
# Parsing input file.
task_hook.set_progress(0 / 3.0, "Parsing input.")
filename = f"{id_space}_{ppi_dataset['name']}-{pdi_dataset['name']}"
......@@ -187,6 +190,11 @@ def betweenness_centrality(task_hook: TaskHook):
include_non_approved_drugs,
target=search_target
)
if custom_edges:
edges = task_hook.parameters.get("input_network")['edges']
g = add_edges(g, edges)
weights = edge_weights(g, hub_penalty)
# Set number of threads if OpenMP support is enabled.
......
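The same addition recurs in every task module touched by this commit (betweenness, closeness and degree centrality, multi-Steiner, network proximity, TrustRank): read the custom_edges flag, load the precomputed .gt graph, and merge client-supplied edges before computing edge weights. A hedged sketch of the recurring pattern; the shape of input_network is inferred from the add_edges helper further down in this diff:

# Hedged sketch of the recurring addition; 'input_network' is assumed to carry
# {"edges": [{"from": <internal_id>, "to": <internal_id>}, ...]}.
custom_edges = task_hook.parameters.get("custom_edges", False)
...
if custom_edges:
    g = add_edges(g, task_hook.parameters.get("input_network")['edges'])
weights = edge_weights(g, hub_penalty)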
import numpy as np
from tasks.util.custom_edges import add_edges
from tasks.util.read_graph_tool_graph import read_graph_tool_graph
from tasks.util.scores_to_results import scores_to_results
from tasks.util.edge_weights import edge_weights
......@@ -173,13 +174,18 @@ def closeness_centrality(task_hook: TaskHook):
id_space = task_hook.parameters["config"].get("identifier", "symbol")
node_name_attribute = "internal_id"
custom_edges = task_hook.parameters.get("custom_edges", False)
filename = f"{id_space}_{ppi_dataset['name']}-{pdi_dataset['name']}"
if ppi_dataset['licenced'] or pdi_dataset['licenced']:
filename += "_licenced"
filename = os.path.join(task_hook.data_directory, filename + ".gt")
g, seed_ids, drug_ids = read_graph_tool_graph(filename, seeds, id_space, max_deg, include_indirect_drugs, include_non_approved_drugs, search_target)
if custom_edges:
edges = task_hook.parameters.get("input_network")['edges']
g = add_edges(g, edges)
task_hook.set_progress(1 / 4.0, "Computing edge weights.")
weights = edge_weights(g, hub_penalty)
......
from tasks.util.read_graph_tool_graph import read_graph_tool_graph
from tasks.util.scores_to_results import scores_to_results
from tasks.util.custom_edges import add_edges
from tasks.task_hook import TaskHook
import graph_tool as gt
import os.path
......@@ -147,6 +148,8 @@ def degree_centrality(task_hook: TaskHook):
search_target = task_hook.parameters.get("target", "drug-target")
filterPaths = task_hook.parameters.get("filter_paths", True)
custom_edges = task_hook.parameters.get("custom_edges", False)
# Parsing input file.
task_hook.set_progress(0 / 3.0, "Parsing input.")
......@@ -160,6 +163,10 @@ def degree_centrality(task_hook: TaskHook):
# g, seed_ids, viral_protein_ids, drug_ids = read_graph_tool_graph(file_path, seeds, datasets, ignored_edge_types, max_deg, ignore_non_seed_baits, False, include_non_approved_drugs)
g, seed_ids, drug_ids = read_graph_tool_graph(filename, seeds, id_space, max_deg, False, include_non_approved_drugs, search_target)
if custom_edges:
edges = task_hook.parameters.get("input_network")['edges']
g = add_edges(g, edges)
# Set number of threads if OpenMP support is enabled.
if gt.openmp_enabled():
gt.openmp_set_num_threads(num_threads)
......
from tasks.task_hook import TaskHook
from tasks.util.custom_edges import add_edges
from tasks.util.steiner_tree import steiner_tree
from tasks.util.find_bridges import find_bridges
from tasks.util.read_graph_tool_graph import read_graph_tool_graph
......@@ -99,6 +100,8 @@ def multi_steiner(task_hook: TaskHook):
node_name_attribute = "internal_id" # nodes in the input network which is created from RepoTrialDB have primaryDomainId as name attribute
custom_edges = task_hook.parameters.get("custom_edges", False)
# Set number of threads if OpenMP support is enabled.
if gt.openmp_enabled():
gt.openmp_set_num_threads(num_threads)
......@@ -112,7 +115,13 @@ def multi_steiner(task_hook: TaskHook):
if ppi_dataset['licenced'] or pdi_dataset['licenced']:
filename += "_licenced"
filename = os.path.join(task_hook.data_directory, filename + ".gt")
print(filename)
g, seed_ids, _ = read_graph_tool_graph(filename, seeds, id_space, max_deg, target=search_target)
if custom_edges:
edges = task_hook.parameters.get("input_network")['edges']
g = add_edges(g, edges)
seed_map = {g.vertex_properties[node_name_attribute][node]: node for node in seed_ids}
task_hook.set_progress(1 / (float(num_trees + 3)), "Computing edge weights.")
weights = edge_weights(g, hub_penalty)
......
from tasks.task_hook import TaskHook
from tasks.util.custom_edges import add_edges
from tasks.util.read_graph_tool_graph import read_graph_tool_graph
from tasks.util.edge_weights import edge_weights
import os.path
......@@ -79,6 +80,8 @@ def network_proximity(task_hook: TaskHook):
filter_paths = task_hook.parameters.get("filter_paths", True)
custom_edges = task_hook.parameters.get("custom_edges", False)
node_name_attribute = "internal_id" # nodes in the input network which is created from RepoTrialDB have primaryDomainId as name attribute
# Set number of threads if OpenMP support is enabled.
if gt.openmp_enabled():
......@@ -95,6 +98,11 @@ def network_proximity(task_hook: TaskHook):
filename = os.path.join(task_hook.data_directory, filename + ".gt")
# g, seed_ids, _, drug_ids = read_graph_tool_graph(file_path, seeds, "", "", max_deg, False, True, include_non_approved_drugs)
g, seed_ids, drug_ids = read_graph_tool_graph(filename, seeds, id_space, max_deg, True, include_non_approved_drugs, target=search_target)
if custom_edges:
edges = task_hook.parameters.get("input_network")['edges']
g = add_edges(g, edges)
# Computing edge weights.
task_hook.set_progress(1.0 / 8, "Computing edge weights.")
weights = edge_weights(g, hub_penalty)
......
from tasks.util.custom_edges import add_edges
from tasks.util.read_graph_tool_graph import read_graph_tool_graph
from tasks.util.scores_to_results import scores_to_results
from tasks.util.edge_weights import edge_weights
......@@ -195,6 +196,8 @@ def trust_rank(task_hook: TaskHook):
search_target = task_hook.parameters.get("target", "drug-target")
filter_paths = task_hook.parameters.get("filter_paths", True)
custom_edges = task_hook.parameters.get("custom_edges", False)
# Parsing input file.
task_hook.set_progress(0 / 4.0, "Parsing input.")
......@@ -206,6 +209,11 @@ def trust_rank(task_hook: TaskHook):
filename += "_licenced"
filename = os.path.join(task_hook.data_directory, filename+".gt")
g, seed_ids, drug_ids = read_graph_tool_graph(filename, seeds, id_space, max_deg, include_indirect_drugs, include_non_approved_drugs, search_target)
if custom_edges:
edges = task_hook.parameters.get("input_network")['edges']
g = add_edges(g, edges)
task_hook.set_progress(1 / 4.0, "Computing edge weights.")
weights = edge_weights(g, hub_penalty, inverse=True)
......
def make_node_id_map(g):
    # Map each vertex's 'internal_id' property to its vertex index.
    mapping = {}
    for node in range(g.num_vertices()):
        mapping[g.vertex_properties['internal_id'][node]] = node
    return mapping


def add_edges(g, edge_list):
    """
    edge_list is [{"from": ..., "to": ...}, ...]
    """
    mapping = make_node_id_map(g)
    edge_id_list = []
    for edge in edge_list:
        # Use None as the sentinel; a plain truthiness check would wrongly drop vertex index 0.
        a = mapping.get(edge['from'])
        b = mapping.get(edge['to'])
        if a is not None and b is not None:
            edge_id_list.append((a, b, 'protein-protein'))
    e_type = g.edge_properties["type"]
    g.add_edge_list(edge_id_list, eprops=[e_type])
    return g
\ No newline at end of file
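A small usage sketch for the helper above, with hypothetical identifiers; it only assumes the 'internal_id' vertex property and 'type' edge property that add_edges reads:

# Hypothetical usage sketch for add_edges on a tiny graph-tool graph.
import graph_tool as gt

g = gt.Graph(directed=False)
g.vertex_properties["internal_id"] = g.new_vertex_property("string")
g.edge_properties["type"] = g.new_edge_property("string")

for name in ("P12345", "P67890", "Q99999"):       # hypothetical UniProt-style ids
    v = g.add_vertex()
    g.vertex_properties["internal_id"][v] = name

g = add_edges(g, [
    {"from": "P12345", "to": "P67890"},           # both endpoints known -> added
    {"from": "P67890", "to": "NOT_IN_GRAPH"},     # unknown endpoint -> skipped
])
print(g.num_edges())                              # 1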
......@@ -125,11 +125,6 @@ def read_graph_tool_graph(file_path, seeds, id_space, max_deg, include_indirect_
for edge in deleted_edges:
g.remove_edge(edge)
g.set_fast_edge_removal(fast=False)
# vertices = 0
# for _ in g.vertices():
# vertices += 1
# edges = 0
# for _ in g.edges():
# edges += 1
# Return the graph and the indices of the seed_ids and the seeds.
return g, list(seed_ids.keys()), drug_ids
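The commented-out counters deleted above were redundant; graph-tool exposes both counts directly:

# Equivalent to the removed manual counting loops.
n_vertices = g.num_vertices()
n_edges = g.num_edges()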