MULTIMAP / Commits

Commit 84ddbb62
authored 1 year ago by UMEC Mathieu
cleaning branches
parent 295d8e8b
Showing 2 changed files with 17 additions and 60 deletions:

  Mapping_using_the_API.py   +9 −45   (9 additions, 45 deletions)
  main.py                    +8 −15   (8 additions, 15 deletions)
Mapping_using_the_API.py   +9 −45   View file @ 84ddbb62
@@ -12,7 +12,7 @@ from scipy.stats import hypergeom
from utils import excel_file_writer, pre_cut, recup_all_inf_excel, cor_index

FOLDER = "C:\\Users\\mumec\\Desktop\\Mini_codes\\mapping_using_the_api\\"
import certifi
import urlopen


def send_request_to_mapping_api(url, data_json, head, met='POST'):
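A side note on the import added above: "import urlopen" on its own raises ModuleNotFoundError, because urlopen is a function inside urllib.request rather than a top-level module. A minimal sketch of the usual form, assuming the opener is meant to be used directly (the rest of this file goes through request.urlopen, so the bare import may simply be unused):

    from urllib.request import urlopen   # assumed intent of the bare "import urlopen"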
@@ -27,50 +27,17 @@ def send_request_to_mapping_api(url, data_json, head, met='POST'):
    Returns:
        Type of return: 1 excel file whith 5 columns
    req = request.Request(url, data=data_json, headers=head, method=met)
    with request.urlopen(req) as response:
        result = response.read()
    out_data = result.decode('utf-8')
    return out_data
    """
    print()
    context = ssl.create_default_context(cafile=certifi.where())
    r = requests.post(url, data_json, headers=head, verify=certifi.where())
    print(type(r))
    return r
    """
    def send_request_to_mapping_api(url, data_json, head, met='POST'):
    This function gives the result of mapping of a metabolites list from RAMP.
    Here's an example of 4 metabolites giving 505 lines.
    ["KEGG:C01157", "hmdb:HMDB0000064", "hmdb:HMDB0000148", "chebi:16015"]
    Arg:
        url = the url to use
        data_json = the data to post
        head = headers to use
        met = 'POST'
    Returns:
        Type of return: 1 excel file with 5 columns
    try:
        req = request.Request(url, data=data_json, headers=head, method=met, verify=certifi.where())
    try:
        req = request.Request(url, data=data_json, headers=head, method=met)
        with request.urlopen(req) as response:
            result = response.read()
        out_data = result.decode('utf-8')
        return out_data
    except error.HTTPError as e:
        print(f"Error: The server couldn't fulfill the request. {e}")
        return []
    except Exception as e:
        print(f"An unexpected error occurred: {e}")
        return []
    """
    except error.URLError as e:
        r = requests.post(url, data=data_json, headers=head, verify=False)
        return r.text
    return out_data


def mapping_ramp_api(metabolites_list, outfile, inf="flow"):
    """
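For orientation, a hedged usage sketch of the reworked send_request_to_mapping_api, built from the 4-metabolite example in its own docstring; the endpoint URL, header values and payload key below are placeholders, not taken from this commit:

    import json

    url = "https://<ramp-api-host>/api/..."                     # placeholder endpoint
    head = {"Content-Type": "application/json"}                 # assumed header
    metabolites = ["KEGG:C01157", "hmdb:HMDB0000064",
                   "hmdb:HMDB0000148", "chebi:16015"]
    data_json = json.dumps({"analytes": metabolites}).encode("utf-8")   # payload key is an assumption
    out_data = send_request_to_mapping_api(url, data_json, head)        # decoded response body, or [] on HTTP error

Note also that urllib's request.Request() accepts no verify= keyword; if certificate pinning with certifi is still wanted on the urllib path, one option is the SSL context the removed code already built, passed to urlopen:

    import ssl, certifi
    from urllib import request
    context = ssl.create_default_context(cafile=certifi.where())
    with request.urlopen(req, context=context) as response:     # req built as in the function above
        result = response.read()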
@@ -767,8 +734,7 @@ def opti_multimapping(file, outfolder, mapping="flow"):
    cpdbf = outfolder + recap[i_map_opt][0] + "_mapping_opti.xlsx"
    datas_cpdb = m_ora_cpdb(cpdb_o_opti, acctype, infos="flow", ofile=cpdbf,
                            cor_inf=[name_opti, cpdb_o_opti])
    # RAMP problem, see later
    print("CEST AU TOUR DE RAMP")
    for line in inf[1:]:
        to_test.append(line[1])
    l_opt_ramp = []
@@ -814,8 +780,6 @@ def opti_multimapping(file, outfolder, mapping="flow"):
    print("lines Ramp", n_map)
    l_opt_ramp_tri[0] = "RAMP"
    recap.append(l_opt_ramp_tri)
    datas_ramp = ["NA" for i in range(len(file) + 1)]
    # also temporary !!!!
    recap.append(modulation)
    df_recap = pd.DataFrame(data=recap).transpose()
    n_out_f = outfolder + "recap_multimapping.xlsx"
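For readability, a hedged toy example of the recap-to-DataFrame step in the hunk above; the values are invented and only the transpose behaviour is the point:

    import pandas as pd

    recap = [["", "met_1", "met_2"],      # one inner list per summary column
             ["CPDB", "12", "7"],
             ["RAMP", "15", "9"]]
    df_recap = pd.DataFrame(data=recap).transpose()
    # .transpose() turns each inner list into a column, so the summary lines
    # end up side by side; df_recap can then be written out to n_out_f
    # ("recap_multimapping.xlsx"), for instance with df_recap.to_excel(n_out_f).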
main.py   +8 −15   View file @ 84ddbb62
@@ -9,8 +9,7 @@ import pandas as pd
import py4cytoscape as p4c
sys.path.append('C:\\Users\\mumec\\Desktop\\Dossier_gitlab_local\\traitement_des_données')
sys.path.append('C:\\Users\\mumec\\Desktop\\Dossier_gitlab_local\\chebi-ids.git')
sys.path.append("C:\\Users\\mumec\\Desktop\\Mini_codes\\pdf_PyPDF2")
from test_pdf import out_pdf_mapping
from pdf_generation_toolbox import out_pdf_mapping
from Recovery_of_associated_Chebi_IDs import chebi_horizontal
from utils import excel_file_writer, pre_cut, recup_all_inf_excel
from complete_processing_of_mapping_results import c_p_o_m_r
@@ -125,13 +124,12 @@ def workflow(infile, out_folder):
    result_cpdb, result_ramp, recap = opti_multimapping(datas_f_map, out_folder, mapping="flow")
    # add the "exact ID" mention!
    # l_visu = c_p_o_m_r(result_ramp, out_folder, "RAMP", f_view_sav=out_folder,
    #                    modul="flow", f_modul=recap)
    l_visu = c_p_o_m_r(result_ramp, out_folder, "RAMP", f_view_sav=out_folder,
                       modul="flow", f_modul=recap)
    l_visu_c = c_p_o_m_r(result_cpdb, out_folder, "CPDB", f_view_sav=out_folder,
                         modul="flow", f_modul=recap)
    # l_visu += l_visu_c[1:]
    # l_visu.append(l_visu_c[0])
    l_visu = l_visu_c   # also temporary!
    l_visu += l_visu_c[1:]
    l_visu.append(l_visu_c[0])
    result_ramp_pdf = []
    result_cpdb_pdf = []
    recap_pdf = []
@@ -144,9 +142,9 @@ def workflow(infile, out_folder):
            new_line.append(str(i_line[i_col]))
        liste_reverse[i_liste].append(new_line)
    file_path = "Modele_de_pdf_feneratio_en_anglais_rev_19-02-2024.docx"
    # out_pdf_mapping(file_path, data_input, chebi_hori, recap_pdf,
    #                 result_ramp_pdf[:8], result_cpdb_pdf[:4],
    #                 out_folder, l_visu)
    out_pdf_mapping(file_path, data_input, chebi_hori, recap_pdf,
                    result_ramp_pdf[:8], result_cpdb_pdf[:4],
                    out_folder, l_visu)
    l_bdd = ["Wikipathways", "KEGG", "EHMN", "HumanCyc", "INOH", "Reactome"]
    # "SMPDB" crashes
    t2 = time()
@@ -155,23 +153,18 @@ def workflow(infile, out_folder):
    for bddnow in l_bdd:
        t_i_name = [recap[0] + recap[0], recap[1] + recap[2]]  # not opti
        print(t_i_name)
        # out_links = out_folder + "CPDB_links_network" + bddnow + "datas_base.xlsx"
        out_links = out_folder + "Network CPDB with only" + bddnow + "pathways.xlsx"
        edge_data, nodes_data = paths_link_cpdb(result_cpdb, out_links, recap, bdd=bddnow,
                                                flow=True, tab_id_name=t_i_name)
        """
        if bddnow == "Reactome":
            print(network_visu(edge_data[0:3], nodes_data, bdd=bddnow, sav_fol=out_folder))
        else:
            print(network_visu(edge_data[0:3], nodes_data, bdd=bddnow))
        """
    t3 = time()
    """
    print("le temps nécessaires pour effectuer les visualisation a était de", t3 - t1,
          "secondes.")
    """
    print("le temps total pour faire tourner le programme est a était", t3 - t1, "secondes.")