Commit 0a58511f authored by Jerome Mariette's avatar Jerome Mariette
Browse files

Migrate from Python 2 to Python 3 — verified working from the command line.

parent 80259313
......@@ -138,8 +138,8 @@ class AllPairs(Abstraction):
inputs = [inputs_a_file, inputs_b_file]
outputs = parse_output_list(self.outputs,
map(lambda p: '_'.join(
map(lambda s: os.path.basename(str(s)), p)),inputs))
['_'.join(
[os.path.basename(str(s)) for s in p]) for p in inputs])
# Schedule allpairs_master
with Options(local=True, collect=[i] if self.collect else None):
......@@ -149,8 +149,8 @@ class AllPairs(Abstraction):
else:
inputs = list(itertools.product(inputs_a, inputs_b))
outputs = parse_output_list(self.outputs,
map(lambda p: '_'.join(
map(lambda s: os.path.basename(str(s)), p)),inputs))
['_'.join(
[os.path.basename(str(s)) for s in p]) for p in inputs])
# We use a wrapper script to collect the output of the
# comparison and put in {INPUT_A} {INPUT_B} {OUTPUT} format, as
......@@ -164,7 +164,7 @@ class AllPairs(Abstraction):
# Wrapper script should run locally and we should always
# try to collect the temporary intermediate output file.
with Options(local=True, collect=[tmp_output]):
yield AllPairsCompareWrapper(output, o, map(lambda p: os.path.basename(str(p)), i), None)
yield AllPairsCompareWrapper(output, o, [os.path.basename(str(p)) for p in i], None)
AllPairsCompareWrapper = parse_function('printf "%s\\t%s\\t%s\\n" {ARG} `cat {IN}` > {OUT}')
......@@ -194,7 +194,7 @@ class Iterate(Abstraction):
if iterable(self.inputs):
inputs = self.inputs
else:
inputs = range(self.inputs)
inputs = list(range(self.inputs))
outputs = parse_output_list(self.outputs, inputs)
includes = parse_input_list(self.includes)
......
61a62
> self.symbol = self.nest.symbol
62a63
> self.batch = self.nest.batch
72a74
> self.nest.symbol = self.symbol
74a76
> self.nest.batch = self.batch
......@@ -30,31 +30,16 @@ except NameError:
def getfuncname(function):
""" Return name of function. """
try:
return function.func_name # Python2
except AttributeError:
return function.__name__ # Python3
return function.__name__
# map function
# Make Python2 map the same as imap (always want iterator rather than list).
try:
from itertools import imap
map = imap
except ImportError:
map = map
map = map
# zip_longest function
# Make Python2 zip_longest the same as izip_longest.
try:
from itertools import izip_longest
zip_longest = izip_longest
except ImportError:
from itertools import zip_longest
zip_longest = zip_longest
from itertools import zip_longest
zip_longest = zip_longest
# Next compatibility decorator
......
133,134c133,134
< basename_woext = os.path.splitext(os.path.basename(input))[0] if os.path.splitext(os.path.basename(input))[1] != ".gz" else os.path.splitext(os.path.splitext(os.path.basename(input))[0])[0],
< BASE_WOEXT = os.path.splitext(os.path.basename(input))[0] if os.path.splitext(os.path.basename(input))[1] != ".gz" else os.path.splitext(os.path.splitext(os.path.basename(input))[0])[0]))
---
> basename_woext = os.path.splitext(os.path.basename(input))[0],
> BASE_WOEXT = os.path.splitext(os.path.basename(input))[0]))
......@@ -325,11 +325,11 @@ class SQLCursor(ObjectCursor):
def __or__(self, other):
debug(D_DATASET, 'or: {0} {1}'.format(self._field, other))
return Or(*map(lambda o: self == o, other))
return Or(*[self == o for o in other])
def __and__(self, other):
debug(D_DATASET, 'and: {0} {1}'.format(self._field, other))
return And(*map(lambda o: self == o, other))
return And(*[self == o for o in other])
# MySQL Dataset
......@@ -353,7 +353,7 @@ def And(*filters):
if isinstance(filters[0], str):
return '(' + ' AND '.join(filters) + ')'
else:
return lambda filters: all(map(lambda d: f(d), filters))
return lambda filters: all([f(d) for d in filters])
def Or(*filters):
if not filters: return ''
......@@ -361,7 +361,7 @@ def Or(*filters):
if isinstance(filters[0], str):
return '(' + ' OR '.join(filters) + ')'
else:
return lambda filters: any(map(lambda d: f(d), filters))
return lambda filters: any([f(d) for d in filters])
# Query class
......
18,22c18,22
< # try:
< # from MySQLdb import connect as MySQLConnect
< # from MySQLdb.cursors import SSDictCursor as MySQLSSDictCursor
< # except ImportError as e:
< # warn(D_DATASET, 'Unable to import MySQL: {0}'.format(e))
---
> try:
> from MySQLdb import connect as MySQLConnect
> from MySQLdb.cursors import SSDictCursor as MySQLSSDictCursor
> except ImportError as e:
> warn(D_DATASET, 'Unable to import MySQL: {0}'.format(e))
......@@ -101,7 +101,7 @@ class Makeflow(Engine):
if options.collect:
self.dag_file.write('@_MAKEFLOW_COLLECT_LIST+={0}\n'.format(
' '.join(map(str, options.collect))))
for k, v in options.environment.items():
for k, v in list(options.environment.items()):
self.dag_file.write('@{0}={1}\n'.format(k, v))
# Write task command
......@@ -116,7 +116,7 @@ class Makeflow(Engine):
def emit_variables(self):
""" Write variables to DAG file """
for key, value in self.variables.items():
for key, value in list(self.variables.items()):
self.dag_file.write('{0}={1}\n'.format(key, value))
self.dag_file.flush()
......
14c14
< import os
---
> import os, re
66c66
< def emit_task(self, abstraction, function, command, inputs, outputs, options, symbol=None):
---
> def emit_task(self, abstraction, function, command, inputs, outputs, options):
91,95c91
<
< # if a symbol is provided
< if symbol:
< self.dag_file.write('\t@SYMBOL="' + symbol+'"\n')
<
---
>
133c133,139
< command_list.extend(arguments.split())
---
> # Is the -B option has been used
> arg_groups = re.search("(-\S)?\s?(\S*)\s?(-B)\s[\"'](.*)[\"']\s?(-\S)?\s?(\S*)", arguments)
> if arg_groups:
> for arg_group in arg_groups.groups():
> if arg_group: command_list.extend([arg_group])
> else:
> command_list.extend(arguments.split())
144d143
< """
148a148
>
151,152d150
< """
< raise RuntimeError('Failed to execute DAG {0} using {1}:\n{2}'.format(self.dag_path, self.path, e))
......@@ -15,6 +15,7 @@ import inspect
import itertools
import os
import sys
import collections
# Base Function class
......@@ -256,7 +257,7 @@ if __name__ == '__main__':
raise Exception( "The number of object returned by the function is different from the number of outputs specified!")
if len (output_path) > 0 :
for i, obj in enumerate(outputs):
objh = open(output_path[i], "w")
objh = open(output_path[i], "wb")
pickle.dump(obj, objh)
objh.close()
'''.format(PYTHON_VERSION)
......@@ -317,7 +318,7 @@ def parse_function(function, py_func_builder=PythonFunction, environment=None):
return Function(function, environment=environment)
if callable(function):
if isinstance(function, collections.Callable):
return py_func_builder(function)
raise WeaverError(D_FUNCTION,
......@@ -334,7 +335,7 @@ class Pipeline(Function):
def __init__(self, functions, separator=None):
self.functions = [parse_function(f) for f in functions]
Function.__init__(self, self.functions[0].path,
cmd_format='Pipeline({0})'.format(map(str, self.functions)))
cmd_format='Pipeline({0})'.format(list(map(str, self.functions))))
self.includes = set([f.path for f in self.functions])
if separator is None:
self.separator = Pipeline.DEFAULT_SEPARATOR
......
83a84
>
85a86,87
> if nest.batch:
> options.batch = nest.batch
86c87
<
---
>
88c89
< list(inputs) + list(includes), outputs, options, nest.symbol)
---
> list(inputs) + list(includes), outputs, options)
211a212
> {{2}}
213c214
< {{2}}(*sys.argv[1:])
---
> {{3}}(*sys.argv[1:])
216c217
< def __init__(self, function, executable=None, cmd_format=None):
---
> def __init__(self, function, add_path=None, executable=None, cmd_format=None):
225c226,232
< source = self.PYTHON_TEMPLATE.format(', '.join(imports), body, name)
---
> if add_path:
> add_path = add_path.extend(os.path.join(os.path.dirname(os.path.realpath(__file__)), '..'))
> else:
> add_path = [os.path.join(os.path.dirname(os.path.realpath(__file__)), '..'),
> os.path.join(os.path.dirname(os.path.realpath(__file__)), '..', '..')]
> path = ""
> for apath in add_path:
> path += "sys.path.insert(0, '" + apath + "')\n"
> source = self.PYTHON_TEMPLATE.format(', '.join(imports), path, body, name)
......@@ -98,8 +98,8 @@ class Nest(Makeflow):
# For each Abstraction, create InlineNest and schedule tasks to be
# executed there; only do this if we have more than one Abstraction.
self.tasks = []
if len(task_dict.keys()) > 1:
for abstraction, tasks in task_dict.items():
if len(list(task_dict.keys())) > 1:
for abstraction, tasks in list(task_dict.items()):
# For tasks scheduled directly by a Function (Abstraction is
# None), then simply schedule for execution in current Nest.
if abstraction is SENTINEL:
......@@ -119,7 +119,7 @@ class Nest(Makeflow):
inline_nest()
else:
# Copy tasks from Abstractions to Nest task list.
for abstraction, tasks in task_dict.items():
for abstraction, tasks in list(task_dict.items()):
for task in tasks:
self.tasks.append(task)
......@@ -142,7 +142,7 @@ class Nest(Makeflow):
# For each set of tasks, split the set into small sub-groups; for each
# sub-group, create a new InlineNest and schedule the tasks there.
self.tasks = []
for (abstraction, function), tasks in task_dict.items():
for (abstraction, function), tasks in list(task_dict.items()):
inline_tasks = max(CurrentScript().inline_tasks, abstraction.group)
if inline_tasks < len(tasks):
for group in groups(tasks, inline_tasks):
......
42c42
< wrapper=None, track_imports=True, track_exports=True):
---
> wrapper=None, track_imports=True, track_exports=True, path=None):
46d45
< self.symbol = None
46a47,48
> self.batch = ""
53c53
< Makeflow.__init__(self, wrapper=wrapper,
---
> Makeflow.__init__(self, wrapper=wrapper, path=path,
57c57
< self.dag_file = open(self.dag_path, 'w+')
---
> self.dag_file = open(self.dag_path, 'a+')
188c187
< options, symbol=None):
---
> options):
199c198
< [abstraction, function, command, inputs, outputs, options, symbol])
---
> [abstraction, function, command, inputs, outputs, options])
......@@ -204,7 +204,7 @@ Subsystems:
with Nest(work_dir, wrapper=self.engine_wrapper) as nest:
with self.options:
try:
execfile(self.path, self.globals)
exec(compile(open(self.path).read(), self.path, 'exec'), self.globals)
nest.compile()
except Exception as e:
fatal(D_SCRIPT, 'Error compiling script: {0}'.format(e), print_traceback=True)
......
......@@ -10,7 +10,7 @@ import sys
sys.path.insert(0, os.path.abspath(os.path.dirname(__file__) + '/../..'))
try:
from StringIO import StringIO
from io import StringIO
except ImportError:
from io import StringIO
......
......@@ -73,7 +73,7 @@ class ParseStringListTestCase(TestCase):
self.assertEqual(parse_string_list(t), ['a'])
def test_03_generator(self):
g = range(10)
g = list(range(10))
self.assertEqual(parse_string_list(g), list(map(str, g)))
# Main execution ---------------------------------------------------------------
......@@ -86,7 +86,7 @@ TestCases = [
if __name__ == '__main__':
test_runner = TextTestRunner(verbosity = 2)
test_suite = TestSuite(map(TestLoader().loadTestsFromTestCase, TestCases))
test_suite = TestSuite(list(map(TestLoader().loadTestsFromTestCase, TestCases)))
test_runner.run(test_suite)
# vim: set sts=4 sw=4 ts=8 expandtab ft=python: --------------------------------
\ No newline at end of file
......@@ -69,7 +69,7 @@ class Stash(object):
self.root = root or os.path.join(os.curdir, '_Stash')
self.depth = depth or Stash.DEPTH
self.file_counter = itertools.cycle(range(0, Stash.FILES_PER_FOLDER))
self.file_counter = itertools.cycle(list(range(0, Stash.FILES_PER_FOLDER)))
self.folder_counter = itertools.count()
self.file_number = None
self.folder_number = None
......@@ -158,7 +158,7 @@ def chunks(iterator, n, padvalue=None):
def groups(iterator, n):
""" Like chunks, but filter out None from sub-groups. """
return (filter(lambda x: x is not None, i) for i in chunks(iterator, n))
return ([x for x in i if x is not None] for i in chunks(iterator, n))
def flatten(object_list):
""" Flatten nested lists into a single sequence and return iterator. """
......
84c84
< def __next__(self):
---
> def __next__(self, file_prefix="w"):
96,97c96,97
< number = template.format(self.folder_number, self.file_number)
< args = [c for c in number[:self.depth]] + [number]
---
> number = file_prefix + template.format(self.folder_number, self.file_number)
> args = [c for c in number[len(file_prefix):self.depth+len(file_prefix)]] + [number]
202c202
< if os.path.exists(executable):
---
> if os.path.isfile(executable):
211c211
< if os.path.exists(exe_path):
---
> if os.path.isfile(exe_path):
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment