Mirror of https://github.com/RPCSX/mbuild.git
commit 4347e71179
@@ -105,15 +105,15 @@ to collect the commands using the L{dag_t.results} function and analyze the
 output. This is very handy for test and validation suites.
 """

-from base import *
-from dag import *
-from work_queue import *
-from env import *
-from util import *
-from plan import *
-from arar import *
-from doxygen import doxygen_run, doxygen_args, doxygen_env
-from header_tag import *
+from .base import *
+from .dag import *
+from .work_queue import *
+from .env import *
+from .util import *
+from .plan import *
+from .arar import *
+from .doxygen import doxygen_run, doxygen_args, doxygen_env
+from .header_tag import *

 __all__ = [ 'base',
             'dag',
@@ -131,9 +131,6 @@ __all__ = [ 'base',
 import time
 def mbuild_exit():
     """mbuild's exit function"""
-    #print "SLEEPING"
-    #time.sleep(0.5)
-    #print "EXITING"

 import atexit
 atexit.register(mbuild_exit)
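The docstring above points at L{dag_t.results} for collecting the commands a build executed. A minimal sketch of that flow, assuming the post-port package layout; the file names and the bare dag_t()/work_queue_t() constructors are illustrative, patterned on the test scripts at the end of this diff, and the plan_t fields mirror the doxygen hunks further down:

    import mbuild

    env = mbuild.env_t()
    env.parse_args()

    dag = mbuild.dag_t()
    work_queue = mbuild.work_queue_t()

    # one illustrative command; hello.c is not part of this commit
    dag.add(env, mbuild.plan_t(command='gcc -c hello.c -o hello.o',
                               input=['hello.c'],
                               output=['hello.o'],
                               env=env))
    okay = work_queue.build(dag=dag)

    # dag_t.results() hands back the command_t objects that actually ran,
    # so a test suite can inspect their exit status and captured output
    for cmd in dag.results():
        print(cmd.dump())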
@@ -18,11 +18,13 @@
 # limitations under the License.
 #
 #END_LEGAL
+from __future__ import print_function
 import os
 import sys
 import shutil
 import re


 class arar_error(Exception):
     def __init__(self, value):
         self.value = value
@@ -58,7 +60,7 @@ def repack(files, ar='ar', target='liball.a', verbose=False):
         else:
             cmd = "%s x ../%s" % (ar,arg)
         if verbose:
-            print "EXTRACTING %s" % (cmd)
+            print ("EXTRACTING %s" % (cmd))
         error= os.system(cmd)
         if error:
             raise arar_error('Extract failed for command %s' % (cmd))
@@ -66,7 +68,7 @@ def repack(files, ar='ar', target='liball.a', verbose=False):
     local_target = os.path.basename(target)
     cmd = "%s rcv %s %s" % (ar, local_target, " ".join(files))
     if verbose:
-        print "RECOMBINING %s" % (cmd)
+        print ("RECOMBINING %s" % (cmd))
     error=os.system(cmd)
     if error:
         raise arar_error('Recombine failed')
@@ -74,7 +76,7 @@ def repack(files, ar='ar', target='liball.a', verbose=False):
     os.chdir('..')
     os.rename(os.path.join(tdir,local_target), target)
     if verbose:
-        print "CREATED %s" % (target)
+        print ("CREATED %s" % (target))
     shutil.rmtree(tdir)


@@ -192,3 +192,8 @@ def on_windows():
     """
     global _on_windows
     return _on_windows
+
+def ensure_string(x):
+    if isinstance(x,bytes):
+        return x.decode('utf-8')
+    return x
@@ -23,10 +23,10 @@
 import os
 import sys
 import platform
-from base import *
-from util import *
-from env import *
-import msvs
+from .base import *
+from .util import *
+from .env import *
+from . import msvs


 def set_compiler_env_common(env):
@@ -108,7 +108,7 @@ def set_env_gnu(env):
                                     '4':'%(OPTOPT)s4'} )

     # lazy toolchain and other env var (f) expansion
-    mktool = lambda(f): "%(toolchain)s%(" + f + ")s"
+    mktool = lambda f: "%(toolchain)s%(" + f + ")s"

     if env['CXX_COMPILER'] == '':
         env['CXX_COMPILER'] = ( 'compiler', { 'gnu':'g++',
@@ -418,7 +418,7 @@ def set_env_ms(env):
     env['ARCHIVER'] =( 'compiler', { 'ms': 'lib.exe',
                                      'icl' : 'xilib.exe'})
     # lazy toolchain and other env var (f) expansion
-    mktool = lambda(f): "%(toolchain)s%(" + f + ")s"
+    mktool = lambda f: "%(toolchain)s%(" + f + ")s"

     if env['CXX'] == '':
         env['CXX'] = quote(mktool('CXX_COMPILER'))
@@ -444,7 +444,7 @@ def yasm_support(env):
     env['ASDOPT']='-D'
     try:
         env['ASFLAGS'] = ' -f' + yasm_formats[env['host_os']][env['host_cpu']]
-        env['ASMOUT'] = '-o '
+        env['ASMOUT'] = '-o '
         env['AS'] = 'yasm'
     except:
         die("YASM does not know what format to use for build O/S: %s and target CPU: %s" %
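The mktool lambda above only builds lazy templates such as "%(toolchain)s%(CXX_COMPILER)s"; the actual substitution happens later through env_t.expand_string (see the env.py hunks below). A rough illustration with made-up values, not taken from this commit:

    import mbuild

    env = mbuild.env_t()
    env['toolchain'] = '/opt/cross/bin/'       # illustrative value
    env['CXX_COMPILER'] = 'g++'                # normally a ('compiler', {...}) tuple
    env['CXX'] = "%(toolchain)s%(CXX_COMPILER)s"
    # expand_string substitutes iteratively, so the nested keys resolve too:
    print(env.expand_string("%(CXX)s -O2 -c foo.cpp"))
    # expected result: /opt/cross/bin/g++ -O2 -c foo.cpp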
@ -66,26 +66,27 @@
|
||||
# file, we must run it to produce the missing header.
|
||||
#
|
||||
|
||||
|
||||
from __future__ import print_function
|
||||
import os
|
||||
import sys
|
||||
import platform
|
||||
import types
|
||||
import collections
|
||||
import atexit
|
||||
try:
|
||||
import cPickle as apickle
|
||||
except:
|
||||
import pickle as apickle
|
||||
|
||||
|
||||
from base import *
|
||||
from work_queue import *
|
||||
from env import *
|
||||
from util import *
|
||||
from plan import *
|
||||
import scanner
|
||||
import dfs
|
||||
import util
|
||||
from .base import *
|
||||
from .work_queue import *
|
||||
from .env import *
|
||||
from .util import *
|
||||
from .plan import *
|
||||
from . import scanner
|
||||
from . import dfs
|
||||
from . import util
|
||||
|
||||
class _mbuild_dep_record_t(object):
|
||||
"""This stores the basic dependence structure for the
|
||||
@ -133,14 +134,7 @@ class _mbuild_dep_record_t(object):
|
||||
def hash_file(self):
|
||||
#msgb("HASHING", str(self.file_name))
|
||||
if os.path.exists(self.file_name):
|
||||
try:
|
||||
lines = file(self.file_name).readlines()
|
||||
except:
|
||||
die("COULD NOT READ: %s" %(str(self.file_name)))
|
||||
self.signature = hash_list(lines)
|
||||
if verbose(99):
|
||||
msgb("HASHFILE", "%s -> %s" % (self.signature,
|
||||
self.file_name))
|
||||
self.signature = util.hash_file(self.file_name)
|
||||
else:
|
||||
if verbose(99):
|
||||
msgb("COULD NOT HASH MISSING FILE", self.file_name)
|
||||
@ -248,7 +242,7 @@ class _mbuild_dep_record_t(object):
|
||||
def dump(self):
|
||||
"""print a string representing this node of the DAG. The
|
||||
string comes from the __str__ function"""
|
||||
print self.dump_str()
|
||||
print(self.dump_str())
|
||||
def __str__(self):
|
||||
return self.dump_str()
|
||||
|
||||
@ -256,6 +250,9 @@ class _mbuild_storage_object_t(object):
|
||||
def __init__(self, signature):
|
||||
self.signature = signature
|
||||
|
||||
def _do_terminate(d):
|
||||
"""called by atexit function for dag_t objects"""
|
||||
d.terminate()
|
||||
|
||||
class dag_t(object):
|
||||
"""
|
||||
@ -300,7 +297,8 @@ class dag_t(object):
|
||||
set(self._canonize_if_exists_fn(env['required']))
|
||||
else:
|
||||
self.required_set = set()
|
||||
|
||||
|
||||
atexit.register(_do_terminate, self)
|
||||
|
||||
def cycle_check(self):
|
||||
"""Check the DAG for illegal cycles in the include structure.
|
||||
@ -309,7 +307,7 @@ class dag_t(object):
|
||||
"""
|
||||
node_dict = {}
|
||||
# build the graph for the DFS
|
||||
for k,v in self.recs.iteritems():
|
||||
for k,v in iter(self.recs.items()):
|
||||
if k in node_dict:
|
||||
node = node_dict[k]
|
||||
else:
|
||||
@ -328,18 +326,18 @@ class dag_t(object):
|
||||
msgb("CYCLE DETECTED IN DAG")
|
||||
return cycle
|
||||
|
||||
def __del__(self):
|
||||
def terminate(self):
|
||||
self.dag_write_signatures()
|
||||
|
||||
def dump(self):
|
||||
"""print a string representing the DAG. """
|
||||
print "DAG DUMP"
|
||||
for v in self.recs.itervalues():
|
||||
print("DAG DUMP")
|
||||
for v in iter(self.recs.values()):
|
||||
v.dump()
|
||||
|
||||
def _hash_mixed_list(l):
|
||||
|
||||
if isinstance(l, types.ListType):
|
||||
if isinstance(l, list):
|
||||
il = l
|
||||
else:
|
||||
il = [l]
|
||||
@ -357,7 +355,7 @@ class dag_t(object):
|
||||
if verbose(10):
|
||||
msgb("WRITING SIGNATURES", self.signature_file_name)
|
||||
d = {}
|
||||
for (k,v) in self.recs.iteritems():
|
||||
for (k,v) in iter(self.recs.items()):
|
||||
# get the new hash values for anything that had a command
|
||||
# execute for it.
|
||||
if v.creator:
|
||||
@ -436,11 +434,11 @@ class dag_t(object):
|
||||
warn("READING SIGNATURES FAILED FOR "+ file_name)
|
||||
return
|
||||
if verbose(99):
|
||||
for k, v in self.old_signatures.iteritems():
|
||||
for k, v in iter(self.old_signatures.items()):
|
||||
msgb("SIGREAD", "%s -> %s" % (str(v.signature),k))
|
||||
|
||||
# Add old signatures to any existing files
|
||||
for k, v in self.recs.iteritems():
|
||||
for k, v in iter(self.recs.items()):
|
||||
if k in self.old_signatures:
|
||||
v.old_signature = self.old_signatures[k].signature
|
||||
|
||||
@ -542,13 +540,13 @@ class dag_t(object):
|
||||
required for the build. Internal function"""
|
||||
if verbose(10):
|
||||
msgb("INPUT TARGETS", str(targets))
|
||||
for v in self.recs.itervalues():
|
||||
for v in iter(self.recs.values()):
|
||||
v.required = False
|
||||
|
||||
target_dictionary = dict.fromkeys(targets, True)
|
||||
if verbose(10):
|
||||
msgb("TARGETS", str(target_dictionary))
|
||||
for v in self.recs.itervalues():
|
||||
for v in iter(self.recs.values()):
|
||||
if v.creator:
|
||||
if v.file_name in target_dictionary:
|
||||
if not v.required:
|
||||
@ -563,13 +561,13 @@ class dag_t(object):
|
||||
(1)there was an error in the build or (2) there is a
|
||||
circularity in the dependence structure."""
|
||||
did_not_build = []
|
||||
for v in self.recs.itervalues():
|
||||
for v in iter(self.recs.values()):
|
||||
if v.required and not v.visited:
|
||||
did_not_build.append(v.file_name)
|
||||
return did_not_build
|
||||
|
||||
def _find_loops(self, root_nodes):
|
||||
#print "FIND LOOPS"
|
||||
#print ("FIND LOOPS")
|
||||
|
||||
def _mark_loop(level,n,stack,all_sccs):
|
||||
# Tarjan's algorithm for strongly connected components
|
||||
@ -602,7 +600,7 @@ class dag_t(object):
|
||||
level = 1
|
||||
|
||||
for v in root_nodes:
|
||||
#print "MARKING", v.file_name
|
||||
#print ("MARKING", v.file_name)
|
||||
_mark_loop(level,v,stack,all_sccs)
|
||||
|
||||
# mark nodes that are part of include-loops (and print them out)
|
||||
@ -623,19 +621,19 @@ class dag_t(object):
|
||||
nodes = collections.deque() # work list
|
||||
|
||||
if targets:
|
||||
if not isinstance(targets, types.ListType): # make it a list
|
||||
if not isinstance(targets, list): # make it a list
|
||||
targets = [ targets ]
|
||||
self._find_required_nodes(targets)
|
||||
else:
|
||||
# mark all nodes required since no targets are specified
|
||||
for v in self.recs.itervalues():
|
||||
for v in iter(self.recs.values()):
|
||||
v.required = True
|
||||
|
||||
self._find_loops(self.recs.itervalues())
|
||||
self._find_loops(iter(self.recs.values()))
|
||||
|
||||
# build a list of roots -- files that have nothing they depend on.
|
||||
# store that list in the nodes list
|
||||
for v in self.recs.itervalues():
|
||||
for v in iter(self.recs.values()):
|
||||
v.visited = False # initialize all to false
|
||||
v.added = False # initialize all to false
|
||||
if (v.part_of_loop or len(v.files_that_are_inputs) == 0) and v.required:
|
||||
@ -834,7 +832,7 @@ class dag_t(object):
|
||||
def _make_list(self, x): # private
|
||||
"""Make a list from a single object if the thing is not
|
||||
already a list. If it is a list, just return the list"""
|
||||
if isinstance(x,types.ListType):
|
||||
if isinstance(x,list):
|
||||
return x
|
||||
return [ x ]
|
||||
|
||||
@ -1015,7 +1013,7 @@ class dag_t(object):
|
||||
@return: A list of L{command_t} objects.
|
||||
"""
|
||||
executed_commands = []
|
||||
for r in self.recs.itervalues():
|
||||
for r in iter(self.recs.values()):
|
||||
if r.creator:
|
||||
if r.creator.completed:
|
||||
executed_commands.append(r.creator)
|
||||
@ -1039,7 +1037,7 @@ class dag_t(object):
|
||||
"""
|
||||
if verbose(12):
|
||||
msgb("DAG ADDING", str(d))
|
||||
if isinstance(d,types.DictType):
|
||||
if isinstance(d,dict):
|
||||
q = self._convert_to_dagfood(d)
|
||||
c = self._add_dagfood(env,q)
|
||||
elif isinstance(d,plan_t):
|
||||
|
@@ -22,7 +22,7 @@
 """This file provides a node_t type and a dfs() routine that prints out
 cycles found in a graph represented as a list of node_t objects.
 """

+from __future__ import print_function
 _dfs_verbose = False

 class node_t(object):
@@ -53,7 +53,7 @@ class node_t(object):
         s = []
         s.append("TARGET: %s\n\t" % self.name)
         s.append("discovered %d finalized %d\n\t" % (self.discover, self.finalize))
-        s.extend(map(lambda(x): "\t\n%s" % x.name, self.afters))
+        s.extend(["\t\n{}".format(x.name) for x in self.afters])
         return ''.join(s)


@@ -62,7 +62,7 @@ def _print_cycle(last_visit, grey_loop_closer):
     pad = ''
     p = last_visit
     while 1:
-        print pad, p.name
+        print (pad, p.name)
         if p == grey_loop_closer:
             break
         p = p.predecessor
@@ -73,7 +73,7 @@ def _visit(n):
     n.color = 1
     n.discover = _dfs_time
     if _dfs_verbose:
-        print "visiting %s" % str(n)
+        print ("visiting %s" % str(n))
     _dfs_time += 1
     retval = False
     for a in n.afters:
@@ -82,7 +82,7 @@ def _visit(n):
             retval |= _visit(a)
         elif a.color == 1:
             # a back-edge
-            print "cycle"
+            print ("cycle")
             _print_cycle(n,a)
             retval = True
     n.color = 2
@@ -117,7 +117,7 @@ def _visit_transpose(n):
     global _dfs_time
     n.color = 1
     if _dfs_verbose:
-        print "visiting %s" % str(n)
+        print ("visiting %s" % str(n))
     for a in n.befores:
         if a.color == 0:
             _visit_transpose(a)
@@ -134,7 +134,7 @@ def dfs_transpose(nodes):
         if n.color == 0:
             _visit_transpose(n)
     if _dfs_verbose:
-        print "===="
+        print ("====")

 ####################################################

@@ -151,11 +151,11 @@ def _test_dfs():
     nodes = [ node1, node2, node3, node4 ]
     cycle = dfs(nodes)
     if cycle:
-        print "CYCLE DETECTED"
-        #print "VISIT TRANSPOSE"
+        print ("CYCLE DETECTED")
+        #print ("VISIT TRANSPOSE")
         #dfs_transpose(nodes)

-    # print "NODES\n", "\n".join(map(str,nodes))
+    # print ("NODES\n", "\n".join(map(str,nodes)))

 if __name__ == '__main__':
     _test_dfs()
@ -31,7 +31,10 @@ import glob
|
||||
import types
|
||||
|
||||
try:
|
||||
import mbuild
|
||||
from . import base
|
||||
from . import dag
|
||||
from . import util
|
||||
from . import plan
|
||||
except:
|
||||
s = "\nXED ERROR: mfile.py could not find mbuild." + \
|
||||
" Should be a sibling of the xed2 directory.\n\n"
|
||||
@ -55,7 +58,7 @@ def _doxygen_version_okay(s, want_major, want_minor, want_fix):
|
||||
values[2] = re.sub(r'-.*$','',values[2])
|
||||
try:
|
||||
fix = int(values[2])
|
||||
except ValueError:
|
||||
except ValueError as v:
|
||||
pass
|
||||
if (maj > 1) or \
|
||||
(maj == want_major and minor > want_minor) or \
|
||||
@ -79,16 +82,16 @@ def _find_doxygen(env):
|
||||
if os.path.exists(doxygen_cmd_cygwin):
|
||||
doxygen_cmd = doxygen_cmd_cygwin
|
||||
else:
|
||||
mbuild.msgb('DOXYGEN',"Could not find cygwin's doxygen," +
|
||||
base.msgb('DOXYGEN',"Could not find cygwin's doxygen," +
|
||||
"trying doxygen from PATH")
|
||||
elif env['build_os'] == 'lin':
|
||||
if mbuild.verbose(1):
|
||||
mbuild.msgb("CHECKING FOR", doxygen_cmd_intel)
|
||||
if base.verbose(1):
|
||||
base.msgb("CHECKING FOR", doxygen_cmd_intel)
|
||||
if os.path.exists(doxygen_cmd_intel):
|
||||
doxygen_cmd = doxygen_cmd_intel
|
||||
elif env['build_os'] == 'mac':
|
||||
if mbuild.verbose(1):
|
||||
mbuild.msgb("CHECKING FOR", doxygen_cmd_mac)
|
||||
if base.verbose(1):
|
||||
base.msgb("CHECKING FOR", doxygen_cmd_mac)
|
||||
if os.path.exists(doxygen_cmd_mac):
|
||||
doxygen_cmd = doxygen_cmd_mac
|
||||
else:
|
||||
@ -96,26 +99,26 @@ def _find_doxygen(env):
|
||||
|
||||
doxygen_cmd = env.escape_string(doxygen_cmd)
|
||||
doxygen_okay = False
|
||||
if mbuild.verbose(1):
|
||||
mbuild.msgb('Checking doxygen version','...')
|
||||
if mbuild.check_python_version(2,4):
|
||||
if base.verbose(1):
|
||||
base.msgb('Checking doxygen version','...')
|
||||
if base.check_python_version(2,4):
|
||||
try:
|
||||
(retval, output, error_output) = \
|
||||
mbuild.run_command(doxygen_cmd + " --version")
|
||||
util.run_command(doxygen_cmd + " --version")
|
||||
if retval==0:
|
||||
if len(output) > 0:
|
||||
first_line = output[0].strip()
|
||||
if mbuild.verbose(1):
|
||||
mbuild.msgb("Doxygen version", first_line)
|
||||
if base.verbose(1):
|
||||
base.msgb("Doxygen version", first_line)
|
||||
doxygen_okay = _doxygen_version_okay(first_line, 1,4,6)
|
||||
else:
|
||||
for o in output:
|
||||
mbuild.msgb("Doxygen-version-check STDOUT", o)
|
||||
base.msgb("Doxygen-version-check STDOUT", o)
|
||||
if error_output:
|
||||
for line in error_output:
|
||||
mbuild.msgb("STDERR ",line.rstrip())
|
||||
base.msgb("STDERR ",line.rstrip())
|
||||
except:
|
||||
mbuild.die("Doxygen required by the command line options " +
|
||||
base.die("Doxygen required by the command line options " +
|
||||
"but no doxygen found")
|
||||
|
||||
return (doxygen_cmd, doxygen_okay)
|
||||
@ -136,10 +139,10 @@ def _customize_doxygen_file(env, subs):
|
||||
Returns True on success"""
|
||||
|
||||
# doxygen wants quotes around paths with spaces
|
||||
for k,s in subs.iteritems():
|
||||
for k,s in iter(subs.items()):
|
||||
if re.search(' ',s):
|
||||
if not re.search('^".*"$',s):
|
||||
mbuild.die("Doxygen requires quotes around strings with spaces: [%s]->[%s]" %
|
||||
base.die("Doxygen requires quotes around strings with spaces: [%s]->[%s]" %
|
||||
( k,s))
|
||||
return False
|
||||
|
||||
@ -147,7 +150,7 @@ def _customize_doxygen_file(env, subs):
|
||||
try:
|
||||
lines = file(env['doxygen_config']).readlines()
|
||||
except:
|
||||
mbuild.msgb("Could not open input file: " + env['doxygen_config'])
|
||||
base.msgb("Could not open input file: " + env['doxygen_config'])
|
||||
return False
|
||||
|
||||
env['doxygen_config_customized'] = \
|
||||
@ -155,22 +158,22 @@ def _customize_doxygen_file(env, subs):
|
||||
try:
|
||||
ofile = open(env['doxygen_config_customized'],'w')
|
||||
except:
|
||||
mbuild.msgb("Could not open output file: " + env['doxygen_config_customized'])
|
||||
base.msgb("Could not open output file: " + env['doxygen_config_customized'])
|
||||
return False
|
||||
|
||||
# compile the patterns
|
||||
rsubs = {}
|
||||
for k,v in subs.iteritems():
|
||||
for k,v in iter(subs.items()):
|
||||
rsubs[k]=re.compile(r'(?P<tag>[$][(]' + k + '[)])')
|
||||
|
||||
olines = []
|
||||
for line in lines:
|
||||
oline = line
|
||||
for k,p in rsubs.iteritems():
|
||||
#print 'searching for', k, 'to replace it with', subs[k]
|
||||
for k,p in iter(rsubs.items()):
|
||||
#print ('searching for', k, 'to replace it with', subs[k])
|
||||
m = p.search(oline)
|
||||
while m:
|
||||
#print 'replacing', k, 'with', subs[k]
|
||||
#print ('replacing', k, 'with', subs[k])
|
||||
oline = _replace_match(oline, m, subs[k], 'tag')
|
||||
m = p.search(oline)
|
||||
olines.append(oline)
|
||||
@ -181,7 +184,7 @@ def _customize_doxygen_file(env, subs):
|
||||
ofile.write(line)
|
||||
except:
|
||||
ofile.close()
|
||||
mbuild.msgb("Could not write output file: " + env['doxygen_config_customized'])
|
||||
base.msgb("Could not write output file: " + env['doxygen_config_customized'])
|
||||
return False
|
||||
|
||||
ofile.close()
|
||||
@ -191,12 +194,12 @@ def _build_doxygen_main(args, env):
|
||||
"""Customize the doxygen input file. Run the doxygen command, copy
|
||||
in any images, and put the output in the right place."""
|
||||
|
||||
if type(args) is types.ListType:
|
||||
if isinstance(args, list):
|
||||
if len(args) < 2:
|
||||
mbuild.die("Need subs dictionary and dummy file arg for the doxygen command " +
|
||||
base.die("Need subs dictionary and dummy file arg for the doxygen command " +
|
||||
"to indicate its processing")
|
||||
else:
|
||||
mbuild.die("Need a list for _build_doxygen_main with the subs " +
|
||||
base.die("Need a list for _build_doxygen_main with the subs " +
|
||||
"dictionary and the dummy file name")
|
||||
|
||||
(subs,dummy_file) = args
|
||||
@ -214,26 +217,26 @@ def _build_doxygen_main(args, env):
|
||||
try:
|
||||
okay = _customize_doxygen_file(env, subs)
|
||||
except:
|
||||
mbuild.die("CUSTOMIZE DOXYGEN INPUT FILE FAILED")
|
||||
base.die("CUSTOMIZE DOXYGEN INPUT FILE FAILED")
|
||||
if not okay:
|
||||
return (1, ['Doxygen customization failed'])
|
||||
|
||||
cmd = env['DOXYGEN'] + ' ' + \
|
||||
env.escape_string(env['doxygen_config_customized'])
|
||||
if mbuild.verbose(1):
|
||||
mbuild.msgb("RUN DOXYGEN", cmd)
|
||||
(retval, output, error_output) = mbuild.run_command(cmd)
|
||||
if base.verbose(1):
|
||||
base.msgb("RUN DOXYGEN", cmd)
|
||||
(retval, output, error_output) = util.run_command(cmd)
|
||||
|
||||
for line in output:
|
||||
mbuild.msgb("DOX",line.rstrip())
|
||||
base.msgb("DOX",line.rstrip())
|
||||
if error_output:
|
||||
for line in error_output:
|
||||
mbuild.msgb("DOX-ERROR",line.rstrip())
|
||||
base.msgb("DOX-ERROR",line.rstrip())
|
||||
if retval != 0:
|
||||
mbuild.msgb("DOXYGEN FAILED")
|
||||
mbuild.die("Doxygen run failed. Retval=", str(retval))
|
||||
mbuild.touch(dummy_file)
|
||||
mbuild.msgb("DOXYGEN","succeeded")
|
||||
base.msgb("DOXYGEN FAILED")
|
||||
base.die("Doxygen run failed. Retval=", str(retval))
|
||||
util.touch(dummy_file)
|
||||
base.msgb("DOXYGEN","succeeded")
|
||||
return (0, []) # success
|
||||
|
||||
|
||||
@ -255,7 +258,7 @@ def _make_doxygen_reference_manual(env, doxygen_inputs, subs, work_queue,
|
||||
"""Install the doxygen reference manual the doyxgen_output_dir
|
||||
directory. doxygen_inputs is a list of files """
|
||||
|
||||
dox_dag = mbuild.dag_t(hash_file_name,env=env)
|
||||
dox_dag = dag.dag_t(hash_file_name,env=env)
|
||||
|
||||
# so that the scanner can find them
|
||||
dirs = {}
|
||||
@ -278,7 +281,7 @@ def _make_doxygen_reference_manual(env, doxygen_inputs, subs, work_queue,
|
||||
if run_always:
|
||||
_build_doxygen_main([subs,dummy], env)
|
||||
else:
|
||||
c1 = mbuild.plan_t(command=_build_doxygen_main,
|
||||
c1 = plan.plan_t(command=_build_doxygen_main,
|
||||
args= [subs,dummy],
|
||||
env= env,
|
||||
input= doxygen_inputs,
|
||||
@ -288,9 +291,9 @@ def _make_doxygen_reference_manual(env, doxygen_inputs, subs, work_queue,
|
||||
okay = work_queue.build(dag=dox_dag)
|
||||
phase = "DOXYGEN"
|
||||
if not okay:
|
||||
mbuild.die("[%s] failed. dying..." % phase)
|
||||
if mbuild.verbose(1):
|
||||
mbuild.msgb(phase, "build succeeded")
|
||||
base.die("[%s] failed. dying..." % phase)
|
||||
if base.verbose(1):
|
||||
base.msgb(phase, "build succeeded")
|
||||
|
||||
|
||||
############################################################
|
||||
|
mbuild/env.py (107 changed lines)
@ -20,6 +20,7 @@
|
||||
#END_LEGAL
|
||||
|
||||
"""Environment support"""
|
||||
from __future__ import print_function
|
||||
import os
|
||||
import sys
|
||||
import re
|
||||
@ -29,12 +30,12 @@ import optparse
|
||||
import time
|
||||
import copy
|
||||
|
||||
from base import *
|
||||
import util
|
||||
import build_env
|
||||
import plan
|
||||
import msvs
|
||||
import mbuild
|
||||
from .base import *
|
||||
from . import util
|
||||
from . import build_env
|
||||
from . import plan
|
||||
from . import msvs
|
||||
|
||||
|
||||
def _remove_libname(args,env):
|
||||
#lib = env.expand('%(LIBNAME)s')
|
||||
@ -170,7 +171,7 @@ class env_t(object):
|
||||
msgb("VERSION", "$Id: mbuild_env.py 44 2007-03-16 15:54:44Z mjcharne $")
|
||||
def __setitem__(self,k,value):
|
||||
"""Write a value to the environment dictionary"""
|
||||
if isinstance(value,types.StringType):
|
||||
if util.is_stringish(value):
|
||||
self.env[k] = util.posix_slashes(value)
|
||||
else:
|
||||
self.env[k] = value
|
||||
@ -206,10 +207,10 @@ class env_t(object):
|
||||
"""
|
||||
if newenv == None:
|
||||
newenv = self.env
|
||||
if isinstance(command_string, types.StringType):
|
||||
if util.is_stringish(command_string):
|
||||
return self._iterative_substitute(command_string, newenv)
|
||||
if isinstance(command_string, types.ListType):
|
||||
return map(lambda(x): self._iterative_substitute(x, newenv), command_string)
|
||||
if isinstance(command_string, list):
|
||||
return [ self._iterative_substitute(x, newenv) for x in command_string ]
|
||||
die("expand_string only handles substitution in strings or lists of strings")
|
||||
|
||||
def expand_key(self,k, newenv=None):
|
||||
@ -230,11 +231,11 @@ class env_t(object):
|
||||
die("Could not find %s in the environment" % k)
|
||||
|
||||
|
||||
if isinstance(newenv[k],types.ListType):
|
||||
if isinstance(newenv[k],list):
|
||||
# We must process each string in the list and do
|
||||
# substitutions on them. For example, CPPPATH
|
||||
return map(lambda(x): self._iterative_substitute(x,newenv), newenv[k])
|
||||
if isinstance(newenv[k], types.StringType):
|
||||
return [ self._iterative_substitute(x,newenv) for x in newenv[k]]
|
||||
if util.is_stringish(newenv[k]):
|
||||
return self._iterative_substitute("%(" + k + ")s", newenv)
|
||||
# non strings (scalars)
|
||||
return newenv[k]
|
||||
@ -262,11 +263,11 @@ class env_t(object):
|
||||
name = m.group('name')
|
||||
if name not in dct1:
|
||||
die("Bad substitution for " + name)
|
||||
#print "SUBSTITUTING %s" % name
|
||||
#print ("SUBSTITUTING %s" % name)
|
||||
v = dct1[name]
|
||||
# repeatedly expand any tuples that show up.
|
||||
while not isinstance(v,types.StringType):
|
||||
if isinstance(v,types.TupleType):
|
||||
while not util.is_stringish(v):
|
||||
if isinstance(v,tuple):
|
||||
(key, dct) = v
|
||||
|
||||
# look up key in the main dictionary to create a
|
||||
@ -297,7 +298,7 @@ class env_t(object):
|
||||
t = self._mysub(t,name,v)
|
||||
m = subs_pattern.search(t)
|
||||
if debug:
|
||||
print t
|
||||
print (t)
|
||||
return t
|
||||
|
||||
def _dosub_old(self,s,d):
|
||||
@ -306,8 +307,8 @@ class env_t(object):
|
||||
to return. If the input s is a list, then we recursively
|
||||
expand each element of that list"""
|
||||
|
||||
if isinstance(s,types.ListType):
|
||||
return map(lambda(x): self.dosub(x,d), s)
|
||||
if isinstance(s,list):
|
||||
return [ self.dosub(x,d) for x in s]
|
||||
|
||||
# The common case: Just expanding a simple string.
|
||||
t = s
|
||||
@ -333,7 +334,7 @@ class env_t(object):
|
||||
s.append("\nSYSTEM: ")
|
||||
s.append(self.env['system'])
|
||||
s.append("\nDICTIONARY:\n")
|
||||
for k,v in self.env.iteritems():
|
||||
for k,v in iter(self.env.items()):
|
||||
s.append("\t")
|
||||
s.append(k)
|
||||
s.append("->")
|
||||
@ -360,13 +361,17 @@ class env_t(object):
|
||||
|
||||
def _check_registry_environment(self,env_var):
|
||||
s = 'SYSTEM\\CurrentControlSet\\Control\\Session Manager\\Environment'
|
||||
is_py2 = sys.version[0] == '2'
|
||||
try:
|
||||
import _winreg
|
||||
key = _winreg.OpenKey(_winreg.HKEY_LOCAL_MACHINE, s)
|
||||
(val, typ) = _winreg.QueryValueEx(key, env_var)
|
||||
if is_py2:
|
||||
import _winreg as winreg
|
||||
else:
|
||||
import winreg
|
||||
key = winreg.OpenKey(winreg.HKEY_LOCAL_MACHINE, s)
|
||||
(val, typ) = winreg.QueryValueEx(key, env_var)
|
||||
return val
|
||||
except:
|
||||
mbuild.die(("Could not read windows registry for variable %s.\n" % \
|
||||
die(("Could not read windows registry for variable %s.\n" % \
|
||||
(env_var)) +
|
||||
"Use win32 python and install pywin32")
|
||||
|
||||
@ -809,7 +814,7 @@ class env_t(object):
|
||||
f = '/proc/cpuinfo'
|
||||
proc_pat= re.compile(r'proces')
|
||||
if os.path.exists(f):
|
||||
for line in file(f).readlines():
|
||||
for line in open(f,'r'):
|
||||
if proc_pat.search(line):
|
||||
n += 1
|
||||
return n
|
||||
@ -836,7 +841,7 @@ class env_t(object):
|
||||
for k in kwds:
|
||||
if k in incoming_env:
|
||||
t = incoming_env[k]
|
||||
if isinstance(t,types.ListType) and replace==False:
|
||||
if isinstance(t,list) and replace==False:
|
||||
self.env[k].extend(t)
|
||||
else:
|
||||
self.env[k] = t
|
||||
@ -852,12 +857,12 @@ class env_t(object):
|
||||
if targets == None:
|
||||
targets = []
|
||||
|
||||
if not isinstance(targets,types.ListType):
|
||||
if not isinstance(targets,list):
|
||||
die("The 'targets' environment option must be a list")
|
||||
|
||||
if 'args' in self.env:
|
||||
args = self.env['args']
|
||||
if isinstance(args,types.ListType):
|
||||
if isinstance(args,list):
|
||||
targets.extend(args)
|
||||
else:
|
||||
die("The 'args' environment option must be a list")
|
||||
@ -1009,7 +1014,7 @@ class env_t(object):
|
||||
spaces."""
|
||||
if field in dct:
|
||||
v = dct[field]
|
||||
if isinstance(v,types.ListType):
|
||||
if isinstance(v,list):
|
||||
vflat = ' '.join(v)
|
||||
dct[field]= vflat
|
||||
|
||||
@ -1161,10 +1166,10 @@ class env_t(object):
|
||||
"""Return True if system supports AVX1. Does not work
|
||||
on windows"""
|
||||
if self.on_linux():
|
||||
lines = file('/proc/cpuinfo').readlines()
|
||||
for l in lines:
|
||||
if 'avx' in l:
|
||||
return True
|
||||
with open('/proc/cpuinfo','r') as fp:
|
||||
for l in fp:
|
||||
if 'avx' in l:
|
||||
return True
|
||||
elif self.on_mac():
|
||||
cmd = "/usr/sbin/sysctl hw.optional.avx1_0"
|
||||
(retval, output, error_output) = util.run_command(cmd)
|
||||
@ -1181,7 +1186,7 @@ class env_t(object):
|
||||
cmd = "/usr/sbin/sysctl hw.optional.x86_64"
|
||||
(retval,output, error_output) = util.run_command(cmd)
|
||||
if retval == 0 and len(output)>0:
|
||||
if re.match('hw.optional.x86_64: 1', output[0]):
|
||||
if re.match('hw.optional.x86_64: 1', ensure_string(output[0])):
|
||||
return True
|
||||
return False
|
||||
|
||||
@ -1305,8 +1310,8 @@ class env_t(object):
|
||||
@rtype: string
|
||||
@return: fn with a new suffix specified by newext
|
||||
"""
|
||||
if isinstance(fn,types.ListType):
|
||||
return map(lambda(x): self.resuffix(x,newext), fn)
|
||||
if isinstance(fn,list):
|
||||
return [self.resuffix(x,newext) for x in fn]
|
||||
else:
|
||||
(root,ext) = os.path.splitext(fn)
|
||||
return root + newext
|
||||
@ -1411,7 +1416,7 @@ class env_t(object):
|
||||
@param newdef: string to add to the ASFLAGS
|
||||
environment variable.
|
||||
"""
|
||||
if isinstance(newdef,types.ListType):
|
||||
if isinstance(newdef,list):
|
||||
deflist = newdef
|
||||
else:
|
||||
deflist = [ newdef ]
|
||||
@ -1425,7 +1430,7 @@ class env_t(object):
|
||||
@param newdef: string to add to the CCFLAGS
|
||||
environment variable.
|
||||
"""
|
||||
if isinstance(newdef,types.ListType):
|
||||
if isinstance(newdef,list):
|
||||
deflist = newdef
|
||||
else:
|
||||
deflist = [ newdef ]
|
||||
@ -1440,7 +1445,7 @@ class env_t(object):
|
||||
@param newdef: string to add to the CXXFLAGS
|
||||
environment variable.
|
||||
"""
|
||||
if isinstance(newdef,types.ListType):
|
||||
if isinstance(newdef,list):
|
||||
deflist = newdef
|
||||
else:
|
||||
deflist = [ newdef ]
|
||||
@ -1456,7 +1461,7 @@ class env_t(object):
|
||||
@type include_dir: string or list of strings
|
||||
@param include_dir: string to add to the CPPPATH environment variable
|
||||
"""
|
||||
if isinstance(include_dir,types.ListType):
|
||||
if isinstance(include_dir,list):
|
||||
lst = include_dir
|
||||
else:
|
||||
lst = [ include_dir ]
|
||||
@ -1472,7 +1477,7 @@ class env_t(object):
|
||||
@type sys_include_dir: string or list of strings
|
||||
@param sys_include_dir: string to add to the SYSTEMINCLUDE environment variable
|
||||
"""
|
||||
if isinstance(sys_include_dir,types.ListType):
|
||||
if isinstance(sys_include_dir,list):
|
||||
lst = sys_include_dir
|
||||
else:
|
||||
lst = [ sys_include_dir ]
|
||||
@ -1488,7 +1493,7 @@ class env_t(object):
|
||||
@type link_dir: string or list of strings
|
||||
@param link_dir: string to add to the LINKPATH variable
|
||||
"""
|
||||
if isinstance(link_dir,types.ListType):
|
||||
if isinstance(link_dir,list):
|
||||
for d in link_dir:
|
||||
self.env['LINKPATH'].append(util.posix_slashes(d))
|
||||
else:
|
||||
@ -1505,7 +1510,7 @@ class env_t(object):
|
||||
@param value: the value to remove
|
||||
"""
|
||||
if var in self.env:
|
||||
if isinstance(self.env[var], types.ListType):
|
||||
if isinstance(self.env[var], list):
|
||||
try:
|
||||
self.env[var].remove(value)
|
||||
except:
|
||||
@ -1530,8 +1535,8 @@ class env_t(object):
|
||||
"""
|
||||
if var not in self.env:
|
||||
self.env[var] = value
|
||||
elif isinstance(self.env[var],types.ListType):
|
||||
if isinstance(value, types.ListType):
|
||||
elif isinstance(self.env[var],list):
|
||||
if isinstance(value, list):
|
||||
self.env[var].extend(value)
|
||||
else:
|
||||
self.env[var].append(value)
|
||||
@ -1948,7 +1953,7 @@ class env_t(object):
|
||||
libs.append(lib)
|
||||
else:
|
||||
for dir in self.env['LINKPATH']:
|
||||
t = mbuild.join(dir,lib)
|
||||
t = util.join(dir,lib)
|
||||
if os.path.exists(t):
|
||||
#msgb("ADDING DERIVED DEPENDENCE ON LIBRARY", t)
|
||||
libs.append(t)
|
||||
@ -1965,7 +1970,7 @@ class env_t(object):
|
||||
exename = self.build_dir_join(exename)
|
||||
d['EXENAME'] = exename
|
||||
|
||||
if not isinstance(objs, types.ListType):
|
||||
if not isinstance(objs, list):
|
||||
objs = [ objs ]
|
||||
objs = self._escape_list_of_strings(objs)
|
||||
obj = " ".join(objs)
|
||||
@ -1983,7 +1988,7 @@ class env_t(object):
|
||||
if relocate:
|
||||
libname = self.build_dir_join(libname)
|
||||
d['LIBNAME'] = libname
|
||||
if not isinstance(objs,types.ListType):
|
||||
if not isinstance(objs,list):
|
||||
objs = [ objs ]
|
||||
objs = self._escape_list_of_strings(objs)
|
||||
obj = " ".join(objs)
|
||||
@ -1992,10 +1997,10 @@ class env_t(object):
|
||||
self._escape_dict(d)
|
||||
n = []
|
||||
scmd = self.env['STATIC_LIB_COMMAND']
|
||||
if not isinstance(scmd,types.ListType):
|
||||
if not isinstance(scmd,list):
|
||||
scmd = [ scmd ]
|
||||
for cmd in scmd:
|
||||
if isinstance(cmd,types.StringType):
|
||||
if util.is_stringish(cmd):
|
||||
n.append(self.expand_string(cmd, d))
|
||||
else:
|
||||
n.append(cmd)
|
||||
@ -2020,7 +2025,7 @@ class env_t(object):
|
||||
libname = self.build_dir_join(libname)
|
||||
d['LIBNAME'] = libname
|
||||
d['SOLIBNAME'] = os.path.basename(libname)
|
||||
if not isinstance(objs,types.ListType):
|
||||
if not isinstance(objs,list):
|
||||
objs = [ objs ]
|
||||
objs = self._escape_list_of_strings(objs)
|
||||
obj = " ".join(objs)
|
||||
|
@ -17,12 +17,13 @@
|
||||
# limitations under the License.
|
||||
#
|
||||
#END_LEGAL
|
||||
|
||||
from __future__ import print_function
|
||||
import sys
|
||||
import os
|
||||
import re
|
||||
from stat import *
|
||||
|
||||
|
||||
def _get_mode(fn):
|
||||
"get the mode of the file named fn, suitable for os.chmod() or open() calls"
|
||||
mode = os.stat(fn)[ST_MODE]
|
||||
@ -90,10 +91,10 @@ def _shell_script(lines):
|
||||
second = lines[1];
|
||||
|
||||
if re.match("#!",first):
|
||||
#print "\t\t First script test true"
|
||||
#print ("\t\t First script test true")
|
||||
return True
|
||||
if re.search("-\*-",first) or re.search("-\*-",second):
|
||||
#print "\t\t Second script test true"
|
||||
#print ("\t\t Second script test true")
|
||||
return True
|
||||
return False
|
||||
|
||||
@ -122,7 +123,7 @@ def apply_header_to_data_file(header, file, prefix="#"):
|
||||
"apply header to file using script comment style"
|
||||
f = open(file,"r")
|
||||
mode = _get_mode(file)
|
||||
#print "file: " + file + " mode: " + "%o" % mode
|
||||
#print ("file: " + file + " mode: " + "%o" % mode)
|
||||
contents = f.readlines()
|
||||
f.close()
|
||||
trimmed_contents = _remove_existing_header(contents, prefix)
|
||||
@ -146,13 +147,13 @@ def apply_header_to_data_file(header, file, prefix="#"):
|
||||
####################################################################
|
||||
if __name__ == '__main__':
|
||||
if len(sys.argv) < 4:
|
||||
print "Usage " + sys.argv[0] + " [-s|-t] legal-header file-name [file-name...]\n"
|
||||
print ("Usage " + sys.argv[0] + " [-s|-t] legal-header file-name [file-name...]\n")
|
||||
sys.exit(1)
|
||||
|
||||
type = sys.argv[1]
|
||||
header_file = sys.argv[2]
|
||||
if not os.path.exists(header_file):
|
||||
print "Could not find header file: [%s]\n" % (header_file)
|
||||
print ("Could not find header file: [%s]\n" % (header_file))
|
||||
sys.exit(1)
|
||||
|
||||
files_to_tag = sys.argv[3:]
|
||||
@ -171,5 +172,5 @@ if __name__ == '__main__':
|
||||
if re.search(".svn",file) == None and re.search(".new$",file) == None:
|
||||
apply_header_to_data_file(header, file.strip())
|
||||
else:
|
||||
print "2nd argument must be -s or -t\n"
|
||||
print ("2nd argument must be -s or -t\n")
|
||||
sys.exit(1)
|
||||
|
@ -33,13 +33,13 @@
|
||||
"""Environment setup for Microsoft Visual Studio. Set INCLUDE,
|
||||
LIBPATH, LIB, PATH, VCINSTALLDIR, VS80COMNTOOLS, VSINSTALLDIR, etc.
|
||||
"""
|
||||
|
||||
from __future__ import print_function
|
||||
import os
|
||||
import sys
|
||||
import platform
|
||||
from base import *
|
||||
from util import *
|
||||
from env import *
|
||||
from .base import *
|
||||
from .util import *
|
||||
from .env import *
|
||||
|
||||
########################################################################
|
||||
def set_env(v,s):
|
||||
@ -54,7 +54,7 @@ def set_env(v,s):
|
||||
# error msg.
|
||||
try:
|
||||
os.environ[v]=s
|
||||
except Exception, e:
|
||||
except Exception as e:
|
||||
sys.stderr.write( str(e) + '\n')
|
||||
sys.stderr.write("Env Variable [%s]\n" % (v))
|
||||
sys.stderr.write("Original was [%s]\n" % (orig))
|
||||
@ -75,10 +75,10 @@ def add_env(v,s):
|
||||
"""Add v=v;old_vs to the shell environment. Inserts at front"""
|
||||
if 0:
|
||||
if os.path.exists(s):
|
||||
tag = "GOOD"
|
||||
tag = "GOOD"
|
||||
else:
|
||||
tag = "BAD"
|
||||
print "{} {}".format(tag,s)
|
||||
tag = "BAD"
|
||||
print ("{} {}".format(tag,s))
|
||||
v.insert(0,s)
|
||||
########################################################################
|
||||
|
||||
@ -687,9 +687,9 @@ def _find_latest_subdir(d):
|
||||
ctime = 0
|
||||
for g in glob(d + '*'):
|
||||
gtime = os.path.getctime(g)
|
||||
if gtime > ctime:
|
||||
ctime = gtime
|
||||
subdir = g
|
||||
if gtime > ctime:
|
||||
ctime = gtime
|
||||
subdir = g
|
||||
return subdir
|
||||
def _ijoin(x,y):
|
||||
return '{}/{}'.format(x,y)
|
||||
@ -1025,7 +1025,7 @@ def _set_msvs_dev14(env, x64_host, x64_target, regv=None): # msvs 2015
|
||||
add_env(path, t + '/bin/x86')
|
||||
|
||||
if sdk10a:
|
||||
b = _find_latest_subdir(sdk10a + '/bin/')
|
||||
b = _find_latest_subdir(sdk10a + '/bin/')
|
||||
add_env(path, b + '/x64')
|
||||
else:
|
||||
add_env(path, sdk81a + '/bin/NETFX 4.5.1 Tools/x64')
|
||||
@ -1072,7 +1072,7 @@ def _set_msvs_dev14(env, x64_host, x64_target, regv=None): # msvs 2015
|
||||
add_env(path, t + '/bin/x86')
|
||||
|
||||
if sdk10a:
|
||||
b = _find_latest_subdir(sdk10a + '/bin/')
|
||||
b = _find_latest_subdir(sdk10a + '/bin/')
|
||||
add_env(path, b + '/x64')
|
||||
else:
|
||||
add_env(path, sdk81a + '/bin/NETFX 4.5.1 Tools')
|
||||
@ -1125,18 +1125,23 @@ def _figure_out_msvs_version_filesystem(env, specific_version=0):
|
||||
return str(v)
|
||||
return None # we don't know
|
||||
|
||||
_is_py2 = sys.version[0] == '2'
|
||||
|
||||
def _read_registry(root,key,value):
|
||||
import _winreg
|
||||
if _is_py2:
|
||||
import _winreg as winreg
|
||||
else:
|
||||
import winreg
|
||||
try:
|
||||
hkey = _winreg.OpenKey(root, key)
|
||||
hkey = winreg.OpenKey(root, key)
|
||||
except:
|
||||
return None
|
||||
try:
|
||||
(val, typ) = _winreg.QueryValueEx(hkey, value)
|
||||
(val, typ) = winreg.QueryValueEx(hkey, value)
|
||||
except:
|
||||
_winreg.CloseKey(hkey)
|
||||
winreg.CloseKey(hkey)
|
||||
return None
|
||||
_winreg.CloseKey(hkey)
|
||||
winreg.CloseKey(hkey)
|
||||
return val
|
||||
|
||||
def pick_compiler(env):
|
||||
@ -1161,12 +1166,16 @@ def _pick_compiler_until_dev14(env):
|
||||
return toolchain
|
||||
|
||||
def _find_msvc_in_registry(env,version):
|
||||
import _winreg
|
||||
if _is_py2:
|
||||
import _winreg as winreg
|
||||
else:
|
||||
import winreg
|
||||
|
||||
vs_ver = str(version) + '.0'
|
||||
vs_key = 'SOFTWARE\\Microsoft\\VisualStudio\\' + vs_ver + '\\Setup\\VS'
|
||||
vc_key = 'SOFTWARE\\Microsoft\\VisualStudio\\' + vs_ver + '\\Setup\\VC'
|
||||
vs_dir = _read_registry(_winreg.HKEY_LOCAL_MACHINE, vs_key, 'ProductDir')
|
||||
vc_dir = _read_registry(_winreg.HKEY_LOCAL_MACHINE, vc_key, 'ProductDir')
|
||||
vs_dir = _read_registry(winreg.HKEY_LOCAL_MACHINE, vs_key, 'ProductDir')
|
||||
vc_dir = _read_registry(winreg.HKEY_LOCAL_MACHINE, vc_key, 'ProductDir')
|
||||
|
||||
# On a 64-bit host, look for a 32-bit installation
|
||||
|
||||
@ -1175,9 +1184,9 @@ def _find_msvc_in_registry(env,version):
|
||||
vs_ver + '\\Setup\\VS'
|
||||
vc_key = 'SOFTWARE\\Wow6432Node\\Microsoft\\VisualStudio\\' + \
|
||||
vs_ver + '\\Setup\\VC'
|
||||
vs_dir = _read_registry(_winreg.HKEY_LOCAL_MACHINE,
|
||||
vs_dir = _read_registry(winreg.HKEY_LOCAL_MACHINE,
|
||||
vs_key, 'ProductDir')
|
||||
vc_dir = _read_registry(_winreg.HKEY_LOCAL_MACHINE,
|
||||
vc_dir = _read_registry(winreg.HKEY_LOCAL_MACHINE,
|
||||
vc_key, 'ProductDir')
|
||||
return (vs_dir,vc_dir)
|
||||
|
||||
|
@ -19,12 +19,13 @@
|
||||
#END_LEGAL
|
||||
|
||||
"""Function for header include scanning"""
|
||||
|
||||
from __future__ import print_function
|
||||
import re
|
||||
import os
|
||||
import sys
|
||||
import base
|
||||
import util
|
||||
from . import base
|
||||
from . import util
|
||||
|
||||
|
||||
class mbuild_header_record_t:
|
||||
"""Stores information about headers that we find"""
|
||||
@ -56,6 +57,13 @@ def mbuild_compute_path(hname, search_path):
|
||||
mbuild_include_pattern = re.compile(r'^[ \t]*#[ \t]*include[ \t]+"(?P<hdr>[^"]+)"')
|
||||
mbuild_nasm_include_pattern = re.compile(r'^[ \t]*%include[ \t]+"(?P<hdr>[^"]+)"')
|
||||
|
||||
is_py2 = sys.version[0] == '2'
|
||||
def _open_errors(fn):
|
||||
if is_py2:
|
||||
return open(fn, 'r')
|
||||
else:
|
||||
return open(fn, 'r', errors='ignore')
|
||||
|
||||
def mbuild_scan(fn, search_path):
|
||||
"""Given a file name fn, and a list of search paths, scan for
|
||||
headers in fn and return a list of mbuild_header_record_t's. The
|
||||
@ -74,21 +82,22 @@ def mbuild_scan(fn, search_path):
|
||||
if source_path == '':
|
||||
source_path = '.'
|
||||
aug_search_path = [source_path] + search_path
|
||||
|
||||
for line in file(fn).readlines():
|
||||
#print line
|
||||
hgroup = mbuild_include_pattern.match(line)
|
||||
if not hgroup:
|
||||
hgroup = mbuild_nasm_include_pattern.match(line)
|
||||
if hgroup:
|
||||
hname = hgroup.group('hdr')
|
||||
#print hname
|
||||
full_name = mbuild_compute_path(hname, aug_search_path)
|
||||
if full_name:
|
||||
hr = mbuild_header_record_t(full_name)
|
||||
else:
|
||||
hr = mbuild_header_record_t(hname, found=False)
|
||||
all_names.append(hr)
|
||||
|
||||
with _open_errors(fn) as f:
|
||||
for line in f:
|
||||
#print (line)
|
||||
hgroup = mbuild_include_pattern.match(line)
|
||||
if not hgroup:
|
||||
hgroup = mbuild_nasm_include_pattern.match(line)
|
||||
if hgroup:
|
||||
hname = hgroup.group('hdr')
|
||||
#print (hname)
|
||||
full_name = mbuild_compute_path(hname, aug_search_path)
|
||||
if full_name:
|
||||
hr = mbuild_header_record_t(full_name)
|
||||
else:
|
||||
hr = mbuild_header_record_t(hname, found=False)
|
||||
all_names.append(hr)
|
||||
return all_names
|
||||
|
||||
|
||||
@ -97,7 +106,7 @@ def _test_scan():
|
||||
paths = ["/home/mjcharne/proj/learn/" ]
|
||||
all_headers = mbuild_scan("/home/mjcharne/proj/learn/foo.cpp", paths)
|
||||
for hr in all_headers:
|
||||
print hr
|
||||
print (hr)
|
||||
|
||||
if __name__ == '__main__':
|
||||
_test_scan()
|
||||
|
@ -32,14 +32,13 @@ import time
|
||||
import subprocess
|
||||
import tempfile
|
||||
import shlex
|
||||
import mbuild
|
||||
import traceback
|
||||
try:
|
||||
import cPickle as apickle
|
||||
except:
|
||||
import pickle as apickle
|
||||
|
||||
from base import *
|
||||
from .base import *
|
||||
|
||||
def find_python(env):
|
||||
"""return path to NON cygwin"""
|
||||
@ -55,7 +54,7 @@ def find_python(env):
|
||||
if os.path.exists(p):
|
||||
return p
|
||||
if not pycmd:
|
||||
mbuild.die("Could not find win32 python at these locations: %s" %
|
||||
die("Could not find win32 python at these locations: %s" %
|
||||
"\n\t" + "\n\t".join(python_commands))
|
||||
|
||||
return pycmd
|
||||
@ -248,10 +247,10 @@ def prefix_files(dir,input_files):
|
||||
@rtype: string or list of strings
|
||||
@return: input file(s) prefixed with dir sp
|
||||
"""
|
||||
if isinstance(input_files,types.ListType):
|
||||
new_files = map(lambda(x): join(dir, x), input_files)
|
||||
if isinstance(input_files,list):
|
||||
new_files = [join(dir,x) for x in input_files]
|
||||
return new_files
|
||||
elif isinstance(input_files,types.StringType):
|
||||
elif is_stringish(input_files):
|
||||
new_file = join(dir, input_files)
|
||||
return new_file
|
||||
die("Unhandled type in prefix_files: "+ str(type(input_files)))
|
||||
@ -357,8 +356,8 @@ def flip_slashes(s):
|
||||
|
||||
if on_native_windows():
|
||||
return s
|
||||
if type(s) == types.ListType:
|
||||
return map(flip_slashes, s)
|
||||
if isinstance(s, list):
|
||||
return list(map(flip_slashes, s))
|
||||
t = re.sub(r'\\',_mysep,s,0) # replace all
|
||||
return t
|
||||
|
||||
@ -370,8 +369,8 @@ def posix_slashes(s):
|
||||
@rtype: string or list of strings
|
||||
@return: string(s) with forward slashes
|
||||
"""
|
||||
if type(s) == types.ListType:
|
||||
return map(posix_slashes, s)
|
||||
if isinstance(s,list):
|
||||
return list(map(posix_slashes, s))
|
||||
#t = re.sub(r'\\','/',s,0) # replace all
|
||||
last = len(s)-1
|
||||
t=[]
|
||||
@ -388,7 +387,7 @@ def posix_slashes(s):
|
||||
def glob(s):
|
||||
"""Run the normal glob.glob() on s but make sure all the slashes
|
||||
are flipped forward afterwards. This is shorthand for
|
||||
mbuild.posix_slashes(glob.glob(s))"""
|
||||
posix_slashes(glob.glob(s))"""
|
||||
import glob
|
||||
return posix_slashes(glob.glob(s))
|
||||
|
||||
@ -427,29 +426,28 @@ def escape_special_characters(s):
|
||||
|
||||
if check_python_version(2,5):
|
||||
import hashlib
|
||||
hasher = hashlib.sha1
|
||||
else:
|
||||
import sha
|
||||
hasher = sha.new
|
||||
|
||||
def hash_list(list_of_strings):
|
||||
"""Compute a sha1 hash of a list of strings and return the hex digest"""
|
||||
if check_python_version(2,5):
|
||||
m = hashlib.sha1()
|
||||
else:
|
||||
m = sha.new()
|
||||
map(lambda (x): m.update(x), list_of_strings)
|
||||
d = m.hexdigest()
|
||||
return d
|
||||
m = hasher()
|
||||
for l in list_of_strings:
|
||||
m.update(l.encode('utf-8'))
|
||||
return m.hexdigest()
|
||||
|
||||
|
||||
def hash_file(fn):
|
||||
if os.path.exists(fn):
|
||||
try:
|
||||
lines = file(fn).readlines()
|
||||
except:
|
||||
die("COULD NOT READ: %s" % (fn))
|
||||
signature = hash_list(lines)
|
||||
return signature
|
||||
return None
|
||||
if not os.path.exists(fn):
|
||||
return None
|
||||
m = hasher()
|
||||
with open(fn,'rb') as afile:
|
||||
buf = afile.read()
|
||||
m.update(buf)
|
||||
return m.hexdigest()
|
||||
|
||||
|
||||
|
||||
def write_signatures(fn,d):
|
||||
@ -610,7 +608,7 @@ def _cond_open_input_file(directory,input_file_name):
|
||||
fn = os.path.join(directory, input_file_name)
|
||||
else:
|
||||
fn = input_file_name
|
||||
input_file_obj = file(fn,"r")
|
||||
input_file_obj = open(fn,"r")
|
||||
return input_file_obj
|
||||
return None
|
||||
|
||||
@ -659,11 +657,12 @@ def run_command(cmd,
|
||||
stderr = subprocess.PIPE,
|
||||
cwd=directory,
|
||||
env=osenv,
|
||||
universal_newlines=True,
|
||||
**kwargs)
|
||||
(stdout, stderr ) = sub.communicate()
|
||||
if not isinstance(stderr,types.ListType):
|
||||
if not isinstance(stderr,list):
|
||||
stderr = [stderr]
|
||||
if not isinstance(stdout,types.ListType):
|
||||
if not isinstance(stdout,list):
|
||||
stdout = [stdout]
|
||||
return (sub.returncode, stdout, stderr)
|
||||
else:
|
||||
@ -675,13 +674,14 @@ def run_command(cmd,
|
||||
stderr = subprocess.STDOUT,
|
||||
cwd=directory,
|
||||
env=osenv,
|
||||
universal_newlines=True,
|
||||
**kwargs)
|
||||
stdout = sub.stdout.readlines()
|
||||
sub.wait()
|
||||
if not isinstance(stdout,types.ListType):
|
||||
if not isinstance(stdout,list):
|
||||
stdout = [stdout]
|
||||
return (sub.returncode, stdout, None)
|
||||
except OSError, e:
|
||||
except OSError as e:
|
||||
s= ["Execution failed for: %s\n" % (cmd) ]
|
||||
s.append("Result is %s\n" % (str(e)))
|
||||
# put the error message in stderr if there is a separate
|
||||
@ -689,11 +689,11 @@ def run_command(cmd,
|
||||
if separate_stderr:
|
||||
if stderr == None:
|
||||
stderr = []
|
||||
elif not isinstance(stderr,types.ListType):
|
||||
elif not isinstance(stderr,list):
|
||||
stderr = [stderr]
|
||||
if stdout == None:
|
||||
stdout = []
|
||||
elif not isinstance(stdout,types.ListType):
|
||||
elif not isinstance(stdout,list):
|
||||
stdout = [stdout]
|
||||
if separate_stderr:
|
||||
stderr.extend(s)
|
||||
@ -745,6 +745,7 @@ def run_command_unbufferred(cmd,
|
||||
stderr = subprocess.STDOUT,
|
||||
env=osenv,
|
||||
cwd=directory,
|
||||
universal_newlines=True,
|
||||
**kwargs)
|
||||
while 1:
|
||||
# FIXME: 2008-12-05 bad for password prompts without newlines.
|
||||
@ -759,7 +760,7 @@ def run_command_unbufferred(cmd,
|
||||
|
||||
sub.wait()
|
||||
return (sub.returncode, lines, [])
|
||||
except OSError, e:
|
||||
except OSError as e:
|
||||
lines.append("Execution failed for: %s\n" % (cmd))
|
||||
lines.append("Result is %s\n" % (str(e)))
|
||||
return (1, lines,[])
|
||||
@ -797,7 +798,7 @@ def run_command_output_file(cmd,
|
||||
lines = []
|
||||
cmd_args = _prepare_cmd(cmd)
|
||||
try:
|
||||
output = file(output_file_name,"w")
|
||||
output = open(output_file_name,"w")
|
||||
input_file_obj = _cond_open_input_file(directory, input_file_name)
|
||||
sub = subprocess.Popen(cmd_args,
|
||||
shell=use_shell,
|
||||
@ -807,6 +808,7 @@ def run_command_output_file(cmd,
|
||||
stderr = subprocess.STDOUT,
|
||||
env=osenv,
|
||||
cwd=directory,
|
||||
universal_newlines=True,
|
||||
**kwargs)
|
||||
#msgb("RUNNING SUBPROCESS")
|
||||
while 1:
|
||||
@ -821,10 +823,13 @@ def run_command_output_file(cmd,
|
||||
output.close()
|
||||
sub.wait()
|
||||
return (sub.returncode, lines, [])
|
||||
except OSError, e:
|
||||
except OSError as e:
|
||||
lines.append("Execution failed for: %s\n" % (cmd))
|
||||
lines.append("Result is %s\n" % (str(e)))
|
||||
return (1, lines,[])
|
||||
except:
|
||||
print("Unxpected error:", sys.exc_info()[0])
|
||||
raise
|
||||
|
||||
def run_cmd_io(cmd, fn_i, fn_o,shell_executable=None, directory=None):
|
||||
"""
|
||||
@ -856,12 +861,13 @@ def run_cmd_io(cmd, fn_i, fn_o,shell_executable=None, directory=None):
|
||||
stdin=fin,
|
||||
stdout=fout,
|
||||
stderr=subprocess.STDOUT,
|
||||
universal_newlines=True,
|
||||
cwd=directory)
|
||||
retval = sub.wait()
|
||||
fin.close()
|
||||
fout.close()
|
||||
return retval
|
||||
except OSError, e:
|
||||
except OSError as e:
|
||||
die("Execution failed for cmd %s\nResult is %s\n" % (cmd,str(e)))
|
||||
|
||||
def find_dir(d):
|
||||
@ -871,7 +877,7 @@ def find_dir(d):
|
||||
last = ''
|
||||
while dir != last:
|
||||
target_dir = os.path.join(dir,d)
|
||||
#print "Trying %s" % (target_dir)
|
||||
#print ("Trying %s" % (target_dir))
|
||||
if os.path.exists(target_dir):
|
||||
return target_dir
|
||||
last = dir
|
||||
@ -1017,6 +1023,7 @@ class _timed_command_t(threading.Thread):
|
||||
cwd=self.directory,
|
||||
env=self.osenv,
|
||||
stdin = input_file_obj,
|
||||
universal_newlines=True,
|
||||
**self.kwargs)
|
||||
except:
|
||||
(self.exception_type,
|
||||
@ -1106,8 +1113,8 @@ def run_command_timed( cmd,
|
||||
|
||||
# we use a temporary file to hold the output because killing the
|
||||
# process disrupts the normal output collection mechanism.
|
||||
fo = tempfile.SpooledTemporaryFile()
|
||||
fe = tempfile.SpooledTemporaryFile()
|
||||
fo = tempfile.SpooledTemporaryFile() # FIXME: PY3 mode='w+'?
|
||||
fe = tempfile.SpooledTemporaryFile() # FIXME: PY3 mode='w+'?
|
||||
tc = _timed_command_t(cmd,
|
||||
shell_executable,
|
||||
directory,
|
||||
@ -1145,4 +1152,11 @@ def run_command_timed( cmd,
|
||||
tc.exception_object,
|
||||
tc.exception_trace))
|
||||
|
||||
return (exit_code, output, stderr)
|
||||
return (exit_code, output, stderr)
|
||||
|
||||
def is_stringish(x):
|
||||
return isinstance(x,bytes) or isinstance(x,str)
|
||||
def make_list_of_str(lst):
|
||||
return [ str(x) for x in lst]
|
||||
def open_readlines(fn, mode='r'):
|
||||
return open(fn,mode).readlines()
|
||||
|
@ -19,17 +19,21 @@
|
||||
#END_LEGAL
|
||||
|
||||
"""Command objects and parallel work queue"""
|
||||
|
||||
from __future__ import print_function
|
||||
import os
|
||||
import sys
|
||||
import types
|
||||
import Queue
|
||||
is_py2 = sys.version[0] == '2'
|
||||
if is_py2:
|
||||
import Queue as queue
|
||||
else:
|
||||
import queue as queue
|
||||
from threading import Thread
|
||||
from collections import deque
|
||||
|
||||
from base import *
|
||||
from util import *
|
||||
from dag import *
|
||||
from .base import *
|
||||
from .util import *
|
||||
from .dag import *
|
||||
|
||||
|
||||
############################################################################
|
||||
@ -125,7 +129,7 @@ class command_t(object):
|
||||
self.id = command_t._ids
|
||||
command_t._ids += 1
|
||||
# store the command as a list
|
||||
if isinstance(command,types.ListType):
|
||||
if isinstance(command,list):
|
||||
self.command = command
|
||||
else:
|
||||
self.command = [ command ]
|
||||
@ -249,7 +253,7 @@ class command_t(object):
|
||||
if not isinstance(i,types.FunctionType):
|
||||
s.append(i)
|
||||
t = " - ".join(s)
|
||||
h = hash_string(t)
|
||||
h = hash_string(t.encode('utf-8'))
|
||||
return h
|
||||
|
||||
def add_before_me(self,n):
|
||||
@ -257,7 +261,7 @@ class command_t(object):
|
||||
@type n: L{command_t}
|
||||
@param n: another (earlier) command
|
||||
"""
|
||||
if isinstance(n,types.ListType):
|
||||
if isinstance(n,list):
|
||||
for x in n:
|
||||
self.before_me.append(x)
|
||||
x.after_me.append(self)
|
||||
@ -270,7 +274,7 @@ class command_t(object):
|
||||
@type n: L{command_t}
|
||||
@param n: another (later) command
|
||||
"""
|
||||
if isinstance(n, types.ListType):
|
||||
if isinstance(n, list):
|
||||
for x in n:
|
||||
self.after_me.append(x)
|
||||
x.before_me.append(self)
|
||||
@ -325,7 +329,7 @@ class command_t(object):
|
||||
for cmd in self.command:
|
||||
if isinstance(cmd,types.FunctionType):
|
||||
s.append("PYTHON FN: " + cmd.__name__)
|
||||
elif isinstance(cmd,types.StringType):
|
||||
elif is_stringish(cmd):
|
||||
s.append(cmd)
|
||||
else:
|
||||
s.append(str(cmd))
|
||||
@ -392,7 +396,7 @@ class command_t(object):
|
||||
for line in self.output:
|
||||
if tab_output:
|
||||
s.append('\t')
|
||||
s.append(line)
|
||||
s.append(ensure_string(line))
|
||||
if show_output and self.show_output and self.stderr_exists():
|
||||
s.append(bracket('STDERR'))
|
||||
s.append(nl)
|
||||
@ -407,7 +411,7 @@ class command_t(object):
|
||||
|
||||
def _extend_output(self,output):
|
||||
if output:
|
||||
if isinstance(output,types.ListType):
|
||||
if isinstance(output,list):
|
||||
self.output.extend(output)
|
||||
else:
|
||||
self.output.append(output)
|
||||
@ -415,7 +419,7 @@ class command_t(object):
|
||||
def _extend_output_stderr(self,output, stderr):
|
||||
self._extend_output(output)
|
||||
if stderr:
|
||||
if isinstance(stderr,types.ListType):
|
||||
if isinstance(stderr,list):
|
||||
self.stderr.extend(stderr)
|
||||
else:
|
||||
self.stderr.append(stderr)
|
||||
@ -446,7 +450,7 @@ class command_t(object):
|
||||
(self.exit_status, output) = cmd( self.args, self.xenv )
|
||||
self._extend_output(output)
|
||||
|
||||
elif isinstance(cmd,types.StringType):
|
||||
elif is_stringish(cmd):
|
||||
# execute command strings
|
||||
if self.output_file_name:
|
||||
(self.exit_status, output, stderr) = \
|
||||
@ -480,12 +484,12 @@ class command_t(object):
|
||||
|
||||
else:
|
||||
self.exit_status = 1
|
||||
self.extend_output("Unhandled command object: " + self.dump())
|
||||
self._extend_output("Unhandled command object: " + self.dump())
|
||||
|
||||
# stop if something failed
|
||||
if self.exit_status != 0:
|
||||
break;
|
||||
except Exception, e:
|
||||
except Exception as e:
|
||||
self.exit_status = 1
|
||||
self.stderr.append("Execution error for: %s\n%s" % (str(e), self.dump()))
|
||||
break
|
||||
@ -505,7 +509,7 @@ def _worker_one_task(incoming,outgoing):
|
||||
outgoing.put(item)
|
||||
return False
|
||||
item.execute()
|
||||
#incoming.task_done() # PYTHON2.5 ONLY
|
||||
incoming.task_done()
|
||||
outgoing.put(item)
|
||||
return True
|
||||
|
||||
@ -533,9 +537,9 @@ class work_queue_t(object):
|
||||
|
||||
# worker threads can add stuff to the new_queue so we
|
||||
# use an MT-safe queue.
|
||||
self.new_queue = Queue.Queue(0)
|
||||
self.out_queue = Queue.Queue(0)
|
||||
self.back_queue = Queue.Queue(0)
|
||||
self.new_queue = queue.Queue(0)
|
||||
self.out_queue = queue.Queue(0)
|
||||
self.back_queue = queue.Queue(0)
|
||||
self.pending_commands = deque()
|
||||
|
||||
self.message_delay = 10
|
||||
@ -616,11 +620,6 @@ class work_queue_t(object):
|
||||
elapsed = get_elapsed_time(self.start_time, self.end_time)
|
||||
return elapsed
|
||||
|
||||
def __del__(self):
|
||||
if verbose(3):
|
||||
msgb("DEL WORK QUEUE")
|
||||
self._terminate()
|
||||
|
||||
def _terminate(self):
|
||||
"""Shut everything down. Kill the worker threads if any were
|
||||
being used. This is called when the work_queue_t is garbage
|
||||
@ -711,7 +710,7 @@ class work_queue_t(object):
|
||||
msgb("ADD CMD", str(type(command)))
|
||||
|
||||
if command:
|
||||
if isinstance(command,types.ListType):
|
||||
if isinstance(command,list):
|
||||
for c in command:
|
||||
if verbose(5):
|
||||
msgb("ADD CMD", str(type(c)))
|
||||
@ -823,8 +822,9 @@ class work_queue_t(object):
|
||||
self.running -= 1
|
||||
self.finished += 1
|
||||
self.running_commands.remove(cmd)
|
||||
self.back_queue.task_done()
|
||||
return cmd
|
||||
except Queue.Empty:
|
||||
except queue.Empty:
|
||||
return None
|
||||
except KeyboardInterrupt:
|
||||
msgb('INTERRUPT')
|
||||
@ -896,7 +896,7 @@ class work_queue_t(object):
|
||||
# some stuff did not build, force an error status return
|
||||
msgb("ERROR: DID NOT BUILD SOME STUFF", "\n\t".join(did_not_build))
|
||||
if self.dag:
|
||||
print self.dag.dump()
|
||||
print (self.dag.dump())
|
||||
self.end_time = get_time()
|
||||
self._cleanup()
|
||||
return False
|
||||
@ -963,7 +963,7 @@ class work_queue_t(object):
|
||||
for x in self.dag._enable_successors(c):
|
||||
self.add(x.creator)
|
||||
if c and (self.show_errors_only==False or c.failed()):
|
||||
print c.dump(show_output=show_output)
|
||||
print (c.dump(show_output=show_output))
|
||||
if self._done():
|
||||
break;
|
||||
return okay
|
||||
@ -1009,7 +1009,7 @@ class work_queue_t(object):
|
||||
for x in self.dag._enable_successors(c):
|
||||
self.add(x.creator)
|
||||
if self.show_errors_only==False or c.failed():
|
||||
print c.dump(show_output=show_output)
|
||||
print (c.dump(show_output=show_output))
|
||||
self._status()
|
||||
if self._done():
|
||||
break;
|
||||
|
@@ -18,6 +18,7 @@
 #
 #END_LEGAL

+from __future__ import print_function
 import sys
 import find
 import mbuild
@@ -29,7 +30,7 @@ if 'clean' in env['targets']:
     mbuild.remove_tree(env['build_dir'])
     sys.exit(0)
 if not env.on_linux():
-    print "This is a linux only test"
+    print ("This is a linux only test" )
     sys.exit(0)

 mbuild.cmkdir(env['build_dir'])
@@ -25,6 +25,10 @@ import mbuild

 env = mbuild.env_t(init_verbose=0)
 env.parse_args()
+if not env.on_windows():
+    print ("This is a windows only test" )
+    sys.exit(0)
+
 #mbuild.build_env.set_env_icl(env)
 mbuild.cmkdir(env['build_dir'])
 dag = mbuild.dag_t()
@@ -16,7 +16,7 @@
 # limitations under the License.
 #
 #END_LEGAL

+from __future__ import print_function
 import find
 import mbuild
-print mbuild.get_elapsed_time(10,0)
+print (mbuild.get_elapsed_time(10,0))
@@ -23,6 +23,7 @@

 import os
 import sys
 import find
 import mbuild

 env = mbuild.env_t()
@@ -1,4 +1,4 @@
 #!/usr/bin/env python
-
+from __future__ import print_function
 for i in range(0,1000*1000*100):
-    print i
+    print(i)
@@ -18,23 +18,25 @@
 #
 #END_LEGAL

+from __future__ import print_function
 import find
 import mbuild

 def dump(lines):
     if lines:
         for line in lines:
             line = line.strip()
-            print "::" + line
+            print ("::" + line)
     else:
-        print "(EMPTY)"
+        print ("(EMPTY)")

 env = mbuild.env_t(0)
 env.parse_args()

-infile = file('stdin.py')
-retval,output,error = mbuild.run_command('cat', stdin=infile)
-print "EXIT STATUS ", str(retval)
-print "OUTPUT LINES "
+
+retval,output,error = mbuild.run_command('cat', input_file_name='stdin.py')
+print ("EXIT STATUS ", str(retval))
+print ("OUTPUT LINES ")
 dump(output)
-print "ERROR LINES "
+print ("ERROR LINES ")
 dump(error)
@@ -17,6 +17,7 @@
 # limitations under the License.
 #
 #END_LEGAL
+from __future__ import print_function
 import find
 import mbuild

@@ -24,8 +25,8 @@ env = mbuild.env_t(0)
 env.parse_args()

 retval, output, error = mbuild.run_command_timed('./spew', seconds=2)
-print "RETURN CODE ", retval
-print "OUTPUT LINES ", len(output)
-print "ERROR LINES ", len(error)
+print ("RETURN CODE ", retval)
+print ("OUTPUT LINES ", len(output))
+print ("ERROR LINES ", len(error))
 for l in error:
-    print 'ERROR OUTPUT [{}]'.format(l.strip())
+    print ('ERROR OUTPUT [{}]'.format(l.strip()))
@@ -17,6 +17,7 @@
 # limitations under the License.
 #
 #END_LEGAL
+from __future__ import print_function
 import find
 import mbuild

@@ -39,9 +40,9 @@ def some_python_fn(a,b):
         for j in range(0,n):
             for k in range(0,n):
                 x += i*j*k
-    return (0, [str(x)], [])
+    return (0, [str(x)])

-c3 = mbuild.command_t(some_python_fn, seconds=2, show_output=False)
+c3 = mbuild.command_t(some_python_fn, seconds=2, show_output=True)
 work_queue.add(c3)


@@ -53,8 +54,8 @@ if okay:
 else:
     mbuild.msgb('BUILD', 'failed')

-print len(c2.output)
-print c2.output[0:10]
-print str(c2.stderr)
-print str(c3.output)
+print (len(c2.output))
+print (c2.output[0:10])
+print (str(c2.stderr))
+print (str(c3.output))
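The last hunk above changes the python-function command's return value from a 3-tuple to a 2-tuple. Per the command_t execution code in the work_queue hunks, a function-style command is invoked as fn(args, env) and must return (exit_status, output_lines). A minimal sketch along those lines; the function name and side effect are illustrative, and the bare work_queue_t()/build() calls assume defaults, as the test script above does:

    import mbuild

    def touch_marker(args, env):
        # command_t calls python commands as fn(args, env) and expects
        # (exit_status, list_of_output_lines) back
        with open('marker.txt', 'w') as f:   # illustrative side effect
            f.write('done\n')
        return (0, ['wrote marker.txt'])

    work_queue = mbuild.work_queue_t()
    work_queue.add(mbuild.command_t(touch_marker, show_output=True))
    okay = work_queue.build()
    mbuild.msgb('BUILD', 'succeeded' if okay else 'failed')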