filter() -> list(filter()); lambda(x) -> lambda x; map(lambda ...) -> list comprehension

Change-Id: Iea61822578f708a1485c528ed008340e2d44a687
(cherry picked from commit 369e1c06f7c70f743f7c2d92f336b3fe2a5ef07e)
Mark Charney 2017-06-10 20:03:28 -04:00
parent 8416dbeab2
commit a3a015df71
20 changed files with 81 additions and 82 deletions
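Every hunk below applies the same Python 3 porting idioms: map() and filter() now return lazy iterators instead of lists, and the Python 2 tuple-parameter form lambda (x): ... was removed (PEP 3113). A minimal sketch of the three rewrites, using made-up data rather than code from this repository:

    # Hypothetical example data -- not from the XED sources.
    ranges = [(0, 1, 2, 'alpha'), (3, 4, 5, 'beta')]

    # Python 2 style (invalid or no longer list-valued in Python 3):
    #   indices = map(lambda(x): x[3], ranges)         # lambda(x) is a SyntaxError in Python 3
    #   valid   = filter(lambda(x): x[0] >= 0, ranges) # filter object, not a list

    # Python 3 rewrites used throughout this commit:
    indices = [x[3] for x in ranges]                   # map(lambda ...) -> list comprehension
    valid = list(filter(lambda x: x[0] >= 0, ranges))  # filter(...) -> list(filter(...))
    names = [x[3].upper() for x in valid]              # lambda(x) -> lambda x, or drop the lambda entirely

    assert indices == ['alpha', 'beta']
    assert names == ['ALPHA', 'BETA']

Wrapping filter() in list() matters wherever the result is indexed, passed to len(), sorted, or iterated more than once, which is why the generator scripts below wrap it consistently.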

@@ -981,7 +981,7 @@ class array_gen_t(object):
 def emit_initialization(self,verbose=False): # private
 """Return a list of strings containing array initialization lines"""
 lines = []
-indices = map(lambda(x): x[3], self.ranges) # get the argnames
+indices = [ x[3] for x in self.ranges] # get the argnames
 missing_key = None
 missed_one = True

@@ -139,8 +139,8 @@ def work(lines, xeddir = '.', gendir = 'obj'):
 _read_constant_tables(lines,tables)
-tables=filter(lambda(x): x.valid() , tables)
-names=map(lambda(x): x.name , tables)
+tables=list(filter(lambda x: x.valid() , tables))
+names= [ x.name for x in tables ]
 srcs = emit_convert_enum(['INVALID'] + names, xeddir, gendir)
 src_file_name = 'xed-convert-table-init.c'
@@ -158,7 +158,7 @@ def work(lines, xeddir = '.', gendir = 'obj'):
 for t in tables:
 l = t.emit_init()
-l = map(lambda(x): x+'\n', l)
+l = [ x+'\n' for x in l]
 xfe.writelines(l)
 fo = codegen.function_object_t('xed_init_convert_tables', 'void')
@@ -187,7 +187,7 @@ def work(lines, xeddir = '.', gendir = 'obj'):
 hdr.append(" unsigned int limit;\n")
 hdr.append("} xed_convert_table_t;")
 hdr.append("extern xed_convert_table_t xed_convert_table[XED_OPERAND_CONVERT_LAST];")
-hfe.writelines(map(lambda(x): x+'\n', hdr))
+hfe.writelines( [ x+'\n' for x in hdr] )
 hfe.close()
 srcs.append(hfe.full_file_name)

@@ -292,7 +292,7 @@ class enumer_t(object):
 # but we must not put this value in the value-2-string
 # table. It must be in a separate string2value table.
 #print "\t\t [%s]" % ( str(vals[0:n]))
-previous_values = map(lambda(x): x.name, vals[0:n])
+previous_values = [ x.name for x in vals[0:n] ]
 if self.debug:
 print("\t\t [%s]" % ( str(previous_values)))
 if v.value in previous_values:

@@ -27,7 +27,7 @@ import sys
 from genutil import *
 def _convert_to_list_of_string(x):
-return map(lambda a: str(a), x)
+return [ str(a) for a in x]
 def _curly_list(x):
 return ('{' + ','.join(x) + '},')
 def _curly_string(x):
@@ -53,7 +53,7 @@ class flag_set_t(object):
 ('fc1',1),
 ('fc2',1),
 ('fc3',1) ]
-field_names = map(lambda x: x[0], field_pairs)
+field_names = [ x[0] for x in field_pairs]
 def __init__(self, very_technically_accurate=False):
 for (f,w) in flag_set_t.field_pairs:
@@ -291,7 +291,7 @@ class flags_info_t(object):
 # flags_recs is a list of flag_rec_t's. Usually 0 or 1. Sometimes 3
 if input_line != '':
 lst = input_line.split(',')
-self.flags_recs = map(lambda(x): flags_rec_t(x.strip()), lst)
+self.flags_recs = [ flags_rec_t(x.strip()) for x in lst]
 else:
 self.flags_recs = []

@@ -58,14 +58,14 @@ def work(args): # main function
 (insts,undoc) = check(args.chip, xeddb, chip_db)
-ilist = list(set(map(lambda x: x.iclass, insts)))
+ilist = list(set( [ x.iclass for x in insts ] ))
 ilist.sort()
-ulist = list(set(map(lambda x: x.iclass, undoc)))
+ulist = list(set( [x.iclass for x in undoc] ))
 ulist.sort()
 if args.otherchip:
 (insts2,undoc2) = check(args.otherchip, xeddb, chip_db)
-ilist2 = list(set(map(lambda x: x.iclass, insts2)))
-ulist2 = list(set(map(lambda x: x.iclass, undoc2)))
+ilist2 = list(set( [ x.iclass for x in insts2] ))
+ulist2 = list(set( [ x.iclass for x in undoc2] ))
 s1 = set(ilist + ulist)
 s2 = set(ilist2 + ulist2)
 d12 = list(s1-s2)

@@ -532,7 +532,7 @@ class bits_list_t(object):
 def just_bits(self):
 """ return a string of just the bits"""
-s = map(lambda(x): x.just_bits(), self.bits)
+s = [ x.just_bits() for x in self.bits]
 o = []
 i = 0
 for b in s:
@@ -591,7 +591,7 @@ def read_dict_spec(fn):
 die("Could not read file: " + fn)
 lines = open(fn,'r').readlines()
 lines = map(no_comments, lines)
-lines = filter(blank_line, lines)
+lines = list(filter(blank_line, lines))
 for line in lines:
 wrds = line.split()
 key = wrds[0]
@@ -615,7 +615,7 @@ def read_state_spec(fn):
 die("Could not read file: " + fn)
 lines = open(fn,'r').readlines()
 lines = map(no_comments, lines)
-lines = filter(blank_line, lines)
+lines = list(filter(blank_line, lines))
 for line in lines:
 ## remove comment lines
 #line = no_comments(line)
@@ -2050,8 +2050,8 @@ def partition_by_required_values(options, instructions, bitpos, token,
 ", ".join(map(str,all_values_for_this_od)))
 for a in all_values_for_this_od:
 all_values[a]=True
-trimmed_vals = filter(lambda (x): x != operand_decider.requirement,
-all_values_for_this_od)
+trimmed_vals = list(filter(lambda x: x != operand_decider.requirement,
+all_values_for_this_od))
 if len(trimmed_vals) == 0:
 die("We had a not-equals requirement but did" +
 " not have any other values to bin against.")
@@ -2896,8 +2896,8 @@ def decorate_operand(options,opnd,ii):
 pass
 elif opnd.type == 'imm':
 if ii.prebindings and opnd.name in ii.prebindings:
-opnd.bit_positions = \
-map(lambda(x): x.pbit, ii.prebindings[opnd.name].bit_info_list)
+opnd.bit_positions = [ x.pbit for x in
+ii.prebindings[opnd.name].bit_info_list ]
 else:
 collect_immediate_operand_bit_positions(options,opnd, ii)
 opnd.rightmost_bitpos = max(opnd.bit_positions)
@@ -3380,7 +3380,7 @@ def emit_iclass_rep_ops(agi):
 o.no_rep_indx = agi.iclasses_enum_order.index(o.no_rep_iclass)
 # make a list of keys for the norep-to-whatever hash functions
-no_rep_keys = uniqueify(map(lambda x:x.no_rep_indx, repobjs))
+no_rep_keys = uniqueify( [x.no_rep_indx for x in repobjs])
 no_rep_keys.sort()
 msge("NOREP KEYS: {}".format(str(no_rep_keys)))
@@ -3437,11 +3437,11 @@ def emit_iclass_enum_info(agi):
 """Emit major enumerations based on stuff we collected from the
 graph."""
 msge('emit_iclass_enum_info')
-iclasses = map(lambda (s): s.upper(),agi.iclasses)
+iclasses = [s.upper() for s in agi.iclasses]
 add_invalid(iclasses)
 # 2...9 # omitting NOP1
-iclasses.extend(map(lambda(x): "NOP%s" % (str(x)), range(2,10)))
+iclasses.extend( [ "NOP%s" % (str(x)) for x in range(2,10)])
 iclasses = uniqueify(iclasses)
 # sort each to make sure INVALID is first
@@ -3495,20 +3495,20 @@ def emit_enum_info(agi):
 graph."""
 msge('emit_enum_info')
 # make everything uppercase
-nonterminals = map(lambda (s): s.upper(), agi.nonterminal_dict.keys())
-operand_types = map(lambda (s): s.upper(),agi.operand_types.keys())
-operand_widths = map(lambda (s): s.upper(),agi.operand_widths.keys())
+nonterminals = [ s.upper() for s in agi.nonterminal_dict.keys()]
+operand_types = [ s.upper() for s in agi.operand_types.keys()]
+operand_widths = [ s.upper() for s in agi.operand_widths.keys()]
-operand_names = map(lambda (s): s.upper(),
-agi.operand_storage.get_operands().keys())
+operand_names = [ s.upper() for s in
+agi.operand_storage.get_operands().keys() ]
 msge("OPERAND-NAMES " + " ".join(operand_names))
-extensions = map(lambda (s): s.upper(),agi.extensions)
-categories = map(lambda (s): s.upper(),agi.categories)
-attributes = map(lambda (s): s.upper(),agi.attributes)
+extensions = [ s.upper() for s in agi.extensions]
+categories = [ s.upper() for s in agi.categories]
+attributes = [ s.upper() for s in agi.attributes]
 # remove the things with equals signs
-attributes = filter(lambda (s): s.find('=') == -1 ,attributes)
+attributes = list(filter(lambda s: s.find('=') == -1 ,attributes))
 # add an invalid entry to each in the first spot if it is not
@@ -3742,7 +3742,7 @@ def compute_iforms(options, gi, operand_storage_dict):
 if viform():
 for iform,iilist in iforms.iteritems():
 msge("IFORM %s: %s" % (iform,
-" ".join(map(lambda(x): x.iclass, iilist))))
+" ".join([x.iclass for x in iilist] )))
 for iclass,iformlist in ii_iforms.iteritems():
 str_iforms = {}
@@ -3924,7 +3924,7 @@ def make_attributes_equation(agi,ii):
 if field_check(ii,'attributes'):
 if ii.attributes:
 trimmed_attributes = \
-filter(lambda (s): s.find('=') == -1 ,ii.attributes)
+list(filter(lambda s: s.find('=') == -1 ,ii.attributes))
 if len(trimmed_attributes) > 0:
 trimmed_attributes.sort()
@@ -4112,7 +4112,7 @@ def code_gen_instruction(agi, options, ii, state_dict, fo,
 # emit attributes
 attributes_index = make_attributes_equation(agi,ii)
-operand_names = map(lambda(x): x.name.upper(), ii.operands)
+operand_names = [ x.name.upper() for x in ii.operands]
 # THE NEW WAY - DATA INITIALIZATION -- see include/private/xed-inst-defs.h
 cpl = '3'
@@ -4340,7 +4340,7 @@ def compress_iform_strings(values):
 len(bases),
 len(operand_sigs)))
-if len(h) != (max(map(lambda x: int(x), h.keys()))+1):
+if len(h) != (max( [ int(x) for x in h.keys()] )+1):
 print("PROBLEM IN h LENGTH")
 # make an numerically indexed version of the bases table
 bi = {}
@@ -4468,7 +4468,7 @@ def collect_and_emit_iforms(agi,options):
 #msge("NTUPLES %s" % (str(ntuples)))
 # rip off first two fields of vtuples
-vtuples = map(lambda(x): x[2:], ntuples)
+vtuples = [ x[2:] for x in ntuples]
 #for t in vtuples:
 # msge("TUPLE " + str(t))
@@ -4476,7 +4476,7 @@ def collect_and_emit_iforms(agi,options):
 # compress_iform_strings(vtuples)
 # rip off first two fields of vtuples
-first_last_tuples = map(lambda(x): x[2:], first_last_tuples)
+first_last_tuples = [ x[2:] for x in first_last_tuples]
 generate_iform_first_last_enum(agi,options,first_last_tuples)
 #emit imax in global iclass order for data-initialization!
@@ -4609,8 +4609,8 @@ def merge_nodes(options,node):
 merging = True
 while merging:
 all_match = True
-decider_bits = map(lambda(k): node.next[k].decider_bits ,
-node.next.keys())
+decider_bits = [ node.next[k].decider_bits for k in
+node.next.keys() ]
 if not all_the_same(decider_bits):
 if vmerge():
 msge("Not merging because unequal numbers of decider" +
@@ -5309,7 +5309,7 @@ class all_generator_info_t(object):
 # remember the c & h file names
 self.add_file_name(m.src_full_file_name)
 self.add_file_name(m.hdr_full_file_name,header=True)
-all_values = map(lambda x: x.name ,m.tuples)
+all_values = [ x.name for x in m.tuples ]
 return all_values
@@ -5711,7 +5711,7 @@ def call_chipmodel(agi):
 def read_cpuid_mappings(fn):
 lines = open(fn,'r').readlines()
 lines = map(no_comments, lines)
-lines = filter(blank_line, lines)
+lines = list(filter(blank_line, lines))
 d = {} # isa-set to list of cpuid records
 for line in lines:
 wrds = line.split(':')
@@ -5960,7 +5960,7 @@ def gen_regs(options,agi):
 # remove comments and blank lines
 # regs_list is a list of reg_info_t's
 regs_list = refine_regs.refine_regs_input(lines)
-regs = map(lambda x: x.name, regs_list)
+regs = [ x.name for x in regs_list]
 agi.all_enums['xed_reg_enum_t'] = regs
 (cfn, hfn) = emit_regs_enum(options, regs_list)
@@ -6031,7 +6031,7 @@ def refine_widths_input(lines):
 return widths_list
 def emit_widths_enum(options, widths_list):
-just_width_names = map(lambda(x) : x.name, widths_list)
+just_width_names = [ x.name for x in widths_list]
 width_enum = enum_txt_writer.enum_info_t(just_width_names,
 options.xeddir, options.gendir,
 'xed-operand-width',

@@ -121,7 +121,7 @@ def get_memory_usage():
 /proc filesystems."""
 try:
 lines = file('/proc/%s/status' % os.getpid()).readlines()
-pairs = map(lambda(x): x.split(':'), lines)
+pairs = [ x.split(':') for x in lines]
 dct = dict(pairs)
 return (dct['VmSize'].strip(), dct['VmRSS'].strip(), dct['VmData'].strip())
 except:
@@ -405,8 +405,8 @@ def generate_lookup_function_basis(gi,state_space):
 argnames[bt.token][bt.requirement]=True
 elif bt.test == 'ne':
 all_values_for_this_od = state_space[bt.token]
-trimmed_vals = filter(lambda (x): x != bt.requirement,
-all_values_for_this_od)
+trimmed_vals = list(filter(lambda x: x != bt.requirement,
+all_values_for_this_od))
 for tv in trimmed_vals:
 argnames[bt.token][tv]=True
 else:

@@ -33,7 +33,7 @@ class hlist_t(object):
 h = h << 1
 return h
 def __str__(self):
-s = ",".join(map(lambda(x):str(x),self.lst))
+s = ",".join( [ str(x) for x in self.lst])
 return s

@@ -324,7 +324,7 @@ def dump_vv_map_lookup(agi,
 maps = ild_info.get_maps(is_3dnow)
-vv_num = map(lambda x: int(x),vv_lu.keys())
+vv_num = [ int(x) for x in vv_lu.keys()]
 vv_index = max(vv_num) + 1
 map_num = len(maps)
 arr_name = 'xed3_phash_lu'
@@ -395,7 +395,7 @@ def _gen_bymode_fun_dict(info_list, nt_dict, is_conflict_fun,
 opcode = info_list[0].opcode
 for mode in ildutil.mode_space:
 #get info objects with the same modrm.reg bits
-infos = filter(lambda(info): mode in info.mode, info_list)
+infos = list(filter(lambda info: mode in info.mode, info_list))
 if len(infos) == 0:
 ildutil.ild_warn('BY MODE resolving: No infos for mode' +
 '%s opcode %s map %s' % (mode, opcode, insn_map))
@@ -424,7 +424,7 @@ def _gen_byreg_fun_dict(info_list, nt_dict, is_conflict_fun,
 opcode = info_list[0].opcode
 for reg in range(0,8):
 #get info objects with the same modrm.reg bits
-infos = filter(lambda(info): info.ext_opcode==reg, info_list)
+infos = list(filter(lambda info: info.ext_opcode==reg, info_list))
 if len(infos) == 0:
 ildutil.ild_warn('BYREG resolving: No infos for reg' +
 '%s opcode %s map %s' % (reg, opcode, insn_map))

@@ -347,8 +347,8 @@ def work(agi, united_lookup, disp_nts, brdisp_nts, ild_gendir,
 all_nts.append(ntseq[0])
 #get only those NTs that actually appear in PATTERNs
-disp_nts = filter(lambda(nt): nt in all_nts, disp_nts)
-brdisp_nts = filter(lambda(nt): nt in all_nts, brdisp_nts)
+disp_nts = list(filter(lambda nt: nt in all_nts, disp_nts))
+brdisp_nts = list(filter(lambda nt: nt in all_nts, brdisp_nts))
 debug.write('DISP SEQS: %s\n' % all_disp_seq)

@@ -106,7 +106,7 @@ def get_imm_binding_nts(agi):
 nt_names = ild_nt.get_setting_nts(agi, _imm_token)
 #filter ONE nt
 #FIXME: remove ONE nt from grammar
-return filter(lambda(x): x!='ONE', nt_names)
+return list(filter(lambda x: x!='ONE', nt_names))
 def get_target_opname():
@@ -299,7 +299,7 @@ def gen_l1_functions_and_lookup(agi, united_lookup, imm_dict):
 def _filter_uimm1_nt(imm_nt_names):
 """Filter UIMM8_1 NT from list"""
-return filter(lambda(x): x!=_uimm1_nt, imm_nt_names)
+return list(filter(lambda x: x!=_uimm1_nt, imm_nt_names))
 def work(agi, united_lookup, imm_nts, ild_gendir, eosz_dict,

@@ -165,7 +165,7 @@ def add_ild_info(info_set, pattern):
 def get_min_prio_list(info_list):
 if len(info_list) == 0:
 return []
-min_prio = min(info_list, key=lambda(x): x.priority).priority
+min_prio = min(info_list, key=lambda x: x.priority).priority
 min_list = []
 for info in info_list:
 if info.priority == min_prio:

@@ -494,8 +494,8 @@ def _generate_lookup_function_indices(ii,state_space,argnames):
 indices[bt.token] = bt.requirement
 elif bt.test == 'ne':
 all_values_for_this_od = state_space[bt.token]
-trimmed_vals = filter(lambda (x): x != bt.requirement,
-all_values_for_this_od)
+trimmed_vals = list(filter(lambda x: x != bt.requirement,
+all_values_for_this_od))
 #Add the list of values; We flaten it later
 indices[bt.token] = trimmed_vals
 else:
@@ -573,8 +573,8 @@ def add_op_deciders(ipattern, state_space, argnames):
 argnames[bt.token][bt.requirement]=True
 elif bt.test == 'ne':
 all_values_for_this_od = state_space[bt.token]
-trimmed_vals = filter(lambda (x): x != bt.requirement,
-all_values_for_this_od)
+trimmed_vals = list(filter(lambda x: x != bt.requirement,
+all_values_for_this_od))
 for tv in trimmed_vals:
 argnames[bt.token][tv]=True
 else:

@@ -491,7 +491,7 @@ def _find_l1_phash_fks(cdict):
 def _find_candidate_lengths_mul(lst):
 """Return integer lengths n, n*1.1, n*1.2, ... n*1.9, n*2"""
 n = len(lst)
-r = map(lambda x: int(math.ceil((1 + x/10.0)*n)), range(0,11))
+r = [ int(math.ceil((1 + x/10.0)*n)) for x in range(0,11)]
 # avoid duplicates
 s = set()
 for a in r:

@@ -608,13 +608,13 @@ class instruction_codegen_t():
 iform.fbs.sort(cmp=sort_field_bindings)
 # create a list of int values
-fbs_values = map(lambda x: x.int_value, iform.fbs)
+fbs_values = [ x.int_value for x in iform.fbs]
 #find the start index of this list of values in the general list
 #and update the general list as needed
 iform.fb_index = self._find_fb_occurrence(fbs_list, fbs_values)
-fbs_list = map(lambda x: str(x), fbs_list)
+fbs_list = [ str(x) for x in fbs_list]
 return fbs_list
@@ -692,7 +692,7 @@ class instruction_codegen_t():
 print("NOM_OPCODE: %d" % iform.nominal_opcode)
 print("MAP: %s" % iform.map)
-fbs_values = map(lambda x: x.int_value, iform.fbs)
+fbs_values = [ x.int_value for x in iform.fbs]
 print("FB values: %s" % fbs_values)
 print("\n\n")
 print("-"*20)

@@ -282,8 +282,8 @@ class nt_function_gen_t(object):
 #we do not need to the PSEUDO regs since
 #they are not in use by the encoder
-tmp = filter(lambda x: not x.in_comment('PSEUDO'), reg_list_enumer_vals)
-regs = map(lambda x: x.name, tmp)
+tmp = list(filter(lambda x: not x.in_comment('PSEUDO'), reg_list_enumer_vals))
+regs = [ x.name for x in tmp]
 #put XED_REG_INVLAID in the beginning
@@ -292,7 +292,7 @@ class nt_function_gen_t(object):
 ordered_regs.extend(regs)
 #add XEG_REG_ prefix
-full_reg_name = map(lambda x: 'XED_REG_' + x, ordered_regs)
+full_reg_name = [ 'XED_REG_' + x for x in ordered_regs]
 self._check_duplications(full_reg_name)
 reg2int = {}

@@ -388,7 +388,7 @@ class operands_storage_t(object):
 self.bins = self._compress_operands()
 operands = self.operand_fields.values()
-un_compressed = filter(lambda x: x.compressed == False, operands )
+un_compressed = list(filter(lambda x: x.compressed == False, operands ))
 un_compressed.sort(cmp=cmp_operands)
 # first emit all the operands that does not use bit fields

@@ -580,8 +580,8 @@ class conditions_t(object):
 which ones are captures"""
 if vcapture():
 msgb("CAPTURE COLLECTION USING:\n\t%s\n" % "\n\t".join(map(str,clist)))
-full_captures = filter(lambda(x): x.is_bit_capture(), clist)
-captures = map(lambda(x): x.capture_info(), full_captures)
+full_captures = list(filter(lambda x: x.is_bit_capture(), clist))
+captures = [ x.capture_info() for x in full_captures]
 return captures
 def compute_field_capture_list(self):
@@ -1216,8 +1216,8 @@ class iform_t(object):
 modifying to input action_list
 '''
-emit_actions = filter(lambda x: x.type == 'emit', action_list)
-fb_actions = filter(lambda x: x.type == 'FB', action_list)
+emit_actions = list(filter(lambda x: x.type == 'emit', action_list))
+fb_actions = list(filter(lambda x: x.type == 'FB', action_list))
 #iterate to find overlapping actions
 action_to_remove = []
@@ -1799,8 +1799,7 @@ class encoder_configuration_t(object):
 elif rule.actions[0].is_error():
 nt.otherwise = [actions.gen_return_action('0')]
 else:
-nt.otherwise = map(lambda(x): actions.action_t(x),
-actns)
+nt.otherwise = [ actions.action_t(x) for x in actns]
 # in case we have valid action for the otherwise
 # rule we should finish it with returnning 1
 # which is "not an error"
@@ -2973,12 +2972,12 @@ class encoder_configuration_t(object):
 fe.add_header(headers)
 fe.start()
-f_names = map(lambda x: x.function_name, self.fb_ptrs_fo_list)
+f_names = [ x.function_name for x in self.fb_ptrs_fo_list]
 self._emit_functions_lu_table(fe, 'xed_ptrn_func_ptr_t',
 f_names, 'xed_encode_fb_lu_table',
 'XED_ENCODE_MAX_FB_PATTERNS')
 fe.write('\n\n\n')
-f_names = map(lambda x: x.function_name, self.emit_ptrs_fo_list)
+f_names = [ x.function_name for x in self.emit_ptrs_fo_list]
 self._emit_functions_lu_table(fe, 'xed_ptrn_func_ptr_t',
 f_names, 'xed_encode_emit_lu_table',
 'XED_ENCODE_MAX_EMIT_PATTERNS')
@@ -2990,7 +2989,7 @@ class encoder_configuration_t(object):
 'XED_ENCODE_FB_VALUES_TABLE_SIZE',20)
 fe.write('\n\n\n')
-f_names = map(lambda x: x.function_name, self.group_fos)
+f_names = [ x.function_name for x in self.group_fos]
 self._emit_functions_lu_table(fe,'xed_encode_function_pointer_t',
 f_names, 'xed_encode_groups',
 'XED_ENC_GROUPS')

@@ -161,11 +161,11 @@ def _reg_cmp(a,b):
 def rearrange_regs(regs_list):
 """Return a list of enumer.enumer_values_t objects to be passed to
 enum_txt_writer"""
-groups = uniqueify(map(lambda(x) : x.rtype, regs_list))
+groups = uniqueify( [ x.rtype for x in regs_list])
 msgb("RGROUPS", str(groups))
 enumvals = []
 for g in groups:
-k = filter(lambda(x): x.rtype == g, regs_list)
+k = list(filter(lambda x: x.rtype == g, regs_list))
 k.sort(cmp=_reg_cmp)
 first = '%s_FIRST' % (g)
 last = '%s_LAST' % (g)
@@ -180,8 +180,8 @@ def rearrange_regs(regs_list):
 # everything in the middle
 if len(k) > 1:
 enumvals.extend(
-map(lambda(x) : enumer.enumer_value_t(x.name,
-display_str=x.display_str), k[1:]))
+[ enumer.enumer_value_t(x.name,
+display_str=x.display_str) for x in k[1:] ] )
 #last
 enumvals.append(enumer.enumer_value_t(last,
 value=k[-1].name,

@@ -722,7 +722,7 @@ def _gen_dynamic_part1_fo(agi):
 nt_names = _get_nt_names_from_ii(rule)
 #filter NTs that we want to skip
-nt_names = filter(lambda(x): not _skip_nt(x), nt_names)
+nt_names = list(filter(lambda x: not _skip_nt(x), nt_names))
 fo = _gen_capture_chain_fo(nt_names, fname=_dynamic_part1_fn)
 return fo