Compare commits

7 Commits:

aa01236cae
49b8810a76
7a39df54c0
09e483b0d6
14a2579173
36a8aaa92e
a17eb445c2
@@ -161,8 +161,7 @@ def obi_eval(compiled_expr, loc_env, line):
     return obi_eval_result


-def Filter_generator(options, tax_filter):
-    #taxfilter = taxonomyFilterGenerator(options)
+def Filter_generator(options, tax_filter, i_view):

     # Initialize conditions
     predicates = None
@@ -171,6 +170,9 @@ def Filter_generator(options, tax_filter):
     attributes = None
     if "attributes" in options and len(options["attributes"]) > 0:
         attributes = options["attributes"]
+        for attribute in attributes:
+            if attribute not in i_view:
+                return None
     lmax = None
     if "lmax" in options:
         lmax = options["lmax"]
@@ -196,6 +198,8 @@ def Filter_generator(options, tax_filter):
     if "attribute_patterns" in options and len(options["attribute_patterns"]) > 0:
         for p in options["attribute_patterns"]:
             attribute, pattern = p.split(":", 1)
+            if attribute not in i_view:
+                return None
             attribute_patterns[tobytes(attribute)] = re.compile(tobytes(pattern))

     def filter(line, loc_env):
@@ -324,21 +328,29 @@ def run(config):

     # Apply filter
     tax_filter = Taxonomy_filter_generator(taxo, config["grep"])
-    filter = Filter_generator(config["grep"], tax_filter)
+    filter = Filter_generator(config["grep"], tax_filter, i_view)
     selection = Line_selection(i_view)
-    for i in range(len(i_view)):
-        PyErr_CheckSignals()
-        pb(i)
-        line = i_view[i]
-
-        loc_env = {"sequence": line, "line": line, "taxonomy": taxo, "obi_eval_result": False}
-
-        good = filter(line, loc_env)
-
-        if good :
-            selection.append(i)
-
-    pb(i, force=True)
+
+    if filter is None and config["grep"]["invert_selection"]: # all sequences are selected: filter is None if no line will be selected because some columns don't exist
+        for i in range(len(i_view)):
+            PyErr_CheckSignals()
+            pb(i)
+            selection.append(i)
+
+    elif filter is not None : # filter is None if no line will be selected because some columns don't exist
+        for i in range(len(i_view)):
+            PyErr_CheckSignals()
+            pb(i)
+            line = i_view[i]
+
+            loc_env = {"sequence": line, "line": line, "taxonomy": taxo, "obi_eval_result": False}
+
+            good = filter(line, loc_env)
+
+            if good :
+                selection.append(i)
+
+    pb(len(i_view), force=True)
     print("", file=sys.stderr)

     # Create output view with the line selection
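Taken together, the four hunks above change how grep builds its filter: Filter_generator now receives the input view (i_view) and returns None as soon as an option references an attribute column that the view does not contain, and run() then either selects every line (when the selection is inverted) or none at all. The following is a minimal, self-contained sketch of that pattern; the names make_filter, select and view_columns are hypothetical illustrations and not part of the OBITools3 API.

import re

# Hypothetical stand-in for Filter_generator: it returns None when a requested
# attribute is missing from the view's columns, so the caller can tell
# "no line can ever match" apart from an ordinary predicate.
def make_filter(options, view_columns):
    for attribute in options.get("attributes", []):
        if attribute not in view_columns:
            return None
    patterns = [(a, re.compile(p)) for a, p in options.get("attribute_patterns", [])]

    def line_filter(line):
        # Keep the line only if every requested pattern matches its attribute.
        return all(pat.search(str(line.get(attr, ""))) for attr, pat in patterns)

    return line_filter

# Selection logic mirroring the run() hunk: a None filter selects every line
# when the selection is inverted, and no line otherwise (the later inversion
# step of the real command is not shown here).
def select(lines, options, view_columns):
    line_filter = make_filter(options, view_columns)
    if line_filter is None and options.get("invert_selection"):
        return list(range(len(lines)))
    if line_filter is None:
        return []
    return [i for i, line in enumerate(lines) if line_filter(line)]

lines = [{"seq": "ACGT"}, {"seq": "TTTT"}]
print(select(lines, {"attributes": ["taxid"]}, view_columns={"seq"}))                            # []
print(select(lines, {"attributes": ["taxid"], "invert_selection": True}, view_columns={"seq"}))  # [0, 1]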
@@ -34,9 +34,10 @@ def run(config):
     if input[2] == DMS and not config['ls']['longformat']:
         dms = input[0]
         l = []
-        for view in input[0]:
-            l.append(tostr(view) + "\t(Date created: " + str(bytes2str_object(dms[view].comments["Date created"]))+")")
-            dms[view].close()
+        for viewname in input[0]:
+            view = dms[viewname]
+            l.append(tostr(viewname) + "\t(Date created: " + str(bytes2str_object(view.comments["Date created"]))+")")
+            view.close()
         l.sort()
         for v in l:
             print(v)
@@ -506,7 +506,7 @@ cdef tuple annotate(sequences, infos, no_tags, verbose=False):
                 sample=None

     if sample is None:
-        sequences[0][b'error']=b"No tags found"
+        sequences[0][b'error']=b"No sample with that tag combination"
         return False, sequences[0]

     sequences[0].update(sample)
@@ -591,10 +591,11 @@ def run(config):
     # Initialize the progress bar
     pb = ProgressBar(len(entries), config, seconde=5)

-    try:
-        uniq_sequences(entries, o_view, pb, config, mergedKeys_list=config['uniq']['merge'], taxonomy=taxo, mergeIds=config['uniq']['mergeids'], categories=config['uniq']['categories'], max_elts=config['obi']['maxelts'])
-    except Exception, e:
-        raise RollbackException("obi uniq error, rollbacking view: "+str(e), o_view)
+    if len(entries) > 0:
+        try:
+            uniq_sequences(entries, o_view, pb, config, mergedKeys_list=config['uniq']['merge'], taxonomy=taxo, mergeIds=config['uniq']['mergeids'], categories=config['uniq']['categories'], max_elts=config['obi']['maxelts'])
+        except Exception, e:
+            raise RollbackException("obi uniq error, rollbacking view: "+str(e), o_view)

     print("", file=sys.stderr)

@@ -227,7 +227,9 @@ cdef class DMS(OBIWrapper):
         cdef str s
         s=""
         for view_name in self.keys():
-            s = s + repr(self.get_view(view_name)) + "\n"
+            view = self.get_view(view_name)
+            s = s + repr(view) + "\n"
+            view.close()
         return s


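The ls hunk above and this DMS.__repr__ hunk adopt the same discipline: open each view once by name, reuse that same handle to read what is needed, and close it before moving on, so listing or printing a DMS does not leave views open. Below is a small illustrative sketch of that open-use-close pattern; View, ViewStore and describe are hypothetical placeholders, not the real obitools3 objects.

# Hypothetical view object: only the pieces needed for the sketch.
class View:
    def __init__(self, name):
        self.name = name
        self.comments = {"Date created": "2019-01-01"}
        self.closed = False

    def close(self):
        self.closed = True

    def __repr__(self):
        return "<view %s>" % self.name

# Hypothetical container mimicking "iterate view names, open on demand".
class ViewStore:
    def __init__(self, names):
        self._names = list(names)

    def keys(self):
        return list(self._names)

    def get_view(self, name):
        return View(name)

def describe(store):
    lines = []
    for name in store.keys():
        view = store.get_view(name)   # open the view once...
        lines.append("%s\t(Date created: %s)" % (name, view.comments["Date created"]))
        view.close()                  # ...and close it before the next one
    return "\n".join(sorted(lines))

print(describe(ViewStore(["b_view", "a_view"])))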
@@ -1,5 +1,5 @@
 major = 3
 minor = 0
-serial= '0b24'
+serial= '0b25'

 version ="%d.%d.%s" % (major,minor,serial)
@@ -1350,6 +1350,8 @@ OBIDMS_column_p obi_create_column(OBIDMS_p dms,
         }
         strncpy(header->indexer_name, final_indexer_name, INDEXER_MAX_NAME);
     }
+    else
+        new_column->indexer = NULL;

     // Fill the data with NA values
     obi_ini_to_NA_values(new_column, 0, nb_lines);
@@ -1558,6 +1560,8 @@ OBIDMS_column_p obi_open_column(OBIDMS_p dms,
             return NULL;
         }
     }
+    else
+        column->indexer = NULL;

     if (close(column_file_descriptor) < 0)
     {
@@ -1694,7 +1698,7 @@ int obi_close_column(OBIDMS_column_p column)
         ret_val = -1;

     // If it's a tuple column or the data type is OBI_STR, OBI_SEQ or OBI_QUAL, the associated indexer is closed
-    if (((column->header)->tuples) || (((column->header)->returned_data_type == OBI_STR) || ((column->header)->returned_data_type == OBI_SEQ) || ((column->header)->returned_data_type == OBI_QUAL)))
+    if ((column->indexer) != NULL)
         if (obi_close_indexer(column->indexer) < 0)
             ret_val = -1;
