Compare commits

...

18 Commits

SHA1 Message Date
3db93ee9c4 Fixed stdout output 2024-01-12 16:13:30 +13:00
4844b20770 Merge branch 'master' of https://git.metabarcoding.org/obitools/obitools3 2024-01-12 15:36:58 +13:00
0d98a4f717 Switch to version 3.0.1b26 2024-01-10 16:40:08 +13:00
837ff1a1ba Taxonomy: fixed an issue related to StopIteration behaviour in new versions of python 2024-01-10 15:53:15 +13:00
aeed42456a export: columns are now in alphabetical order when exporting to tab format 2024-01-10 15:52:28 +13:00
fb6e27bb5d Revert "Testing RAM instead of mmap for blob alignment" (reverts commit 6d94cdcc0d) 2023-11-29 04:22:31 +01:00
6d94cdcc0d Testing RAM instead of mmap for blob alignment 2023-11-29 16:19:34 +13:00
8a1f844645 obi import: fixed bug caused by new behaviour of StopIteration exceptions in Python>=3.7 2023-09-21 17:47:40 +12:00
791ccfb92e Fixed include bug in previous version and switch to version 3.0.1b24 2023-05-15 11:35:42 +12:00
1c9a906f5b ngsfilter and ecopcr: now check for primers too long for apat library to handle (31bp max) and switch to version 3.0.1b23 2023-05-12 17:04:21 +12:00
55b2679b23 New command obi rm and switch to version 3.0.1b22 2023-05-08 17:48:50 +12:00
9ea2124adc Switch to version 3.0.1b21 2023-02-13 11:00:01 +13:00
2130a949c7 New command: obi taxonomy to add local taxa (closes #64) 2023-02-13 10:59:20 +13:00
eeb93afa7d import: now automatically renames scientific_name tag to `SCIENTIFIC_NAME`, and suggests using `--input-na-string` when a sequence import fails 2023-02-13 10:40:38 +13:00
755ce179ad head: added output format options 2023-02-13 10:31:26 +13:00
7e492578b3 Switch to version 3.0.1b20 2022-09-21 11:33:03 +12:00
02e9df3ad1 alignpairedend and ngsfilter: ids of original sequences are now kept 2022-09-21 11:32:19 +12:00
55ada80500 import: made ngsfilter file parsing more resilient and switching to version 3.0.1b19 2022-07-15 16:02:21 +12:00
19 changed files with 478 additions and 131 deletions

View File

@ -234,6 +234,10 @@ def run(config):
consensus = o_view[i]
if two_views:
consensus[b"R1_parent"] = forward[i].id
consensus[b"R2_parent"] = reverse[i].id
if not two_views:
seqF = entries[i]
else:
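
The hunk above is the alignpairedend side of the "ids of original sequences are now kept" commit: the consensus record stores the ids of both reads it was built from. A toy sketch of that bookkeeping, with plain dicts standing in for obitools3 view lines (field names illustrative only):

# Toy sketch: tag a merged (consensus) read with the ids of its two parents.
# Plain dicts stand in for obitools3 view lines; the merge itself is a placeholder.
def merge_pair(forward, reverse):
    consensus = {
        "seq": forward["seq"] + reverse["seq"],   # placeholder for the real alignment
        "R1_parent": forward["id"],
        "R2_parent": reverse["id"],
    }
    return consensus

print(merge_pair({"id": "read1.f", "seq": "ACGT"}, {"id": "read1.r", "seq": "TTGC"}))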

View File

@ -8,6 +8,7 @@ from obitools3.apps.optiongroups import addMinimalInputOption, addMinimalOutputO
from obitools3.dms.view import RollbackException
from obitools3.apps.config import logger
from obitools3.utils cimport str2bytes
from obitools3.apps.optiongroups import addExportOutputOption
import time
import sys
@ -23,6 +24,7 @@ def addOptions(parser):
addMinimalInputOption(parser)
addMinimalOutputOption(parser)
addExportOutputOption(parser)
addNoProgressBarOption(parser)
group=parser.add_argument_group('obi head specific options')

View File

@ -406,6 +406,8 @@ def run(config):
tag = TAXID_COLUMN
if tag == b"count":
tag = COUNT_COLUMN
if tag == b"scientific_name":
tag = SCIENTIFIC_NAME_COLUMN
if tag[:7] == b"merged_":
tag = MERGED_PREFIX+tag[7:]
@ -516,7 +518,7 @@ def run(config):
dcols[tag][0][i] = value
except Exception as e:
print("\nCould not import sequence:", repr(entry), "(error raised:", e, ")")
print("\nCould not import sequence:\n", repr(entry), "\nError raised:", e, "\n/!\ Check if '--input-na-string' option needs to be set")
if 'skiperror' in config['obi'] and not config['obi']['skiperror']:
raise e
else:
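
The import change above maps a few incoming tag names onto the internal column names (taxid, count, scientific_name, merged_*) and, when a record cannot be imported, points the user at the --input-na-string option. A rough sketch of the tag-normalization step; the constant values below are placeholders, not the real obitools3 column names:

# Rough sketch of tag-name normalization on import.
# The *_COLUMN values are placeholders for illustration only.
TAXID_COLUMN = b"TAXID"
COUNT_COLUMN = b"COUNT"
SCIENTIFIC_NAME_COLUMN = b"SCIENTIFIC_NAME"
MERGED_PREFIX = b"MERGED_"

def normalize_tag(tag):
    if tag == b"taxid":
        return TAXID_COLUMN
    if tag == b"count":
        return COUNT_COLUMN
    if tag == b"scientific_name":
        return SCIENTIFIC_NAME_COLUMN
    if tag.startswith(b"merged_"):
        return MERGED_PREFIX + tag[len(b"merged_"):]
    return tag

assert normalize_tag(b"scientific_name") == b"SCIENTIFIC_NAME"
assert normalize_tag(b"merged_sample") == b"MERGED_sample"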

View File

@ -24,6 +24,9 @@ from cpython.exc cimport PyErr_CheckSignals
from io import BufferedWriter
MAX_PAT_LEN = 31
__title__="Assign sequence records to the corresponding experiment/sample based on DNA tags and primers"
@ -84,6 +87,8 @@ class Primer:
@type direct:
'''
assert len(sequence) <= MAX_PAT_LEN, "Primer %s is too long, 31 bp max" % sequence
assert sequence not in Primer.collection \
or Primer.collection[sequence]==taglength, \
"Primer %s must always be used with tags of the same length" % sequence
@ -265,6 +270,10 @@ cdef tuple annotate(sequences, infos, no_tags, verbose=False):
not_aligned = len(sequences) > 1
sequences[0] = sequences[0].clone()
if not_aligned:
sequences[0][b"R1_parent"] = sequences[0].id
sequences[0][b"R2_parent"] = sequences[1].id
if not_aligned:
sequences[1] = sequences[1].clone()
sequences[0][REVERSE_SEQUENCE_COLUMN] = sequences[1].seq # used by alignpairedend tool
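
The new asserts protect the apat pattern-matching library, which cannot handle patterns longer than 31 bp, and enforce that a given primer is always combined with tags of a single length. A condensed sketch of those two checks (not the real Primer class):

# Condensed sketch of the primer sanity checks: 31 bp apat limit and a
# consistent tag length per primer sequence.
MAX_PAT_LEN = 31

class Primer:
    collection = {}

    def __init__(self, sequence, taglength):
        assert len(sequence) <= MAX_PAT_LEN, \
            "Primer %s is too long, %d bp max" % (sequence, MAX_PAT_LEN)
        assert sequence not in Primer.collection \
            or Primer.collection[sequence] == taglength, \
            "Primer %s must always be used with tags of the same length" % sequence
        Primer.collection[sequence] = taglength
        self.sequence = sequence
        self.taglength = taglength

Primer(b"GGGCAATCCTGAGCCAA", 8)   # accepted: 17 bp, first use of this primer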

View File

@ -5,7 +5,9 @@ from obitools3.apps.config import logger
from obitools3.dms import DMS
from obitools3.apps.optiongroups import addMinimalInputOption
from obitools3.dms.view.view cimport View
from obitools3.utils cimport tostr
import os
import shutil
__title__="Delete a view"
@ -30,15 +32,56 @@ def run(config):
else:
raise NotImplementedError()
dms = input[0]
# Get the path to the view file to remove
path = input[0].full_path # dms path
path+=b"/VIEWS/"
path+=view.name
path+=b".obiview"
path = dms.full_path # dms path
view_path=path+b"/VIEWS/"
view_path+=view.name
view_path+=b".obiview"
to_remove = {}
# For each column:
for col_alias in view.keys():
col = view[col_alias]
col_name = col.original_name
col_version = col.version
col_type = col.data_type
col_ref = (col_name, col_version)
# build file name and AVL file names
col_file_name = f"{tostr(path)}/{tostr(col.original_name)}.obicol/{tostr(col.original_name)}@{col.version}.odc"
if col_type in [b'OBI_CHAR', b'OBI_QUAL', b'OBI_STR', b'OBI_SEQ']:
avl_file_name = f"{tostr(path)}/OBIBLOB_INDEXERS/{tostr(col.original_name)}_{col.version}_indexer"
else:
avl_file_name = None
to_remove[col_ref] = [col_file_name, avl_file_name]
# For each view:
do_not_remove = []
for vn in dms:
v = dms[vn]
# ignore the one being deleted
if v.name != view.name:
# check that none of the column is referenced, if referenced, remove from list to remove
cols = [(v[c].original_name, v[c].version) for c in v.keys()]
for col_ref in to_remove:
if col_ref in cols:
do_not_remove.append(col_ref)
for nr in do_not_remove:
to_remove.pop(nr)
# Close the view and the DMS
view.close()
input[0].close(force=True)
# Rm
os.remove(path)
#print(to_remove)
# rm AFTER view and DMS close
os.remove(view_path)
for col in to_remove:
os.remove(to_remove[col][0])
if to_remove[col][1] is not None:
shutil.rmtree(to_remove[col][1])
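
The delicate part of the new obi rm command is deciding which files may be deleted: the .obiview file always goes, but a column file (and its blob indexer directory) can only be removed if no other view in the DMS still references the same (name, version) pair. A simplified sketch of that reference check on plain data structures (the view/column layout below is hypothetical):

# Simplified sketch of the "which column files can obi rm delete" decision:
# a column is removable only if no other view references (name, version).
def removable_columns(views, doomed_view):
    doomed = {(c["name"], c["version"]) for c in views[doomed_view]}
    kept_elsewhere = {(c["name"], c["version"])
                      for vname, cols in views.items() if vname != doomed_view
                      for c in cols}
    return doomed - kept_elsewhere

views = {
    "view_A": [{"name": "SEQ", "version": 0}, {"name": "COUNT", "version": 1}],
    "view_B": [{"name": "SEQ", "version": 0}],
}
# COUNT@1 is only used by view_A, so it can be deleted along with view_A;
# SEQ@0 is shared with view_B and must stay.
print(removable_columns(views, "view_A"))   # {('COUNT', 1)}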

View File

@ -0,0 +1,230 @@
#cython: language_level=3
from obitools3.apps.progress cimport ProgressBar # @UnresolvedImport
from obitools3.dms import DMS
from obitools3.dms.view.view cimport View, Line_selection
from obitools3.uri.decode import open_uri
from obitools3.apps.optiongroups import addMinimalInputOption, addTaxonomyOption, addMinimalOutputOption, addNoProgressBarOption
from obitools3.dms.view import RollbackException
from obitools3.dms.column.column cimport Column
from functools import reduce
from obitools3.apps.config import logger
from obitools3.utils cimport tobytes, str2bytes, tostr
from io import BufferedWriter
from obitools3.dms.capi.obiview cimport NUC_SEQUENCE_COLUMN, \
ID_COLUMN, \
DEFINITION_COLUMN, \
QUALITY_COLUMN, \
COUNT_COLUMN, \
TAXID_COLUMN
from obitools3.dms.capi.obitypes cimport OBI_INT
from obitools3.dms.capi.obitaxonomy cimport MIN_LOCAL_TAXID
import time
import math
import sys
from cpython.exc cimport PyErr_CheckSignals
__title__="Add taxa with a new generated taxid to an NCBI taxonomy database"
def addOptions(parser):
addMinimalInputOption(parser)
addTaxonomyOption(parser)
addMinimalOutputOption(parser)
addNoProgressBarOption(parser)
group=parser.add_argument_group('obi taxonomy specific options')
group.add_argument('-n', '--taxon-name-tag',
action="store",
dest="taxonomy:taxon_name_tag",
metavar="<SCIENTIFIC_NAME_TAG>",
default=b"SCIENTIFIC_NAME",
help="Name of the tag giving the scientific name of the taxon "
"(default: 'SCIENTIFIC_NAME').")
# group.add_argument('-g', '--try-genus-match',
# action="store_true", dest="taxonomy:try_genus_match",
# default=False,
# help="Try matching the first word of <SCIENTIFIC_NAME_TAG> when can't find corresponding taxid for a taxon. "
# "If there is a match it is added in the 'parent_taxid' tag. (Can be used by 'obi taxonomy' to add the taxon under that taxid).")
group.add_argument('-a', '--restricting-ancestor',
action="store",
dest="taxonomy:restricting_ancestor",
metavar="<RESTRICTING_ANCESTOR>",
default=None,
help="Enables to restrict the addition of taxids under an ancestor specified by its taxid.")
group.add_argument('-t', '--taxid-tag',
action="store",
dest="taxonomy:taxid_tag",
metavar="<TAXID_TAG>",
default=b"TAXID",
help="Name of the tag to store the new taxid "
"(default: 'TAXID').")
group.add_argument('-l', '--log-file',
action="store",
dest="taxonomy:log_file",
metavar="<LOG_FILE>",
default='',
help="Path to a log file to write informations about added taxids.")
def run(config):
DMS.obi_atexit()
logger("info", "obi taxonomy")
# Open the input
input = open_uri(config['obi']['inputURI'])
if input is None:
raise Exception("Could not read input view")
i_dms = input[0]
i_view = input[1]
i_view_name = input[1].name
# Open the output: only the DMS, as the output view is going to be created by cloning the input view
# (could eventually be done via an open_uri() argument)
output = open_uri(config['obi']['outputURI'],
input=False,
dms_only=True)
if output is None:
raise Exception("Could not create output view")
o_dms = output[0]
output_0 = output[0]
o_view_name = output[1]
# stdout output: create temporary view
if type(output_0)==BufferedWriter:
o_dms = i_dms
i=0
o_view_name = b"temp"
while o_view_name in i_dms: # Making sure view name is unique in output DMS
o_view_name = o_view_name+b"_"+str2bytes(str(i))
i+=1
imported_view_name = o_view_name
# If the input and output DMS are not the same, import the input view in the output DMS before cloning it to modify it
# (could be the other way around: clone and modify in the input DMS then import the new view in the output DMS)
if i_dms != o_dms:
imported_view_name = i_view_name
i=0
while imported_view_name in o_dms: # Making sure view name is unique in output DMS
imported_view_name = i_view_name+b"_"+str2bytes(str(i))
i+=1
View.import_view(i_dms.full_path[:-7], o_dms.full_path[:-7], i_view_name, imported_view_name)
i_view = o_dms[imported_view_name]
# Clone output view from input view
o_view = i_view.clone(o_view_name)
if o_view is None:
raise Exception("Couldn't create output view")
i_view.close()
# Open taxonomy
taxo_uri = open_uri(config['obi']['taxoURI'])
if taxo_uri is None or taxo_uri[2] == bytes:
raise Exception("Couldn't open taxonomy")
taxo = taxo_uri[1]
# Initialize the progress bar
if config['obi']['noprogressbar'] == False:
pb = ProgressBar(len(o_view), config)
else:
pb = None
try:
if config['taxonomy']['log_file']:
logfile = open(config['taxonomy']['log_file'], 'w')
else:
logfile = sys.stdout
if 'restricting_ancestor' in config['taxonomy']:
res_anc = int(config['taxonomy']['restricting_ancestor'])
else:
res_anc = None
taxid_column_name = config['taxonomy']['taxid_tag']
parent_taxid_column_name = "PARENT_TAXID" # TODO macro
taxon_name_column_name = config['taxonomy']['taxon_name_tag']
taxid_column = Column.new_column(o_view, taxid_column_name, OBI_INT)
if parent_taxid_column_name in o_view:
parent_taxid_column = o_view[parent_taxid_column_name]
else:
parent_taxid_column = None
#parent_taxid_column = Column.new_column(o_view, parent_taxid_column_name, OBI_INT)
taxon_name_column = o_view[taxon_name_column_name]
for i in range(len(o_view)):
PyErr_CheckSignals()
#if pb is not None:
# pb(i)
taxon_name = taxon_name_column[i]
taxon = taxo.get_taxon_by_name(taxon_name, res_anc)
if taxon is not None:
taxid_column[i] = taxon.taxid
if logfile:
print(f"Found taxon '{tostr(taxon_name)}' already existing with taxid {taxid_column[i]}", file=logfile)
else: # try finding genus or other parent taxon from the first word
#print(i, o_view[i].id)
if parent_taxid_column is not None and parent_taxid_column[i] is not None:
taxid_column[i] = taxo.add_taxon(taxon_name, 'species', parent_taxid_column[i])
if logfile:
print(f"Adding taxon '{tostr(taxon_name)}' under provided parent {parent_taxid_column[i]} with taxid {taxid_column[i]}", file=logfile)
else:
taxon_name_sp = taxon_name.split(b" ")
taxon = taxo.get_taxon_by_name(taxon_name_sp[0], res_anc)
if taxon is not None:
parent_taxid_column[i] = taxon.taxid
taxid_column[i] = taxo.add_taxon(taxon_name, 'species', taxon.taxid)
if logfile:
print(f"Adding taxon '{tostr(taxon_name)}' under '{tostr(taxon.name)}' ({taxon.taxid}) with taxid {taxid_column[i]}", file=logfile)
else:
taxid_column[i] = taxo.add_taxon(taxon_name, 'species', res_anc)
if logfile:
print(f"Adding taxon '{tostr(taxon_name)}' under provided restricting ancestor {res_anc} with taxid {taxid_column[i]}", file=logfile)
taxo.write(taxo.name, update=True)
except Exception, e:
raise RollbackException("obi taxonomy error, rollbacking view: "+str(e), o_view)
#if pb is not None:
# pb(i, force=True)
# print("", file=sys.stderr)
#logger("info", "\nTaxa already in the taxonomy: "+str(found_count)+"/"+str(len(o_view))+" ("+str(round(found_count*100.0/len(o_view), 2))+"%)")
#logger("info", "\nParent taxids found: "+str(parent_found_count)+"/"+str(len(o_view))+" ("+str(round(parent_found_count*100.0/len(o_view), 2))+"%)")
#logger("info", "\nTaxids not found: "+str(not_found_count)+"/"+str(len(o_view))+" ("+str(round(not_found_count*100.0/len(o_view), 2))+"%)")
# Save command config in View and DMS comments
command_line = " ".join(sys.argv[1:])
input_dms_name=[input[0].name]
input_view_name=[i_view_name]
if 'taxoURI' in config['obi'] and config['obi']['taxoURI'] is not None:
input_dms_name.append(config['obi']['taxoURI'].split("/")[-3])
input_view_name.append("taxonomy/"+config['obi']['taxoURI'].split("/")[-1])
o_view.write_config(config, "taxonomy", command_line, input_dms_name=input_dms_name, input_view_name=input_view_name)
o_dms.record_command_line(command_line)
#print("\n\nOutput view:\n````````````", file=sys.stderr)
#print(repr(o_view), file=sys.stderr)
# stdout output: write to buffer
if type(output_0)==BufferedWriter:
logger("info", "Printing to output...")
o_view.print_to_output(output_0, noprogressbar=config['obi']['noprogressbar'])
o_view.close()
# If the input and the output DMS are different or if stdout output, delete the temporary imported view used to create the final view
if i_dms != o_dms or type(output_0)==BufferedWriter:
View.delete_view(o_dms, imported_view_name)
o_dms.close(force=True)
i_dms.close(force=True)
logger("info", "Done.")

View File

@ -58,7 +58,7 @@ cdef extern from "obidms_taxonomy.h" nogil:
OBIDMS_taxonomy_p obi_read_taxdump(const_char_p taxdump)
int obi_write_taxonomy(OBIDMS_p dms, OBIDMS_taxonomy_p tax, const_char_p tax_name)
int obi_write_taxonomy(OBIDMS_p dms, OBIDMS_taxonomy_p tax, const_char_p tax_name, bint update)
int obi_close_taxonomy(OBIDMS_taxonomy_p taxonomy)

View File

@ -19,8 +19,8 @@ cdef class Taxonomy(OBIWrapper) :
cpdef Taxon get_taxon_by_idx(self, int idx)
cpdef Taxon get_taxon_by_taxid(self, int taxid)
cpdef Taxon get_taxon_by_name(self, object taxon_name, object restricting_taxid=*)
cpdef write(self, object prefix)
cpdef int add_taxon(self, str name, str rank_name, int parent_taxid, int min_taxid=*)
cpdef write(self, object prefix, bint update=*)
cpdef int add_taxon(self, object name, object rank_name, int parent_taxid, int min_taxid=*)
cpdef object get_species(self, int taxid)
cpdef object get_genus(self, int taxid)
cpdef object get_family(self, int taxid)

View File

@ -174,6 +174,7 @@ cdef class Taxonomy(OBIWrapper) :
cpdef Taxon get_taxon_by_name(self, object taxon_name, object restricting_taxid=None):
#print(taxon_name)
taxon = self._name_dict.get(tobytes(taxon_name), None)
if not taxon:
return None
@ -282,12 +283,12 @@ cdef class Taxonomy(OBIWrapper) :
yield Taxon(taxon_capsule, self)
cpdef write(self, object prefix) :
if obi_write_taxonomy(self._dms.pointer(), self.pointer(), tobytes(prefix)) < 0 :
cpdef write(self, object prefix, bint update=False) :
if obi_write_taxonomy(self._dms.pointer(), self.pointer(), tobytes(prefix), update) < 0 :
raise Exception("Error writing the taxonomy to binary files")
cpdef int add_taxon(self, str name, str rank_name, int parent_taxid, int min_taxid=10000000) :
cpdef int add_taxon(self, object name, object rank_name, int parent_taxid, int min_taxid=10000000) :
cdef int taxid
taxid = obi_taxo_add_local_taxon(self.pointer(), tobytes(name), tobytes(rank_name), parent_taxid, min_taxid)
if taxid < 0 :
@ -324,15 +325,17 @@ cdef class Taxonomy(OBIWrapper) :
cdef Taxon taxon
try:
taxon = self.get_taxon_by_taxid(taxid)
except:
raise StopIteration
except Exception as e:
print('\n'+e, file=sys.stderr)
return
if taxon is not None:
while taxon.taxid != 1:
yield taxon
#print(taxon.taxid)
taxon = taxon.parent
yield taxon
else:
raise StopIteration
return
def is_ancestor(self, int ancestor_taxid, int taxid):
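
This change, like the fastq and tab reader changes further down, addresses the behaviour introduced by PEP 479: since Python 3.7 a StopIteration that escapes a generator body is turned into a RuntimeError, so generators must end with a plain return instead of raising StopIteration. A minimal illustration:

# PEP 479 in one example: raising StopIteration inside a generator is an error
# on Python >= 3.7; returning is the supported way to stop.
def bad_gen(items):
    it = iter(items)
    while True:
        yield next(it)          # when `it` runs out, StopIteration escapes the
                                # generator body -> RuntimeError on Python >= 3.7

def good_gen(items):
    it = iter(items)
    while True:
        try:
            yield next(it)
        except StopIteration:
            return              # clean end of iteration

print(list(good_gen("abc")))    # ['a', 'b', 'c']
# list(bad_gen("abc")) raises RuntimeError on Python >= 3.7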

View File

@ -23,15 +23,16 @@ cdef class TabFormat:
@cython.boundscheck(False)
def __call__(self, object data):
cdef set ktags
cdef object ktags
cdef list tags = [key for key in data]
line = []
if self.tags is not None and self.tags:
ktags = self.tags
if self.tags != None and self.tags:
ktags = list(self.tags)
else:
ktags = set(tags)
ktags = list(set(tags))
ktags.sort()
if self.header and self.first_line:
for k in ktags:
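
The export side of "columns are now in alphabetical order" amounts to building the column list from a sorted, deduplicated set of tag names, so the header and every row come out in a deterministic order. Roughly, on toy data:

# Deterministic column order for a tab export: collect tags, deduplicate,
# sort, then emit the header and each row in that fixed order.
records = [
    {"id": "seq1", "count": 3, "taxid": 9606},
    {"id": "seq2", "count": 1, "sample": "A"},
]
columns = sorted({key for rec in records for key in rec})
print("\t".join(columns))                                     # header
for rec in records:
    print("\t".join(str(rec.get(col, "NA")) for col in columns))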

View File

@ -103,7 +103,11 @@ def fastqWithQualityIterator(lineiterator,
yield seq
read+=1
try:
hline = next(i)
except StopIteration:
return
def fastqWithoutQualityIterator(lineiterator,
@ -174,5 +178,7 @@ def fastqWithoutQualityIterator(lineiterator,
yield seq
read+=1
try:
hline = next(i)
except StopIteration:
return

View File

@ -48,13 +48,14 @@ def ngsfilterIterator(lineiterator,
all_lines.insert(0, firstline)
# Insert header for column names
column_names = [b"experiment", b"sample", b"forward_tag", b"reverse_tag", b"forward_primer", b"reverse_primer",b"additional_info"]
column_names = [b"experiment", b"sample", b"forward_tag", b"reverse_tag", b"forward_primer", b"reverse_primer"] #,b"additional_info"]
header = out_sep.join(column_names)
new_lines.append(header)
for line in all_lines:
split_line = line.split(maxsplit=5)
if split_line:
tags = split_line.pop(2)
tags = tags.split(b":")
for t_idx in range(len(tags)):
@ -64,7 +65,7 @@ def ngsfilterIterator(lineiterator,
tags.append(tags[0])
split_line.insert(2, tags[0])
split_line.insert(3, tags[1])
new_lines.append(out_sep.join(split_line[0:7]))
new_lines.append(out_sep.join(split_line[0:6]))
return tabIterator(iter(new_lines),
header = True,
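
The ngsfilter reader rewrites each line of the whitespace-separated ngsfilter file into a fixed set of columns: split on whitespace at most five times, break the combined forward:reverse tag field into two columns, and, with this change, drop the trailing additional_info column. A rough standalone sketch of that rewrite, on an invented input line:

# Rough sketch of the ngsfilter line rewrite: one input line becomes
# experiment, sample, forward_tag, reverse_tag, forward_primer, reverse_primer.
def rewrite_line(line):
    fields = line.split(maxsplit=5)            # keep any trailing free text in one chunk
    tags = fields.pop(2).split(b":")
    if len(tags) == 1:                         # same tag on both ends
        tags.append(tags[0])
    fields.insert(2, tags[0])
    fields.insert(3, tags[1])
    return b"\t".join(fields[0:6])             # the additional_info chunk is dropped

line = b"exp1 sampleA aacgt:ttgca GGGCAATCCTGAGCCAA CCATTGAGTCTCTGCACCTATC F @ extra"
print(rewrite_line(line))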

View File

@ -99,7 +99,10 @@ def tabIterator(lineiterator,
read+=1
try:
line = next(iterator)
except StopIteration:
return

View File

@ -280,7 +280,7 @@ def open_uri(uri,
iseq = urib
objclass = bytes
else: # TODO update uopen to be able to write?
if config['obi']['outputformat'] == b'metabaR':
if 'outputformat' in config['obi'] and config['obi']['outputformat'] == b'metabaR':
if 'metabarprefix' not in config['obi']:
raise Exception("Prefix needed when exporting for metabaR (--metabaR-prefix option)")
else:

View File

@ -1,5 +1,5 @@
major = 3
minor = 0
serial= '1b18'
serial= '1b26'
version ="%d.%d.%s" % (major,minor,serial)

src/.gitignore vendored
View File

@ -3,3 +3,4 @@
/cmake_install.cmake
/libcobitools3.dylib
/Makefile
/build/

View File

@ -365,8 +365,6 @@ static int print_seq(Obiview_p i_view, Obiview_p o_view,
int32_t i;
// TODO add check for primer longer than MAX_PAT_LEN (32)
// Get sequence id
seq_id = obi_get_str_with_elt_idx_and_col_p_in_view(i_view, i_id_column, i_idx, 0);
@ -751,6 +749,20 @@ int obi_ecopcr(const char* i_dms_name,
o1c = complementPattern(o1);
o2c = complementPattern(o2);
// check for primers equal or longer than MAX_PAT_LEN (32)
if (strlen(primer1) >= MAX_PAT_LEN)
{
obi_set_errno(OBI_ECOPCR_ERROR);
obidebug(1, "\nError: first primer is too long, needs to be < 32bp (%s)", primer1);
return -1;
}
if (strlen(primer2) >= MAX_PAT_LEN)
{
obi_set_errno(OBI_ECOPCR_ERROR);
obidebug(1, "\nError: second primer is too long, needs to be < 32bp (%s)", primer2);
return -1;
}
// Open input DMS
i_dms = obi_open_dms(i_dms_name, false);
if (i_dms == NULL)

View File

@ -1092,7 +1092,7 @@ static int write_ranks_idx(OBIDMS_p dms, OBIDMS_taxonomy_p tax, const char* taxo
free(taxonomy_path);
// Create file
file_descriptor = open(file_name, O_RDWR | O_CREAT | O_EXCL, 0777);
file_descriptor = open(file_name, O_RDWR | O_CREAT, 0777);
if (file_descriptor < 0)
{
obi_set_errno(OBI_TAXONOMY_ERROR);
@ -1196,7 +1196,7 @@ static int write_taxonomy_idx(OBIDMS_p dms, OBIDMS_taxonomy_p tax, const char* t
free(taxonomy_path);
// Create file
file_descriptor = open(file_name, O_RDWR | O_CREAT | O_EXCL, 0777);
file_descriptor = open(file_name, O_RDWR | O_CREAT, 0777);
if (file_descriptor < 0)
{
obi_set_errno(OBI_TAXONOMY_ERROR);
@ -1472,7 +1472,7 @@ static int write_names_idx(OBIDMS_p dms, OBIDMS_taxonomy_p tax, const char* taxo
free(taxonomy_path);
// Create file
file_descriptor = open(file_name, O_RDWR | O_CREAT | O_EXCL, 0777);
file_descriptor = open(file_name, O_RDWR | O_CREAT, 0777);
if (file_descriptor < 0)
{
obi_set_errno(OBI_TAXONOMY_ERROR);
@ -1760,7 +1760,7 @@ static int write_merged_idx(OBIDMS_p dms, OBIDMS_taxonomy_p tax, const char* tax
free(taxonomy_path);
// Create file
file_descriptor = open(file_name, O_RDWR | O_CREAT | O_EXCL, 0777);
file_descriptor = open(file_name, O_RDWR | O_CREAT, 0777);
if (file_descriptor < 0)
{
obi_set_errno(OBI_TAXONOMY_ERROR);
@ -3250,15 +3250,15 @@ OBIDMS_taxonomy_p obi_read_taxonomy(OBIDMS_p dms, const char* taxonomy_name, boo
}
int obi_write_taxonomy(OBIDMS_p dms, OBIDMS_taxonomy_p tax, const char* tax_name)
int obi_write_taxonomy(OBIDMS_p dms, OBIDMS_taxonomy_p tax, const char* tax_name, bool update)
{
char* taxonomy_path;
if (!update) {
// Build the taxonomy directory path
taxonomy_path = get_taxonomy_path(dms, tax_name);
if (taxonomy_path == NULL)
return -1;
// Try to create the directory
if (mkdir(taxonomy_path, 00777) < 0)
{
@ -3268,8 +3268,8 @@ int obi_write_taxonomy(OBIDMS_p dms, OBIDMS_taxonomy_p tax, const char* tax_name
free(taxonomy_path);
return -1;
}
free(taxonomy_path);
}
if (write_ranks_idx(dms, tax, tax_name) < 0)
return -1;
@ -3279,18 +3279,19 @@ int obi_write_taxonomy(OBIDMS_p dms, OBIDMS_taxonomy_p tax, const char* tax_name
return -1;
if (write_merged_idx(dms, tax, tax_name) < 0)
return -1;
// Check if there are local taxa (if so last taxon is local)
if ((tax->taxa)->local_count > 0)
{
if (write_local_taxonomy_idx(dms, tax, tax_name) < 0)
return -1;
}
// Write preferred names if there are some
if (tax->preferred_names != NULL)
{
if (write_preferred_names_idx(dms, tax, tax_name) < 0)
return -1;
}
// Write local taxa if there are some
if ((tax->taxa)->local_count > 0)
{
if (write_local_taxonomy_idx(dms, tax, tax_name) < 0)
return -1;
}
return 0;
}
@ -3302,16 +3303,17 @@ int obi_close_taxonomy(OBIDMS_taxonomy_p taxonomy)
if (taxonomy)
{
// Update local informations (local taxa and preferred names) if there are any
if ((taxonomy->taxa)->local_count > 0)
{
if (taxonomy->dms == NULL)
{
obi_set_errno(OBI_TAXONOMY_ERROR);
obidebug(1, "\nError closing a taxonomy with local files but no DMS associated (probably read directly from taxdump)"); // TODO discuss
}
if (write_local_taxonomy_idx(taxonomy->dms, taxonomy, taxonomy->tax_name) < 0)
return -1;
}
// Done with write_taxo, edits all needed files. Only ldx file was edited in OBI1 but it led to issues. Discussable
// if ((taxonomy->taxa)->local_count > 0)
// {
// if (taxonomy->dms == NULL)
// {
// obi_set_errno(OBI_TAXONOMY_ERROR);
// obidebug(1, "\nError closing a taxonomy with local files but no DMS associated (probably read directly from taxdump)"); // TODO discuss
// }
// if (write_local_taxonomy_idx(taxonomy->dms, taxonomy, taxonomy->tax_name) < 0)
// return -1;
// }
// Write preferred names if there are some
if (taxonomy->preferred_names)
@ -3377,9 +3379,10 @@ int obi_close_taxonomy(OBIDMS_taxonomy_p taxonomy)
int obi_taxo_add_local_taxon(OBIDMS_taxonomy_p tax, const char* name, const char* rank_name, int32_t parent_taxid, int32_t min_taxid)
{
int32_t taxid;
int32_t count;
ecotx_t* taxon;
int i;
// econame_t* name_struct;
econame_t* name_struct;
// Enlarge the structure memory for a new taxon
tax->taxa = (ecotxidx_t*) realloc(tax->taxa, sizeof(ecotxidx_t) + sizeof(ecotx_t) * (((tax->taxa)->count) + 1));
@ -3441,42 +3444,65 @@ int obi_taxo_add_local_taxon(OBIDMS_taxonomy_p tax, const char* name, const char
((tax->taxa)->local_count)++;
(tax->taxa)->buffer_size = (tax->taxa)->count;
// // Add new name in names structure // Commented because the new name was not added in the .ndx file in the OBITools1
// // Allocate memory for new name
// tax->names = (econameidx_t*) realloc(tax->names, sizeof(econameidx_t) + sizeof(econame_t) * ((tax->names)->count + 1));
// if (tax->names == NULL)
// {
// obi_set_errno(OBI_MALLOC_ERROR);
// obidebug(1, "\nError reallocating memory for a taxonomy structure to add a new taxon");
// return -1;
// }
//
// // Add new name
// name_struct = (tax->names)->names + ((tax->names)->count);
// name_struct->name = (char*) malloc((strlen(name) + 1) * sizeof(char));
// if (name_struct->name == NULL)
// {
// obi_set_errno(OBI_MALLOC_ERROR);
// obidebug(1, "\nError allocating memory for a taxon name to add a new taxon");
// return -1;
// }
// strcpy(name_struct->name, name);
// name_struct->class_name = (char*) malloc((strlen("scientific name") + 1) * sizeof(char));
// if (name_struct->class_name == NULL)
// {
// obi_set_errno(OBI_MALLOC_ERROR);
// obidebug(1, "\nError allocating memory for a taxon class name to add a new taxon");
// return -1;
// }
// strcpy(name_struct->class_name, "scientific name");
// name_struct->is_scientific_name = true;
// name_struct->taxon = ((tax->taxa)->taxon) + ((tax->taxa)->count) - 1;
//
// // Sort names in alphabetical order
// qsort((tax->names)->names, (tax->names)->count, sizeof(econame_t), cmp_names);
//
// // Update name count
// ((tax->names)->count)++;
// Add new name in names structure // On the OBI1, the new name was not added in the .ndx file but it could create issues
// Allocate memory for new name
tax->names = (econameidx_t*) realloc(tax->names, sizeof(econameidx_t) + sizeof(econame_t) * ((tax->names)->count + 1));
if (tax->names == NULL)
{
obi_set_errno(OBI_MALLOC_ERROR);
obidebug(1, "\nError reallocating memory for a taxonomy structure to add a new taxon");
return -1;
}
// Add new name
name_struct = (tax->names)->names + ((tax->names)->count);
name_struct->name = (char*) malloc((strlen(name) + 1) * sizeof(char));
if (name_struct->name == NULL)
{
obi_set_errno(OBI_MALLOC_ERROR);
obidebug(1, "\nError allocating memory for a taxon name to add a new taxon");
return -1;
}
strcpy(name_struct->name, name);
name_struct->class_name = (char*) malloc((strlen("scientific name") + 1) * sizeof(char));
if (name_struct->class_name == NULL)
{
obi_set_errno(OBI_MALLOC_ERROR);
obidebug(1, "\nError allocating memory for a taxon class name to add a new taxon");
return -1;
}
strcpy(name_struct->class_name, "scientific name");
name_struct->is_scientific_name = true;
name_struct->taxon = ((tax->taxa)->taxon) + ((tax->taxa)->count) - 1;
// Update name count
((tax->names)->count)++;
// Sort names in alphabetical order
qsort((tax->names)->names, (tax->names)->count, sizeof(econame_t), cmp_names);
// Add to merged index
tax->merged_idx = (ecomergedidx_t*) realloc(tax->merged_idx, sizeof(ecomergedidx_t) + sizeof(ecomerged_t) * ((tax->merged_idx)->count + 1));
if (tax->merged_idx == NULL)
{
obi_set_errno(OBI_MALLOC_ERROR);
obidebug(1, "\nError reallocating memory for a taxonomy structure");
return -1;
}
count = (tax->merged_idx)->count;
(tax->merged_idx)->count = count + 1;
(tax->merged_idx)->merged[count].taxid = taxid;
(tax->merged_idx)->merged[count].idx = taxon->idx;
//fprintf(stderr, "\nEntered in merged taxon.idx=%d", (tax->merged_idx)->merged[(tax->merged_idx)->count -1].idx);
//fprintf(stderr, "\nEntered in merged taxon.taxid=%d", (tax->merged_idx)->merged[(tax->merged_idx)->count -1].taxid);
//fprintf(stderr, "\nEntered in merged at %d", (tax->merged_idx)->count -1);
//taxon = obi_taxo_get_taxon_with_taxid(tax, taxid);
//fprintf(stderr, "\ntaxon=%x", taxon);
//fprintf(stderr, "\ntaxon.taxid=%d", taxon->taxid);
//fprintf(stderr, "\ntaxon.name=%s", taxon->name);
//fprintf(stderr, "\ntaxon.idx=%d\n\n", ((tax->merged_idx)->count));
return taxid;
}
@ -3547,11 +3573,12 @@ int obi_taxo_add_preferred_name_with_taxon(OBIDMS_taxonomy_p tax, ecotx_t* taxon
name_struct->is_scientific_name = false;
name_struct->taxon = taxon;
// Update preferred name count
((tax->preferred_names)->count)++;
// Sort preferred names in alphabetical order
qsort((tax->preferred_names)->names, (tax->preferred_names)->count, sizeof(econame_t), cmp_names);
// Update preferred name count
((tax->preferred_names)->count)++;
return 0;
}
@ -3669,8 +3696,10 @@ ecotx_t* obi_taxo_get_taxon_with_taxid(OBIDMS_taxonomy_p taxonomy, int32_t taxid
else if (indexed_taxon->idx == -1)
current_taxon = NULL; // TODO discuss what to do when old deleted taxon
else
{
current_taxon = (taxonomy->taxa->taxon)+(indexed_taxon->idx);
//fprintf(stderr, "\n>>>idx %d, taxid %d<<<\n", indexed_taxon->idx, indexed_taxon->taxid);
}
return current_taxon;
}
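
On the C side, obi_write_taxonomy gains an update flag: when updating, the taxonomy directory and its index files already exist and must be rewritten, which is why O_EXCL (fail if the file already exists) is dropped from the open() calls above. The flag semantics, sketched in Python for brevity with an arbitrary file name:

# O_CREAT | O_EXCL refuses to open a file that already exists; O_CREAT alone
# opens it for rewriting. Same semantics as the C open() calls changed above.
import os, tempfile

path = os.path.join(tempfile.mkdtemp(), "taxonomy.idx")
fd = os.open(path, os.O_RDWR | os.O_CREAT | os.O_EXCL, 0o777)   # first write: fine
os.close(fd)
try:
    os.open(path, os.O_RDWR | os.O_CREAT | os.O_EXCL, 0o777)    # update: refused
except FileExistsError as exc:
    print("O_EXCL refuses the rewrite:", exc)
fd = os.open(path, os.O_RDWR | os.O_CREAT, 0o777)               # update: allowed
os.close(fd)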

View File

@ -239,6 +239,7 @@ OBIDMS_taxonomy_p obi_read_taxonomy(OBIDMS_p dms, const char* taxonomy_name, boo
* @param dms A pointer on the DMS to which the taxonomy belongs.
* @param tax A pointer on the taxonomy structure.
* @param tax_name The name (prefix) of the taxonomy.
* @param update Whether files should be rewritten or if it's a new taxonomy (set to true e.g. after adding local taxa).
*
* @returns An integer value indicating the success of the operation.
* @retval 0 on success.
@ -247,7 +248,7 @@ OBIDMS_taxonomy_p obi_read_taxonomy(OBIDMS_p dms, const char* taxonomy_name, boo
* @since 2016
* @author Celine Mercier (celine.mercier@metabarcoding.org)
*/
int obi_write_taxonomy(OBIDMS_p dms, OBIDMS_taxonomy_p tax, const char* tax_name);
int obi_write_taxonomy(OBIDMS_p dms, OBIDMS_taxonomy_p tax, const char* tax_name, bool update);
/**