Code refactoring

2022-01-14 17:32:12 +01:00
parent 5753723618
commit ef66ca4972
16 changed files with 260 additions and 260 deletions
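Note: almost every hunk in this commit is a pure identifier rename, moving from the earlier dunder/snake_case style (new_iter, __file_chunk__, __forward_files__) to Go-style mixedCaps with a leading underscore for package-private symbols (newIter, _FileChunk, _ForwardFiles); the only structural change is a small else-branch simplification in _EndOfLastEntry. A minimal before/after sketch of the convention, using names taken from the diff below (the package name here is illustrative):

    package naming

    // Before the refactoring: legal Go, but not the usual naming style.
    var __FILE_CHUNK_SIZE__ = 1 << 20

    type __file_chunk__ struct{ order int }

    // After the refactoring: mixedCaps, with a leading underscore marking the
    // symbols as internal helpers; values and behaviour are unchanged.
    var _FileChunkSize = 1 << 20

    type _FileChunk struct{ order int }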

View File

@@ -163,12 +163,12 @@ func ReadEcoPCRBatch(reader io.Reader, options ...WithOption) obiseq.IBioSequenc
     opt := MakeOptions(options)
-    new_iter := obiseq.MakeIBioSequenceBatch(opt.BufferSize())
-    new_iter.Add(1)
+    newIter := obiseq.MakeIBioSequenceBatch(opt.BufferSize())
+    newIter.Add(1)
     go func() {
-        new_iter.Wait()
-        close(new_iter.Channel())
+        newIter.Wait()
+        close(newIter.Channel())
     }()
     go func() {
@@ -181,7 +181,7 @@ func ReadEcoPCRBatch(reader io.Reader, options ...WithOption) obiseq.IBioSequenc
             slice = append(slice, seq)
             ii++
             if ii >= opt.BatchSize() {
-                new_iter.Channel() <- obiseq.MakeBioSequenceBatch(i, slice...)
+                newIter.Channel() <- obiseq.MakeBioSequenceBatch(i, slice...)
                 slice = make(obiseq.BioSequenceSlice, 0, opt.BatchSize())
                 i++
@@ -192,10 +192,10 @@ func ReadEcoPCRBatch(reader io.Reader, options ...WithOption) obiseq.IBioSequenc
         }
         if len(slice) > 0 {
-            new_iter.Channel() <- obiseq.MakeBioSequenceBatch(i, slice...)
+            newIter.Channel() <- obiseq.MakeBioSequenceBatch(i, slice...)
         }
-        new_iter.Done()
+        newIter.Done()
         if err != nil && err != io.EOF {
             log.Panicf("%+v", err)
@@ -203,7 +203,7 @@ func ReadEcoPCRBatch(reader io.Reader, options ...WithOption) obiseq.IBioSequenc
     }()
-    return new_iter
+    return newIter
 }
 func ReadEcoPCR(reader io.Reader, options ...WithOption) obiseq.IBioSequence {
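The Add/Wait/close/Done choreography above is the producer pattern repeated by every reader and writer in this commit: the output iterator's wait group is incremented before the producers start, a dedicated goroutine closes the channel once all producers have called Done(), and the producers push batches and then signal completion. A stripped-down, self-contained sketch of that pattern with stand-in types (not the real obiseq API):

    package sketch

    import "sync"

    // batch stands in for obiseq.BioSequenceBatch: an ordered slice of records.
    type batch struct {
        order int
        seqs  []string
    }

    // iterBatches groups seqs into batches of batchSize and streams them on a
    // buffered channel, closing it only after the producer has called Done().
    func iterBatches(seqs []string, batchSize, buffer int) <-chan batch {
        out := make(chan batch, buffer)
        var wg sync.WaitGroup

        wg.Add(1)
        go func() { // closer: runs once every producer is done
            wg.Wait()
            close(out)
        }()

        go func() { // producer
            defer wg.Done()
            order := 0
            slice := make([]string, 0, batchSize)
            for _, s := range seqs {
                slice = append(slice, s)
                if len(slice) >= batchSize {
                    out <- batch{order, slice}
                    order++
                    slice = make([]string, 0, batchSize)
                }
            }
            if len(slice) > 0 {
                out <- batch{order, slice}
            }
        }()

        return out
    }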

View File

@@ -14,14 +14,14 @@ import (
     "git.metabarcoding.org/lecasofts/go/obitools/pkg/obiseq"
 )
-var __FILE_CHUNK_SIZE__ = 1 << 20
-type __file_chunk__ struct {
+var _FileChunkSize = 1 << 20
+type _FileChunk struct {
     raw io.Reader
     order int
 }
-func __end_of_last_entry__(buff []byte) int {
+func _EndOfLastEntry(buff []byte) int {
     // 6 5 43 2 1
     // <CR>?<LF>//<CR>?<LF>
     var i int
@@ -76,22 +76,22 @@ func __end_of_last_entry__(buff []byte) int {
     if i > 0 {
         return start
-    } else {
-        return -1
     }
+    return -1
 }
-func __parse_embl_file__(input <-chan __file_chunk__, out obiseq.IBioSequenceBatch) {
+func _ParseEmblFile(input <-chan _FileChunk, out obiseq.IBioSequenceBatch) {
     for chunks := range input {
         scanner := bufio.NewScanner(chunks.raw)
         order := chunks.order
         sequences := make(obiseq.BioSequenceSlice, 0, 100)
         id := ""
-        scientific_name := ""
-        def_bytes := new(bytes.Buffer)
-        feat_bytes := new(bytes.Buffer)
-        seq_bytes := new(bytes.Buffer)
+        scientificName := ""
+        defBytes := new(bytes.Buffer)
+        featBytes := new(bytes.Buffer)
+        seqBytes := new(bytes.Buffer)
         taxid := 1
         for scanner.Scan() {
@@ -101,43 +101,43 @@ func __parse_embl_file__(input <-chan __file_chunk__, out obiseq.IBioSequenceBat
             case strings.HasPrefix(line, "ID "):
                 id = strings.SplitN(line[5:], ";", 2)[0]
             case strings.HasPrefix(line, "OS "):
-                scientific_name = strings.TrimSpace(line[5:])
+                scientificName = strings.TrimSpace(line[5:])
             case strings.HasPrefix(line, "DE "):
-                if def_bytes.Len() > 0 {
-                    def_bytes.WriteByte(' ')
+                if defBytes.Len() > 0 {
+                    defBytes.WriteByte(' ')
                 }
-                def_bytes.WriteString(strings.TrimSpace(line[5:]))
+                defBytes.WriteString(strings.TrimSpace(line[5:]))
             case strings.HasPrefix(line, "FH "):
-                feat_bytes.WriteString(line)
+                featBytes.WriteString(line)
             case line == "FH":
-                feat_bytes.WriteByte('\n')
-                feat_bytes.WriteString(line)
+                featBytes.WriteByte('\n')
+                featBytes.WriteString(line)
             case strings.HasPrefix(line, "FT "):
-                feat_bytes.WriteByte('\n')
-                feat_bytes.WriteString(line)
+                featBytes.WriteByte('\n')
+                featBytes.WriteString(line)
                 if strings.HasPrefix(line, `FT /db_xref="taxon:`) {
                     taxid, _ = strconv.Atoi(strings.SplitN(line[37:], `"`, 2)[0])
                 }
             case strings.HasPrefix(line, " "):
                 parts := strings.SplitN(line[5:], " ", 7)
                 for i := 0; i < 6; i++ {
-                    seq_bytes.WriteString(parts[i])
+                    seqBytes.WriteString(parts[i])
                 }
             case line == "//":
                 sequence := obiseq.MakeBioSequence(id,
-                    seq_bytes.Bytes(),
-                    def_bytes.String())
-                sequence.SetFeatures(feat_bytes.String())
+                    seqBytes.Bytes(),
+                    defBytes.String())
+                sequence.SetFeatures(featBytes.String())
                 annot := sequence.Annotations()
-                annot["scientific_name"] = scientific_name
+                annot["scientific_name"] = scientificName
                 annot["taxid"] = taxid
                 // log.Println(FormatFasta(sequence, FormatFastSeqJsonHeader))
                 sequences = append(sequences, sequence)
-                def_bytes = new(bytes.Buffer)
-                feat_bytes = new(bytes.Buffer)
-                seq_bytes = new(bytes.Buffer)
+                defBytes = new(bytes.Buffer)
+                featBytes = new(bytes.Buffer)
+                seqBytes = new(bytes.Buffer)
             }
         }
         out.Channel() <- obiseq.MakeBioSequenceBatch(order, sequences...)
@@ -148,7 +148,7 @@ func __parse_embl_file__(input <-chan __file_chunk__, out obiseq.IBioSequenceBat
 }
-func __read_flat_file_chunk__(reader io.Reader, readers chan __file_chunk__) {
+func _ReadFlatFileChunk(reader io.Reader, readers chan _FileChunk) {
     var err error
     var buff []byte
@@ -162,13 +162,13 @@ func __read_flat_file_chunk__(reader io.Reader, readers chan __file_chunk__) {
             size, err = reader.Read(buff[l:])
         }
         buff = buff[:l]
-        end := __end_of_last_entry__(buff)
+        end := _EndOfLastEntry(buff)
         remains := buff[end:]
         buff = buff[:end]
         io := bytes.NewBuffer(buff)
-        readers <- __file_chunk__{io, i}
+        readers <- _FileChunk{io, i}
         i++
-        buff = make([]byte, __FILE_CHUNK_SIZE__)
+        buff = make([]byte, _FileChunkSize)
         copy(buff, remains)
         l = len(remains)
     }
@@ -180,29 +180,29 @@ func __read_flat_file_chunk__(reader io.Reader, readers chan __file_chunk__) {
 // <CR>?<LF>//<CR>?<LF>
 func ReadEMBLBatch(reader io.Reader, options ...WithOption) obiseq.IBioSequenceBatch {
     opt := MakeOptions(options)
-    entry_channel := make(chan __file_chunk__, opt.BufferSize())
-    new_iter := obiseq.MakeIBioSequenceBatch(opt.BufferSize())
-    // new_iter.Add(opt.ParallelWorkers())
-    new_iter.Add(2)
+    entry_channel := make(chan _FileChunk, opt.BufferSize())
+    newIter := obiseq.MakeIBioSequenceBatch(opt.BufferSize())
+    // newIter.Add(opt.ParallelWorkers())
+    newIter.Add(2)
     go func() {
-        new_iter.Wait()
-        for len(new_iter.Channel()) > 0 {
+        newIter.Wait()
+        for len(newIter.Channel()) > 0 {
             time.Sleep(time.Millisecond)
         }
-        close(new_iter.Channel())
+        close(newIter.Channel())
     }()
     // for j := 0; j < opt.ParallelWorkers(); j++ {
     for j := 0; j < 2; j++ {
-        go __parse_embl_file__(entry_channel, new_iter)
+        go _ParseEmblFile(entry_channel, newIter)
     }
-    go __read_flat_file_chunk__(reader, entry_channel)
-    return new_iter
+    go _ReadFlatFileChunk(reader, entry_channel)
+    return newIter
 }
 func ReadEMBL(reader io.Reader, options ...WithOption) obiseq.IBioSequence {
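ReadEMBLBatch above wires three pieces together: _ReadFlatFileChunk cuts the input into roughly _FileChunkSize-byte chunks that always end on a complete EMBL entry (using _EndOfLastEntry to locate the last <CR>?<LF>//<CR>?<LF> terminator and carrying the leftover bytes into the next chunk), and two _ParseEmblFile goroutines turn those chunks into sequence batches. A simplified, self-contained sketch of that chunk-boundary logic; unlike the real scanner it only recognises a plain "\n//\n" terminator, and its exact return convention is an assumption:

    package sketch

    import "bytes"

    // endOfLastEntry returns the offset just past the last complete EMBL entry
    // in buff (i.e. after the final "//" line), or -1 when no terminator exists.
    func endOfLastEntry(buff []byte) int {
        i := bytes.LastIndex(buff, []byte("\n//\n"))
        if i < 0 {
            return -1
        }
        return i + len("\n//\n")
    }

    // splitChunk trims a raw read buffer to the last entry boundary and returns
    // the leftover bytes, which the caller prepends to the next chunk it reads.
    func splitChunk(buff []byte) (chunk, remains []byte) {
        end := endOfLastEntry(buff)
        if end < 0 {
            return nil, buff
        }
        return buff[:end], buff[end:]
    }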

View File

@@ -101,28 +101,28 @@ func ReadFastSeqBatchFromFile(filename string, options ...WithOption) (obiseq.IB
         size = -1
     }
-    new_iter := obiseq.MakeIBioSequenceBatch(opt.BufferSize())
-    new_iter.Add(1)
+    newIter := obiseq.MakeIBioSequenceBatch(opt.BufferSize())
+    newIter.Add(1)
     go func() {
-        new_iter.Wait()
-        for len(new_iter.Channel()) > 0 {
+        newIter.Wait()
+        for len(newIter.Channel()) > 0 {
             time.Sleep(time.Millisecond)
         }
-        close(new_iter.Channel())
+        close(newIter.Channel())
         log.Println("End of the fastq file reading")
     }()
     log.Println("Start of the fastq file reading")
-    go __fastseq_reader__(pointer, new_iter, opt.BatchSize())
+    go __fastseq_reader__(pointer, newIter, opt.BatchSize())
     parser := opt.ParseFastSeqHeader()
     if parser != nil {
-        return IParseFastSeqHeaderBatch(new_iter, options...), err
+        return IParseFastSeqHeaderBatch(newIter, options...), err
     }
-    return new_iter, err
+    return newIter, err
 }
 func ReadFastSeqFromFile(filename string, options ...WithOption) (obiseq.IBioSequence, error) {
@@ -132,18 +132,18 @@ func ReadFastSeqFromFile(filename string, options ...WithOption) (obiseq.IBioSeq
 func ReadFastSeqBatchFromStdin(options ...WithOption) obiseq.IBioSequenceBatch {
     opt := MakeOptions(options)
-    new_iter := obiseq.MakeIBioSequenceBatch(opt.BufferSize())
-    new_iter.Add(1)
+    newIter := obiseq.MakeIBioSequenceBatch(opt.BufferSize())
+    newIter.Add(1)
     go func() {
-        new_iter.Wait()
-        close(new_iter.Channel())
+        newIter.Wait()
+        close(newIter.Channel())
     }()
-    go __fastseq_reader__(C.open_fast_sek_stdin(C.int32_t(opt.QualityShift())), new_iter, opt.BatchSize())
-    return new_iter
+    go __fastseq_reader__(C.open_fast_sek_stdin(C.int32_t(opt.QualityShift())), newIter, opt.BatchSize())
+    return newIter
 }
 func ReadFastSeqFromStdin(options ...WithOption) obiseq.IBioSequence {

View File

@@ -84,7 +84,7 @@ func WriteFastaToStdout(iterator obiseq.IBioSequence, options ...WithOption) err
 func WriteFastaBatch(iterator obiseq.IBioSequenceBatch, file io.Writer, options ...WithOption) error {
     buffsize := iterator.BufferSize()
-    new_iter := obiseq.MakeIBioSequenceBatch(buffsize)
+    newIter := obiseq.MakeIBioSequenceBatch(buffsize)
     opt := MakeOptions(options)
     nwriters := 4
@@ -111,9 +111,9 @@ func WriteFastaBatch(iterator obiseq.IBioSequenceBatch, file io.Writer, options
                 FormatFastaBatch(batch, header_format),
                 batch.Order(),
             }
-            new_iter.Channel() <- batch
+            newIter.Channel() <- batch
         }
-        new_iter.Done()
+        newIter.Done()
     }
     for i := 0; i < nwriters; i++ {

View File

@@ -83,7 +83,7 @@ type FileChunck struct {
 func WriteFastqBatch(iterator obiseq.IBioSequenceBatch, file io.Writer, options ...WithOption) (obiseq.IBioSequenceBatch, error) {
     buffsize := iterator.BufferSize()
-    new_iter := obiseq.MakeIBioSequenceBatch(buffsize)
+    newIter := obiseq.MakeIBioSequenceBatch(buffsize)
     opt := MakeOptions(options)
     nwriters := 4
@@ -93,18 +93,18 @@ func WriteFastqBatch(iterator obiseq.IBioSequenceBatch, file io.Writer, options
     header_format := opt.FormatFastSeqHeader()
     quality := opt.QualityShift()
-    new_iter.Add(nwriters)
+    newIter.Add(nwriters)
     go func() {
-        new_iter.Wait()
+        newIter.Wait()
         for len(chunkchan) > 0 {
             time.Sleep(time.Millisecond)
         }
         close(chunkchan)
-        for len(new_iter.Channel()) > 0 {
+        for len(newIter.Channel()) > 0 {
             time.Sleep(time.Millisecond)
         }
-        close(new_iter.Channel())
+        close(newIter.Channel())
     }()
     ff := func(iterator obiseq.IBioSequenceBatch) {
@@ -114,9 +114,9 @@ func WriteFastqBatch(iterator obiseq.IBioSequenceBatch, file io.Writer, options
                 FormatFastqBatch(batch, quality, header_format),
                 batch.Order(),
             }
-            new_iter.Channel() <- batch
+            newIter.Channel() <- batch
         }
-        new_iter.Done()
+        newIter.Done()
     }
     log.Println("Start of the fastq file reading")
@@ -146,7 +146,7 @@ func WriteFastqBatch(iterator obiseq.IBioSequenceBatch, file io.Writer, options
         }
     }()
-    return new_iter, nil
+    return newIter, nil
 }
 func WriteFastqBatchToStdout(iterator obiseq.IBioSequenceBatch, options ...WithOption) (obiseq.IBioSequenceBatch, error) {

View File

@@ -103,8 +103,8 @@ func (iterator IBioSequenceBatch) Split() IBioSequenceBatch {
         buffer_size: iterator.pointer.buffer_size,
         finished: false,
         p_finished: iterator.pointer.p_finished}
-    new_iter := IBioSequenceBatch{&i}
-    return new_iter
+    newIter := IBioSequenceBatch{&i}
+    return newIter
 }
 func (iterator IBioSequenceBatch) Next() bool {
@@ -144,13 +144,13 @@ func (iterator IBioSequenceBatch) IBioSequence(sizes ...int) IBioSequence {
         buffsize = sizes[0]
     }
-    new_iter := MakeIBioSequence(buffsize)
-    new_iter.Add(1)
+    newIter := MakeIBioSequence(buffsize)
+    newIter.Add(1)
     go func() {
-        new_iter.Wait()
-        close(new_iter.pointer.channel)
+        newIter.Wait()
+        close(newIter.pointer.channel)
     }()
     go func() {
@@ -158,13 +158,13 @@ func (iterator IBioSequenceBatch) IBioSequence(sizes ...int) IBioSequence {
             batch := iterator.Get()
             for _, s := range batch.slice {
-                new_iter.pointer.channel <- s
+                newIter.pointer.channel <- s
             }
         }
-        new_iter.Done()
+        newIter.Done()
     }()
-    return new_iter
+    return newIter
 }
 func (iterator IBioSequenceBatch) SortBatches(sizes ...int) IBioSequenceBatch {
@@ -174,13 +174,13 @@ func (iterator IBioSequenceBatch) SortBatches(sizes ...int) IBioSequenceBatch {
         buffsize = sizes[0]
     }
-    new_iter := MakeIBioSequenceBatch(buffsize)
-    new_iter.Add(1)
+    newIter := MakeIBioSequenceBatch(buffsize)
+    newIter.Add(1)
     go func() {
-        new_iter.Wait()
-        close(new_iter.pointer.channel)
+        newIter.Wait()
+        close(newIter.pointer.channel)
     }()
     next_to_send := 0
@@ -189,11 +189,11 @@ func (iterator IBioSequenceBatch) SortBatches(sizes ...int) IBioSequenceBatch {
         for iterator.Next() {
             batch := iterator.Get()
             if batch.order == next_to_send {
-                new_iter.pointer.channel <- batch
+                newIter.pointer.channel <- batch
                 next_to_send++
                 batch, ok := received[next_to_send]
                 for ok {
-                    new_iter.pointer.channel <- batch
+                    newIter.pointer.channel <- batch
                     delete(received, next_to_send)
                     next_to_send++
                     batch, ok = received[next_to_send]
@@ -202,10 +202,10 @@ func (iterator IBioSequenceBatch) SortBatches(sizes ...int) IBioSequenceBatch {
                 received[batch.order] = batch
             }
         }
-        new_iter.Done()
+        newIter.Done()
     }()
-    return new_iter
+    return newIter
 }
@@ -216,13 +216,13 @@ func (iterator IBioSequenceBatch) Concat(iterators ...IBioSequenceBatch) IBioSeq
     }
     buffsize := iterator.BufferSize()
-    new_iter := MakeIBioSequenceBatch(buffsize)
-    new_iter.Add(1)
+    newIter := MakeIBioSequenceBatch(buffsize)
+    newIter.Add(1)
     go func() {
-        new_iter.Wait()
-        close(new_iter.Channel())
+        newIter.Wait()
+        close(newIter.Channel())
     }()
     go func() {
@@ -234,7 +234,7 @@ func (iterator IBioSequenceBatch) Concat(iterators ...IBioSequenceBatch) IBioSeq
             if s.order > max_order {
                 max_order = s.order
             }
-            new_iter.Channel() <- MakeBioSequenceBatch(s.order+previous_max, s.slice...)
+            newIter.Channel() <- MakeBioSequenceBatch(s.order+previous_max, s.slice...)
         }
         previous_max = max_order + 1
@@ -245,14 +245,14 @@ func (iterator IBioSequenceBatch) Concat(iterators ...IBioSequenceBatch) IBioSeq
                     max_order = s.order + previous_max
                 }
-                new_iter.Channel() <- MakeBioSequenceBatch(s.order+previous_max, s.slice...)
+                newIter.Channel() <- MakeBioSequenceBatch(s.order+previous_max, s.slice...)
             }
             previous_max = max_order + 1
         }
-        new_iter.Done()
+        newIter.Done()
     }()
-    return new_iter
+    return newIter
 }
 // Redistributes sequences from a IBioSequenceBatch into a new
@@ -266,13 +266,13 @@ func (iterator IBioSequenceBatch) Rebatch(size int, sizes ...int) IBioSequenceBa
         buffsize = sizes[0]
     }
-    new_iter := MakeIBioSequenceBatch(buffsize)
-    new_iter.Add(1)
+    newIter := MakeIBioSequenceBatch(buffsize)
+    newIter.Add(1)
     go func() {
-        new_iter.Wait()
-        close(new_iter.pointer.channel)
+        newIter.Wait()
+        close(newIter.pointer.channel)
     }()
     go func() {
@@ -285,7 +285,7 @@ func (iterator IBioSequenceBatch) Rebatch(size int, sizes ...int) IBioSequenceBa
             for _, s := range seqs.slice {
                 buffer = append(buffer, s)
                 if len(buffer) == size {
-                    new_iter.Channel() <- MakeBioSequenceBatch(order, buffer...)
+                    newIter.Channel() <- MakeBioSequenceBatch(order, buffer...)
                     order++
                     buffer = make(BioSequenceSlice, 0, size)
                 }
@@ -293,14 +293,14 @@ func (iterator IBioSequenceBatch) Rebatch(size int, sizes ...int) IBioSequenceBa
         }
         if len(buffer) > 0 {
-            new_iter.Channel() <- MakeBioSequenceBatch(order, buffer...)
+            newIter.Channel() <- MakeBioSequenceBatch(order, buffer...)
         }
-        new_iter.Done()
+        newIter.Done()
     }()
-    return new_iter
+    return newIter
 }
 func (iterator IBioSequenceBatch) Destroy() {
@@ -331,13 +331,13 @@ func (iterator IBioSequenceBatch) PairWith(reverse IBioSequenceBatch, sizes ...i
     iterator = iterator.Rebatch(batchsize)
     reverse = reverse.Rebatch(batchsize)
-    new_iter := MakeIPairedBioSequenceBatch(buffsize)
-    new_iter.Add(1)
+    newIter := MakeIPairedBioSequenceBatch(buffsize)
+    newIter.Add(1)
     go func() {
-        new_iter.Wait()
-        close(new_iter.pointer.channel)
+        newIter.Wait()
+        close(newIter.pointer.channel)
         log.Println("End of association of paired reads")
     }()
@@ -347,12 +347,12 @@ func (iterator IBioSequenceBatch) PairWith(reverse IBioSequenceBatch, sizes ...i
             if !reverse.Next() {
                 log.Panicln("Etrange reverse pas prêt")
             }
-            new_iter.Channel() <- MakePairedBioSequenceBatch(iterator.Get(),
+            newIter.Channel() <- MakePairedBioSequenceBatch(iterator.Get(),
                 reverse.Get())
         }
-        new_iter.Done()
+        newIter.Done()
     }()
-    return new_iter
+    return newIter
 }
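SortBatches above restores the original batch order after concurrent processing: a batch whose order number is not yet expected is parked in a map, and each time the expected batch arrives, the map is drained for as long as consecutive order numbers are available. A self-contained sketch of that reordering loop over a stand-in batch type:

    package sketch

    type orderedBatch struct {
        order int
        seqs  []string
    }

    // reorder forwards batches in strictly increasing order, buffering any
    // batch that arrives early, as IBioSequenceBatch.SortBatches does.
    func reorder(in <-chan orderedBatch) <-chan orderedBatch {
        out := make(chan orderedBatch)
        go func() {
            defer close(out)
            nextToSend := 0
            received := make(map[int]orderedBatch)
            for b := range in {
                if b.order != nextToSend {
                    received[b.order] = b
                    continue
                }
                out <- b
                nextToSend++
                for {
                    next, ok := received[nextToSend]
                    if !ok {
                        break
                    }
                    out <- next
                    delete(received, nextToSend)
                    nextToSend++
                }
            }
        }()
        return out
    }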

View File

@@ -73,8 +73,8 @@ func (iterator IBioSequence) Split() IBioSequence {
         all_done: iterator.pointer.all_done,
         buffer_size: iterator.pointer.buffer_size,
         p_finished: iterator.pointer.p_finished}
-    new_iter := IBioSequence{&i}
-    return new_iter
+    newIter := IBioSequence{&i}
+    return newIter
 }
 func (iterator IBioSequence) Next() bool {
@@ -132,16 +132,16 @@ func (iterator IBioSequence) IBioSequenceBatch(sizes ...int) IBioSequenceBatch {
         buffsize = sizes[1]
     }
-    new_iter := MakeIBioSequenceBatch(buffsize)
-    new_iter.Add(1)
+    newIter := MakeIBioSequenceBatch(buffsize)
+    newIter.Add(1)
     go func() {
-        new_iter.Wait()
-        for len(new_iter.Channel()) > 0 {
+        newIter.Wait()
+        for len(newIter.Channel()) > 0 {
             time.Sleep(time.Millisecond)
         }
-        close(new_iter.pointer.channel)
+        close(newIter.pointer.channel)
     }()
     go func() {
@@ -153,12 +153,12 @@ func (iterator IBioSequence) IBioSequenceBatch(sizes ...int) IBioSequenceBatch {
                 seq := iterator.Get()
                 batch.slice = append(batch.slice, seq)
             }
-            new_iter.pointer.channel <- batch
+            newIter.pointer.channel <- batch
         }
-        new_iter.Done()
+        newIter.Done()
     }()
-    return new_iter
+    return newIter
 }
 func (iterator IBioSequence) IBioSequence(sizes ...int) IBioSequence {
@@ -168,24 +168,24 @@ func (iterator IBioSequence) IBioSequence(sizes ...int) IBioSequence {
         buffsize = sizes[0]
     }
-    new_iter := MakeIBioSequence(buffsize)
-    new_iter.Add(1)
+    newIter := MakeIBioSequence(buffsize)
+    newIter.Add(1)
     go func() {
-        new_iter.Wait()
-        close(new_iter.pointer.channel)
+        newIter.Wait()
+        close(newIter.pointer.channel)
     }()
     go func() {
         for iterator.Next() {
             s := iterator.Get()
-            new_iter.pointer.channel <- s
+            newIter.pointer.channel <- s
         }
-        new_iter.Done()
+        newIter.Done()
     }()
-    return new_iter
+    return newIter
 }
 func (iterator IBioSequence) Skip(n int, sizes ...int) IBioSequence {
@@ -195,26 +195,26 @@ func (iterator IBioSequence) Skip(n int, sizes ...int) IBioSequence {
         buffsize = sizes[0]
     }
-    new_iter := MakeIBioSequence(buffsize)
-    new_iter.Add(1)
+    newIter := MakeIBioSequence(buffsize)
+    newIter.Add(1)
     go func() {
-        new_iter.Wait()
-        close(new_iter.pointer.channel)
+        newIter.Wait()
+        close(newIter.pointer.channel)
     }()
     go func() {
         for i := 0; iterator.Next(); i++ {
             if i >= n {
                 s := iterator.Get()
-                new_iter.pointer.channel <- s
+                newIter.pointer.channel <- s
             }
         }
-        new_iter.Done()
+        newIter.Done()
     }()
-    return new_iter
+    return newIter
 }
 func (iterator IBioSequence) Head(n int, sizes ...int) IBioSequence {
@@ -224,13 +224,13 @@ func (iterator IBioSequence) Head(n int, sizes ...int) IBioSequence {
         buffsize = sizes[0]
     }
-    new_iter := MakeIBioSequence(buffsize)
-    new_iter.Add(1)
+    newIter := MakeIBioSequence(buffsize)
+    newIter.Add(1)
     go func() {
-        new_iter.Wait()
-        close(new_iter.pointer.channel)
+        newIter.Wait()
+        close(newIter.pointer.channel)
     }()
     go func() {
@@ -238,17 +238,17 @@ func (iterator IBioSequence) Head(n int, sizes ...int) IBioSequence {
         for i := 0; iterator.Next(); i++ {
             if i < n {
                 s := iterator.Get()
-                new_iter.pointer.channel <- s
+                newIter.pointer.channel <- s
             } else {
                 if not_done {
-                    new_iter.Done()
+                    newIter.Done()
                     not_done = false
                 }
             }
         }
     }()
-    return new_iter
+    return newIter
 }
 // The 'Tail' method discard every data from the source iterator
@@ -260,14 +260,14 @@ func (iterator IBioSequence) Tail(n int, sizes ...int) IBioSequence {
         buffsize = sizes[0]
     }
-    new_iter := MakeIBioSequence(buffsize)
+    newIter := MakeIBioSequence(buffsize)
     buffseq := make(BioSequenceSlice, n)
-    new_iter.Add(1)
+    newIter.Add(1)
     go func() {
-        new_iter.Wait()
-        close(new_iter.pointer.channel)
+        newIter.Wait()
+        close(newIter.pointer.channel)
     }()
     go func() {
@@ -277,18 +277,18 @@ func (iterator IBioSequence) Tail(n int, sizes ...int) IBioSequence {
         }
         if i > n {
             for j := 0; j < n; j++ {
-                new_iter.Channel() <- buffseq[(i+j)%n]
+                newIter.Channel() <- buffseq[(i+j)%n]
             }
         } else {
             for j := 0; j < i; j++ {
-                new_iter.Channel() <- buffseq[j]
+                newIter.Channel() <- buffseq[j]
            }
         }
-        new_iter.Done()
+        newIter.Done()
     }()
-    return new_iter
+    return newIter
 }
 func (iterator IBioSequence) Concat(iterators ...IBioSequence) IBioSequence {
@@ -298,29 +298,29 @@ func (iterator IBioSequence) Concat(iterators ...IBioSequence) IBioSequence {
     }
     buffsize := iterator.BufferSize()
-    new_iter := MakeIBioSequence(buffsize)
-    new_iter.Add(1)
+    newIter := MakeIBioSequence(buffsize)
+    newIter.Add(1)
     go func() {
-        new_iter.Wait()
-        close(new_iter.pointer.channel)
+        newIter.Wait()
+        close(newIter.pointer.channel)
     }()
     go func() {
         for iterator.Next() {
             s := iterator.Get()
-            new_iter.pointer.channel <- s
+            newIter.pointer.channel <- s
         }
         for _, iter := range iterators {
             for iter.Next() {
                 s := iter.Get()
-                new_iter.pointer.channel <- s
+                newIter.pointer.channel <- s
             }
         }
-        new_iter.Done()
+        newIter.Done()
     }()
-    return new_iter
+    return newIter
 }
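Tail above keeps only the last n sequences by writing every incoming sequence into a fixed slice used as a ring buffer; when the source is exhausted it replays the buffer starting at the oldest surviving element (buffseq[(i+j)%n]). A minimal sketch of the same bookkeeping over a plain string channel (assumes n > 0, as the real method implicitly does):

    package sketch

    // tail returns the last n items read from in, in arrival order, using a
    // ring buffer indexed like buffseq[(i+j)%n] in IBioSequence.Tail.
    func tail(in <-chan string, n int) []string {
        ring := make([]string, n)
        i := 0
        for s := range in {
            ring[i%n] = s
            i++
        }
        if i <= n { // buffer never wrapped: positions 0..i-1 are already ordered
            return append([]string(nil), ring[:i]...)
        }
        out := make([]string, 0, n)
        for j := 0; j < n; j++ { // the oldest element sits at index i%n
            out = append(out, ring[(i+j)%n])
        }
        return out
    }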

View File

@@ -119,8 +119,8 @@ func (iterator IPairedBioSequenceBatch) Split() IPairedBioSequenceBatch {
         buffer_size: iterator.pointer.buffer_size,
         finished: false,
         p_finished: iterator.pointer.p_finished}
-    new_iter := IPairedBioSequenceBatch{&i}
-    return new_iter
+    newIter := IPairedBioSequenceBatch{&i}
+    return newIter
 }
 func (iterator IPairedBioSequenceBatch) Next() bool {
@@ -160,13 +160,13 @@ func (iterator IPairedBioSequenceBatch) SortBatches(sizes ...int) IPairedBioSequ
         buffsize = sizes[0]
     }
-    new_iter := MakeIPairedBioSequenceBatch(buffsize)
-    new_iter.Add(1)
+    newIter := MakeIPairedBioSequenceBatch(buffsize)
+    newIter.Add(1)
     go func() {
-        new_iter.Wait()
-        close(new_iter.pointer.channel)
+        newIter.Wait()
+        close(newIter.pointer.channel)
     }()
     next_to_send := 0
@@ -175,11 +175,11 @@ func (iterator IPairedBioSequenceBatch) SortBatches(sizes ...int) IPairedBioSequ
         for iterator.Next() {
             batch := iterator.Get()
             if batch.order == next_to_send {
-                new_iter.pointer.channel <- batch
+                newIter.pointer.channel <- batch
                 next_to_send++
                 batch, ok := received[next_to_send]
                 for ok {
-                    new_iter.pointer.channel <- batch
+                    newIter.pointer.channel <- batch
                     delete(received, next_to_send)
                     next_to_send++
                     batch, ok = received[next_to_send]
@@ -188,9 +188,9 @@ func (iterator IPairedBioSequenceBatch) SortBatches(sizes ...int) IPairedBioSequ
                 received[batch.order] = batch
             }
         }
-        new_iter.Done()
+        newIter.Done()
     }()
-    return new_iter
+    return newIter
 }

View File

@@ -25,25 +25,25 @@ func (iterator IBioSequence) MakeIWorker(worker SeqWorker, sizes ...int) IBioSeq
         buffsize = sizes[0]
     }
-    new_iter := MakeIBioSequence(buffsize)
-    new_iter.Add(1)
+    newIter := MakeIBioSequence(buffsize)
+    newIter.Add(1)
     go func() {
-        new_iter.Wait()
-        close(new_iter.pointer.channel)
+        newIter.Wait()
+        close(newIter.pointer.channel)
     }()
     go func() {
         for iterator.Next() {
             seq := iterator.Get()
             seq = worker(seq)
-            new_iter.pointer.channel <- seq
+            newIter.pointer.channel <- seq
         }
-        new_iter.Done()
+        newIter.Done()
     }()
-    return new_iter
+    return newIter
 }
 func (iterator IBioSequenceBatch) MakeIWorker(worker SeqWorker, sizes ...int) IBioSequenceBatch {
@@ -58,16 +58,16 @@ func (iterator IBioSequenceBatch) MakeIWorker(worker SeqWorker, sizes ...int) IB
         buffsize = sizes[1]
     }
-    new_iter := MakeIBioSequenceBatch(buffsize)
-    new_iter.Add(nworkers)
+    newIter := MakeIBioSequenceBatch(buffsize)
+    newIter.Add(nworkers)
     go func() {
-        new_iter.Wait()
-        for len(new_iter.Channel()) > 0 {
+        newIter.Wait()
+        for len(newIter.Channel()) > 0 {
             time.Sleep(time.Millisecond)
         }
-        close(new_iter.pointer.channel)
+        close(newIter.pointer.channel)
         log.Println("End of the batch workers")
     }()
@@ -78,9 +78,9 @@ func (iterator IBioSequenceBatch) MakeIWorker(worker SeqWorker, sizes ...int) IB
             for i, seq := range batch.slice {
                 batch.slice[i] = worker(seq)
             }
-            new_iter.pointer.channel <- batch
+            newIter.pointer.channel <- batch
         }
-        new_iter.Done()
+        newIter.Done()
     }
     log.Println("Start of the batch workers")
@@ -88,7 +88,7 @@ func (iterator IBioSequenceBatch) MakeIWorker(worker SeqWorker, sizes ...int) IB
         go f(iterator.Split())
     }
-    return new_iter
+    return newIter
 }
 func (iterator IBioSequenceBatch) MakeISliceWorker(worker SeqSliceWorker, sizes ...int) IBioSequenceBatch {
@@ -103,16 +103,16 @@ func (iterator IBioSequenceBatch) MakeISliceWorker(worker SeqSliceWorker, sizes
         buffsize = sizes[1]
     }
-    new_iter := MakeIBioSequenceBatch(buffsize)
-    new_iter.Add(nworkers)
+    newIter := MakeIBioSequenceBatch(buffsize)
+    newIter.Add(nworkers)
     go func() {
-        new_iter.Wait()
-        for len(new_iter.Channel()) > 0 {
+        newIter.Wait()
+        for len(newIter.Channel()) > 0 {
             time.Sleep(time.Millisecond)
         }
-        close(new_iter.pointer.channel)
+        close(newIter.pointer.channel)
         log.Println("End of the batch slice workers")
     }()
@@ -120,9 +120,9 @@ func (iterator IBioSequenceBatch) MakeISliceWorker(worker SeqSliceWorker, sizes
         for iterator.Next() {
             batch := iterator.Get()
             batch.slice = worker(batch.slice)
-            new_iter.pointer.channel <- batch
+            newIter.pointer.channel <- batch
         }
-        new_iter.Done()
+        newIter.Done()
     }
     log.Println("Start of the batch slice workers")
@@ -130,5 +130,5 @@ func (iterator IBioSequenceBatch) MakeISliceWorker(worker SeqSliceWorker, sizes
         go f(iterator.Split())
     }
-    return new_iter
+    return newIter
 }
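MakeIWorker and MakeISliceWorker above fan the work out over nworkers goroutines, each consuming its own Split() view of the source iterator, and close the shared output only after every worker has called Done() and the channel has drained. A condensed sketch of that fan-out using plain channels and stand-in types (not the real obiseq API):

    package sketch

    import "sync"

    // seqWorker plays the role of obiseq.SeqWorker: it rewrites one sequence.
    type seqWorker func(string) string

    // makeWorker applies w to every sequence of every batch with nworkers
    // goroutines and closes the output once they have all finished.
    func makeWorker(in <-chan []string, w seqWorker, nworkers, buffer int) <-chan []string {
        out := make(chan []string, buffer)
        var wg sync.WaitGroup
        wg.Add(nworkers)

        go func() { // closer
            wg.Wait()
            close(out)
        }()

        for i := 0; i < nworkers; i++ {
            go func() {
                defer wg.Done()
                for batch := range in { // sharing one input channel replaces Split()
                    for j, s := range batch {
                        batch[j] = w(s)
                    }
                    out <- batch
                }
            }()
        }
        return out
    }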

View File

@@ -19,7 +19,7 @@ func (taxonomy *Taxonomy) IFilterOnName(name string, strict bool) *ITaxonSet {
 }
 func (iterator *ITaxonSet) IFilterOnName(name string, strict bool) *ITaxonSet {
-    new_iterator := NewITaxonSet()
+    newIterator := NewITaxonSet()
     sentTaxa := make(map[int]bool)
     if strict {
@@ -29,11 +29,11 @@ func (iterator *ITaxonSet) IFilterOnName(name string, strict bool) *ITaxonSet {
                 if _, ok := sentTaxa[taxon.taxid]; !ok {
                     if taxon.IsNameEqual(name) {
                         sentTaxa[taxon.taxid] = true
-                        new_iterator.source <- taxon
+                        newIterator.source <- taxon
                     }
                 }
             }
-            close(new_iterator.source)
+            close(newIterator.source)
         }()
     } else {
         pattern := regexp.MustCompile(name)
@@ -44,13 +44,13 @@ func (iterator *ITaxonSet) IFilterOnName(name string, strict bool) *ITaxonSet {
                 if _, ok := sentTaxa[taxon.taxid]; !ok {
                     if taxon.IsNameMatching(pattern) {
                         sentTaxa[taxon.taxid] = true
-                        new_iterator.source <- taxon
+                        newIterator.source <- taxon
                     }
                 }
             }
-            close(new_iterator.source)
+            close(newIterator.source)
         }()
     }
-    return new_iterator
+    return newIterator
 }

View File

@@ -1,19 +1,19 @@
 package obitax
 func (iterator *ITaxonSet) IFilterOnTaxRank(rank string) *ITaxonSet {
-    new_iter := NewITaxonSet()
+    newIter := NewITaxonSet()
     go func() {
         for iterator.Next() {
             taxon := iterator.Get()
             if taxon.rank == rank {
-                new_iter.source <- taxon
+                newIter.source <- taxon
             }
         }
-        close(new_iter.source)
+        close(newIter.source)
     }()
-    return new_iter
+    return newIter
 }
 func (set *TaxonSet) IFilterOnTaxRank(rank string) *ITaxonSet {

View File

@@ -3,19 +3,19 @@ package obitax
 import "reflect"
 func (iterator *ITaxonSet) IFilterOnSubcladeOf(taxon *TaxNode) *ITaxonSet {
-    new_iter := NewITaxonSet()
+    newIter := NewITaxonSet()
     go func() {
         for iterator.Next() {
             tx := iterator.Get()
             if tx.IsSubCladeOf(taxon) {
-                new_iter.source <- tx
+                newIter.source <- tx
             }
         }
-        close(new_iter.source)
+        close(newIter.source)
     }()
-    return new_iter
+    return newIter
 }
 func (set *TaxonSet) IFilterOnSubcladeOf(taxon *TaxNode) *ITaxonSet {
@@ -43,17 +43,17 @@ func (iterator *ITaxonSet) IFilterBelongingSubclades(clades *TaxonSet) *ITaxonSe
         return iterator.IFilterOnSubcladeOf((*clades)[int(keys[0].Int())])
     }
-    new_iter := NewITaxonSet()
+    newIter := NewITaxonSet()
     go func() {
         for iterator.Next() {
             tx := iterator.Get()
             if tx.IsBelongingSubclades(clades) {
-                new_iter.source <- tx
+                newIter.source <- tx
             }
         }
-        close(new_iter.source)
+        close(newIter.source)
     }()
-    return new_iter
+    return newIter
 }

View File

@@ -74,8 +74,8 @@ func (iterator *ITaxonSet) Finished() bool {
 }
 func (iterator *ITaxonSet) Split() *ITaxonSet {
-    new_iter := ITaxonSet{iterator.source, nil, false, iterator.p_finished}
-    return &new_iter
+    newIter := ITaxonSet{iterator.source, nil, false, iterator.p_finished}
+    return &newIter
 }
 func (iterator *ITaxonSet) TaxonSet() *TaxonSet {

View File

@@ -6,32 +6,32 @@ import (
     "github.com/DavidGamba/go-getoptions"
 )
-var __forward_files__ = make([]string, 0, 10)
-var __reverse_files__ = make([]string, 0, 10)
-var __delta__ = 5
-var __min_overlap__ = 20
-var __gap_penality__ = 2
-var __without_stats__ = false
+var _ForwardFiles = make([]string, 0, 10)
+var _ReverseFiles = make([]string, 0, 10)
+var _Delta = 5
+var _MinOverlap = 20
+var _GapPenality = 2
+var _WithoutStats = false
 func PairingOptionSet(options *getoptions.GetOpt) {
-    options.StringSliceVar(&__forward_files__, "forward-reads",
+    options.StringSliceVar(&_ForwardFiles, "forward-reads",
         1, 1000,
         options.Alias("F"),
         options.Description("The file names containing the forward reads"))
-    options.StringSliceVar(&__reverse_files__, "reverse-reads",
+    options.StringSliceVar(&_ReverseFiles, "reverse-reads",
        1, 1000,
        options.Alias("R"),
        options.Description("The file names containing the reverse reads"))
-    options.IntVar(&__delta__, "delta", 5,
+    options.IntVar(&_Delta, "delta", 5,
        options.Alias("D"),
        options.Description("Length added to the fast detected overlap for the precise alignement (default 5)."))
-    options.IntVar(&__min_overlap__, "min-overlap", 20,
+    options.IntVar(&_MinOverlap, "min-overlap", 20,
        options.Alias("O"),
        options.Description("Minimum ovelap between both the reads to consider the aligment (default 20)."))
-    options.IntVar(&__gap_penality__, "gap-penality", 2,
+    options.IntVar(&_GapPenality, "gap-penality", 2,
        options.Alias("G"),
        options.Description("Gap penality expressed as the multiply factor applied to the mismatch score between two nucleotides with a quality of 40 (default 2)."))
-    options.BoolVar(&__without_stats__, "without-stat", false,
+    options.BoolVar(&_WithoutStats, "without-stat", false,
        options.Alias("S"),
        options.Description("Remove alignment statistics from the produced consensus sequences."))
 }
@@ -42,12 +42,12 @@ func OptionSet(options *getoptions.GetOpt) {
 }
 func IBatchPairedSequence() (obiseq.IPairedBioSequenceBatch, error) {
-    forward, err := obiconvert.ReadBioSequencesBatch(__forward_files__...)
+    forward, err := obiconvert.ReadBioSequencesBatch(_ForwardFiles...)
     if err != nil {
         return obiseq.NilIPairedBioSequenceBatch, err
     }
-    reverse, err := obiconvert.ReadBioSequencesBatch(__reverse_files__...)
+    reverse, err := obiconvert.ReadBioSequencesBatch(_ReverseFiles...)
     if err != nil {
         return obiseq.NilIPairedBioSequenceBatch, err
     }
@@ -58,17 +58,17 @@ func IBatchPairedSequence() (obiseq.IPairedBioSequenceBatch, error) {
 }
 func Delta() int {
-    return __delta__
+    return _Delta
 }
 func MinOverlap() int {
-    return __min_overlap__
+    return _MinOverlap
 }
 func GapPenality() int {
-    return __gap_penality__
+    return _GapPenality
 }
 func WithStats() bool {
-    return !__without_stats__
+    return !_WithoutStats
 }
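PairingOptionSet above registers the command-line flags into package-level variables (_ForwardFiles, _Delta, _MinOverlap, …) that the rest of the obipairing package reads back through small accessors such as Delta() and WithStats(). A hedged sketch of how such an option set is typically wired into a command with go-getoptions; the main function and reduced flag set here are illustrative, not part of the commit, and assume the library's usual New()/Parse() entry points:

    package main

    import (
        "fmt"
        "log"
        "os"

        "github.com/DavidGamba/go-getoptions"
    )

    // Package-level option storage, mirroring the style used in the diff.
    var _Delta = 5
    var _MinOverlap = 20

    // pairingOptionSet registers a reduced set of the pairing flags.
    func pairingOptionSet(options *getoptions.GetOpt) {
        options.IntVar(&_Delta, "delta", 5,
            options.Alias("D"),
            options.Description("Length added to the fast detected overlap."))
        options.IntVar(&_MinOverlap, "min-overlap", 20,
            options.Alias("O"),
            options.Description("Minimum overlap required between the reads."))
    }

    func main() {
        opt := getoptions.New()
        pairingOptionSet(opt)
        if _, err := opt.Parse(os.Args[1:]); err != nil {
            log.Fatal(err)
        }
        fmt.Println("delta:", _Delta, "min-overlap:", _MinOverlap)
    }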

View File

@@ -112,16 +112,16 @@ func IAssemblePESequencesBatch(iterator obiseq.IPairedBioSequenceBatch,
         buffsize = sizes[1]
     }
-    new_iter := obiseq.MakeIBioSequenceBatch(buffsize)
-    new_iter.Add(nworkers)
+    newIter := obiseq.MakeIBioSequenceBatch(buffsize)
+    newIter.Add(nworkers)
     go func() {
-        new_iter.Wait()
-        for len(new_iter.Channel()) > 0 {
+        newIter.Wait()
+        for len(newIter.Channel()) > 0 {
             time.Sleep(time.Millisecond)
         }
-        close(new_iter.Channel())
+        close(newIter.Channel())
         log.Printf("End of the sequence Pairing")
     }()
@@ -157,14 +157,14 @@ func IAssemblePESequencesBatch(iterator obiseq.IPairedBioSequenceBatch,
                 B.Destroy()
             }
             bar.Add(batch.Length() - processed)
-            new_iter.Channel() <- obiseq.MakeBioSequenceBatch(
+            newIter.Channel() <- obiseq.MakeBioSequenceBatch(
                 batch.Order(),
                 cons...,
             )
             // log.Printf("\n==> %d Wait data to align\n", wid)
             // start = time.Now()
         }
-        new_iter.Done()
+        newIter.Done()
     }
     log.Printf("Start of the sequence Pairing")
@@ -173,6 +173,6 @@ func IAssemblePESequencesBatch(iterator obiseq.IPairedBioSequenceBatch,
         go f(iterator.Split(), i)
     }
-    return new_iter
+    return newIter
 }

View File

@@ -8,34 +8,34 @@ import (
     "github.com/DavidGamba/go-getoptions"
 )
-var __circular__ = false
-var __forward_primer__ string
-var __reverse_primer__ string
-var __allowed_mismatch__ = 0
-var __minimum_length__ = 0
-var __maximum_length__ = -1
+var _Circular = false
+var _ForwardPrimer string
+var _ReversePrimer string
+var _AllowedMismatch = 0
+var _MinimumLength = 0
+var _MaximumLength = -1
 func PCROptionSet(options *getoptions.GetOpt) {
-    options.BoolVar(&__circular__, "circular", false,
+    options.BoolVar(&_Circular, "circular", false,
         options.Alias("c"),
         options.Description("Considers that sequences are [c]ircular."))
-    options.StringVar(&__forward_primer__, "forward", "",
+    options.StringVar(&_ForwardPrimer, "forward", "",
        options.Required("You must provide a forward primer"),
        options.Description("The forward primer used for the electronic PCR."))
-    options.StringVar(&__reverse_primer__, "reverse", "",
+    options.StringVar(&_ReversePrimer, "reverse", "",
        options.Required("You must provide a reverse primer"),
        options.Description("The reverse primer used for the electronic PCR."))
-    options.IntVar(&__allowed_mismatch__, "allowed-mismatches", 0,
+    options.IntVar(&_AllowedMismatch, "allowed-mismatches", 0,
        options.Alias("e"),
        options.Description("Maximum number of mismatches allowed for each primer."))
-    options.IntVar(&__minimum_length__, "min-length", 0,
+    options.IntVar(&_MinimumLength, "min-length", 0,
        options.Alias("l"),
        options.Description("Minimum length of the barcode (primers excluded)."))
-    options.IntVar(&__maximum_length__, "max-length", -1,
+    options.IntVar(&_MaximumLength, "max-length", -1,
        options.Alias("L"),
        options.Description("Maximum length of the barcode (primers excluded)."))
 }
@@ -46,7 +46,7 @@ func OptionSet(options *getoptions.GetOpt) {
 }
 func ForwardPrimer() string {
-    pattern, err := obiapat.MakeApatPattern(__forward_primer__, __allowed_mismatch__)
+    pattern, err := obiapat.MakeApatPattern(_ForwardPrimer, _AllowedMismatch)
     if err != nil {
         log.Fatalf("%+v", err)
@@ -54,11 +54,11 @@ func ForwardPrimer() string {
     pattern.Free()
-    return __forward_primer__
+    return _ForwardPrimer
 }
 func ReversePrimer() string {
-    pattern, err := obiapat.MakeApatPattern(__reverse_primer__, __allowed_mismatch__)
+    pattern, err := obiapat.MakeApatPattern(_ReversePrimer, _AllowedMismatch)
     if err != nil {
         log.Fatalf("%+v", err)
@@ -66,21 +66,21 @@ func ReversePrimer() string {
     pattern.Free()
-    return __reverse_primer__
+    return _ReversePrimer
 }
 func AllowedMismatch() int {
-    return __allowed_mismatch__
+    return _AllowedMismatch
 }
 func Circular() bool {
-    return __circular__
+    return _Circular
 }
 func MinLength() int {
-    return __minimum_length__
+    return _MinimumLength
 }
 func MaxLength() int {
-    return __maximum_length__
+    return _MaximumLength
 }