Files
obitools4/pkg/obiformats/embl_read.go

288 lines
7.0 KiB
Go
Raw Normal View History

2022-01-13 23:27:39 +01:00
package obiformats
import (
"bufio"
"bytes"
"io"
"os"
"strconv"
"strings"
gzip "github.com/klauspost/pgzip"
log "github.com/sirupsen/logrus"
"git.metabarcoding.org/lecasofts/go/obitools/pkg/obiiter"
2022-01-13 23:43:01 +01:00
"git.metabarcoding.org/lecasofts/go/obitools/pkg/obiseq"
2022-01-13 23:27:39 +01:00
)
// _FileChunkSize is the target size, in bytes (64 MiB), of the raw file
// chunks handed to the parser goroutines.
var _FileChunkSize = 1 << 26
2022-01-13 23:27:39 +01:00
2022-01-14 17:32:12 +01:00
// _FileChunk associates a reader over one raw chunk of the flat file with
// the rank (order) of that chunk in the input stream, so that downstream
// consumers can number the resulting sequence batches consistently.
type _FileChunk struct {
	raw   io.Reader // reader over the raw bytes of the chunk
	order int       // 0-based position of the chunk in the input stream
}
2023-02-16 13:30:42 +01:00
// _EndOfLastEntry finds the index of the end of the last entry in the given
// byte slice 'buff' using a backward scan for a terminator of the form:
//
//	<CR>?<LF>//<CR>?<LF>
//
// where <CR> and <LF> are the ASCII codes for carriage return and line feed,
// respectively. The function returns the index of the first byte following
// the final <LF> of the terminator, or -1 if no terminator is found.
//
// Arguments:
//
//	buff []byte - a byte slice to search for the end of the last entry
//
// Returns:
//
//	int - the index of the end of the last entry or -1 if no match is found.
func _EndOfLastEntry(buff []byte) int {
	//  6    5  4 3  2    1
	// <CR>?<LF> // <CR>?<LF>
	var i int
	var state = 0
	var start = 0

	for i = len(buff) - 1; i >= 0 && state < 5; i-- {
		switch state {
		case 0: // outside of the pattern
			if buff[i] == '\n' {
				state = 1
			}
		case 1: // a \n has been matched
			// start is the index just after the final \n of the
			// candidate terminator.
			start = i + 2
			switch buff[i] {
			case '\r':
				state = 2
			case '/':
				state = 3
			case '\n':
				state = 1
			default:
				state = 0
			}
		case 2: // a \r has been matched
			switch buff[i] {
			case '/':
				state = 3
			case '\n':
				state = 1
			default:
				state = 0
			}
		case 3: // the first / has been matched
			switch buff[i] {
			case '/':
				state = 4
			case '\n':
				state = 1
			default:
				state = 0
			}
		case 4: // the second / has been matched
			switch buff[i] {
			case '\n':
				state = 5
			default:
				state = 0
			}
		}
	}

	// A complete terminator was seen only if the automaton reached state 5.
	// Testing the loop index instead (i > 0) wrongly returned -1 when the
	// terminator ended at the first or second byte of the buffer, because
	// i has already been decremented past the match on loop exit.
	if state == 5 {
		return start
	}

	return -1
}
2023-01-22 22:04:17 +01:00
func _ParseEmblFile(input <-chan _FileChunk, out obiiter.IBioSequence) {
2022-01-13 23:27:39 +01:00
for chunks := range input {
scanner := bufio.NewScanner(chunks.raw)
order := chunks.order
sequences := make(obiseq.BioSequenceSlice, 0, 100)
id := ""
2022-01-14 17:32:12 +01:00
scientificName := ""
defBytes := new(bytes.Buffer)
featBytes := new(bytes.Buffer)
seqBytes := new(bytes.Buffer)
2022-01-13 23:27:39 +01:00
taxid := 1
for scanner.Scan() {
line := scanner.Text()
switch {
case strings.HasPrefix(line, "ID "):
id = strings.SplitN(line[5:], ";", 2)[0]
case strings.HasPrefix(line, "OS "):
2022-01-14 17:32:12 +01:00
scientificName = strings.TrimSpace(line[5:])
2022-01-13 23:27:39 +01:00
case strings.HasPrefix(line, "DE "):
2022-01-14 17:32:12 +01:00
if defBytes.Len() > 0 {
defBytes.WriteByte(' ')
2022-01-13 23:27:39 +01:00
}
2022-01-14 17:32:12 +01:00
defBytes.WriteString(strings.TrimSpace(line[5:]))
2022-01-13 23:27:39 +01:00
case strings.HasPrefix(line, "FH "):
2022-01-14 17:32:12 +01:00
featBytes.WriteString(line)
2022-01-13 23:27:39 +01:00
case line == "FH":
2022-01-14 17:32:12 +01:00
featBytes.WriteByte('\n')
featBytes.WriteString(line)
2022-01-13 23:27:39 +01:00
case strings.HasPrefix(line, "FT "):
2022-01-14 17:32:12 +01:00
featBytes.WriteByte('\n')
featBytes.WriteString(line)
2022-01-13 23:27:39 +01:00
if strings.HasPrefix(line, `FT /db_xref="taxon:`) {
taxid, _ = strconv.Atoi(strings.SplitN(line[37:], `"`, 2)[0])
}
case strings.HasPrefix(line, " "):
parts := strings.SplitN(line[5:], " ", 7)
for i := 0; i < 6; i++ {
2022-01-14 17:32:12 +01:00
seqBytes.WriteString(parts[i])
2022-01-13 23:27:39 +01:00
}
case line == "//":
sequence := obiseq.NewBioSequence(id,
bytes.ToLower(seqBytes.Bytes()),
2022-01-14 17:32:12 +01:00
defBytes.String())
2022-01-13 23:27:39 +01:00
2022-01-16 00:21:42 +01:00
sequence.SetFeatures(featBytes.Bytes())
2022-01-13 23:27:39 +01:00
annot := sequence.Annotations()
2022-01-14 17:32:12 +01:00
annot["scientific_name"] = scientificName
2022-01-13 23:27:39 +01:00
annot["taxid"] = taxid
// log.Println(FormatFasta(sequence, FormatFastSeqJsonHeader))
sequences = append(sequences, sequence)
2022-01-14 17:32:12 +01:00
defBytes = new(bytes.Buffer)
featBytes = new(bytes.Buffer)
seqBytes = new(bytes.Buffer)
2022-01-13 23:27:39 +01:00
}
}
out.Push(obiiter.MakeBioSequenceBatch(order, sequences))
2022-01-13 23:27:39 +01:00
}
out.Done()
}
2023-02-16 13:30:42 +01:00
// _ReadFlatFileChunk reads a chunk of data from the given 'reader' and sends it to the
// 'readers' channel as a _FileChunk struct. The function reads from the reader until
// the end of the last entry is found, then sends the chunk to the channel. If the end
// of the last entry is not found in the current chunk, the function reads from the reader
// in 1 MB increments until the end of the last entry is found. The function repeats this
// process until the end of the file is reached.
//
// Arguments:
// reader io.Reader - an io.Reader to read data from
// readers chan _FileChunk - a channel to send the data as a _FileChunk struct
//
// Returns:
// None
2022-01-14 17:32:12 +01:00
func _ReadFlatFileChunk(reader io.Reader, readers chan _FileChunk) {
2022-01-13 23:27:39 +01:00
var err error
var buff []byte
size := 0
l := 0
i := 0
2023-02-16 13:30:42 +01:00
// Initialize the buffer to the size of a chunk of data
buff = make([]byte, _FileChunkSize)
2023-02-16 13:30:42 +01:00
// Read from the reader until the end of the last entry is found or the end of the file is reached
2022-01-13 23:27:39 +01:00
for err == nil {
2023-02-16 13:30:42 +01:00
// Read from the reader until the buffer is full or the end of the file is reached
2022-01-13 23:27:39 +01:00
for ; err == nil && l < len(buff); l += size {
size, err = reader.Read(buff[l:])
}
2023-02-16 13:30:42 +01:00
// Create an extended buffer to read from if the end of the last entry is not found in the current buffer
extbuff := make([]byte, 1<<20)
2022-01-13 23:27:39 +01:00
buff = buff[:l]
end := 0
ic := 0
2023-02-16 13:30:42 +01:00
// Read from the reader in 1 MB increments until the end of the last entry is found
for end = _EndOfLastEntry(buff); err == nil && end < 0; end = _EndOfLastEntry(extbuff[:size]) {
ic++
size, err = reader.Read(extbuff)
buff = append(buff, extbuff[:size]...)
}
2023-02-16 13:30:42 +01:00
end = _EndOfLastEntry(buff)
// If an extension was read, log the size and number of extensions
if len(buff) > 0 {
remains := buff[end:]
buff = buff[:end]
2023-02-16 13:30:42 +01:00
// Send the chunk of data as a _FileChunk struct to the readers channel
io := bytes.NewBuffer(buff)
log.Debugf("Flat File chunck : final buff size %d bytes (%d) (%d extensions) -> end = %d\n",
len(buff),
io.Cap(),
ic,
end,
)
2023-02-16 13:30:42 +01:00
readers <- _FileChunk{io, i}
i++
2023-02-16 13:30:42 +01:00
// Set the buffer to the size of a chunk of data and copy any remaining data to the new buffer
buff = make([]byte, _FileChunkSize)
copy(buff, remains)
l = len(remains)
}
2022-01-13 23:27:39 +01:00
}
2023-02-16 13:30:42 +01:00
// Close the readers channel when the end of the file is reached
2022-01-13 23:27:39 +01:00
close(readers)
2023-02-16 13:30:42 +01:00
2022-01-13 23:27:39 +01:00
}
// 6 5 43 2 1
//
2022-01-13 23:27:39 +01:00
// <CR>?<LF>//<CR>?<LF>
2023-01-22 22:04:17 +01:00
func ReadEMBL(reader io.Reader, options ...WithOption) obiiter.IBioSequence {
2022-01-13 23:27:39 +01:00
opt := MakeOptions(options)
entry_channel := make(chan _FileChunk)
2022-01-13 23:27:39 +01:00
newIter := obiiter.MakeIBioSequence()
2022-01-13 23:27:39 +01:00
nworkers := opt.ParallelWorkers()
newIter.Add(nworkers)
2022-01-13 23:27:39 +01:00
go func() {
newIter.WaitAndClose()
2022-01-13 23:27:39 +01:00
}()
// for j := 0; j < opt.ParallelWorkers(); j++ {
for j := 0; j < nworkers; j++ {
2022-01-14 17:32:12 +01:00
go _ParseEmblFile(entry_channel, newIter)
2022-01-13 23:27:39 +01:00
}
2022-01-14 17:32:12 +01:00
go _ReadFlatFileChunk(reader, entry_channel)
2022-01-13 23:27:39 +01:00
2022-01-14 17:32:12 +01:00
return newIter
2022-01-13 23:27:39 +01:00
}
2023-01-22 22:04:17 +01:00
func ReadEMBLFromFile(filename string, options ...WithOption) (obiiter.IBioSequence, error) {
2022-01-13 23:27:39 +01:00
var reader io.Reader
var greader io.Reader
var err error
reader, err = os.Open(filename)
if err != nil {
log.Printf("open file error: %+v", err)
2023-01-22 22:04:17 +01:00
return obiiter.NilIBioSequence, err
2022-01-13 23:27:39 +01:00
}
// Test if the flux is compressed by gzip
//greader, err = gzip.NewReader(reader)
greader, err = gzip.NewReaderN(reader, 1<<24, 2)
2022-01-13 23:27:39 +01:00
if err == nil {
reader = greader
}
2022-11-16 17:13:03 +01:00
return ReadEMBL(reader, options...), nil
2022-01-13 23:27:39 +01:00
}