2022-08-23 11:04:57 +02:00
|
|
|
package obiformats
|
|
|
|
|
|
|
|
|
|
import (
|
|
|
|
|
"bufio"
|
|
|
|
|
"bytes"
|
|
|
|
|
"io"
|
2023-03-27 19:51:10 +07:00
|
|
|
"path"
|
2023-03-28 22:42:58 +07:00
|
|
|
"regexp"
|
2022-08-23 11:04:57 +02:00
|
|
|
"strconv"
|
|
|
|
|
"strings"
|
|
|
|
|
|
|
|
|
|
log "github.com/sirupsen/logrus"
|
|
|
|
|
|
2023-11-29 12:14:37 +01:00
|
|
|
"git.metabarcoding.org/obitools/obitools4/obitools4/pkg/obiiter"
|
|
|
|
|
"git.metabarcoding.org/obitools/obitools4/obitools4/pkg/obiseq"
|
|
|
|
|
"git.metabarcoding.org/obitools/obitools4/obitools4/pkg/obiutils"
|
2022-08-23 11:04:57 +02:00
|
|
|
)
|
|
|
|
|
|
|
|
|
|
// gbstate encodes the current state of the line-oriented GenBank parser
// state machine (see the in* constants below).
type gbstate int
|
|
|
|
|
|
|
|
|
|
const (
	// inHeader: outside an entry, waiting for a LOCUS line.
	inHeader gbstate = 0
	// inEntry: inside an entry header, between the top-level keywords.
	inEntry gbstate = 1
	// inDefinition: accumulating a (possibly multi-line) DEFINITION text.
	inDefinition gbstate = 2
	// inFeature: inside the FEATURES table.
	inFeature gbstate = 3
	// inSequence: inside the ORIGIN sequence block.
	inSequence gbstate = 4
	// inContig: inside a CONTIG record (which has no ORIGIN section).
	inContig gbstate = 5
)
|
|
|
|
|
|
2023-03-28 22:42:58 +07:00
|
|
|
// _seqlenght_rx captures the declared sequence length ("<n> bp") of a
// LOCUS line. NOTE: "lenght" is a historical misspelling kept because the
// name is referenced elsewhere in this file.
var _seqlenght_rx = regexp.MustCompile(" +([0-9]+) bp")
|
|
|
|
|
|
2026-03-10 15:35:23 +01:00
|
|
|
// extractSequence scans the ORIGIN section byte-by-byte directly on the rope,
|
|
|
|
|
// appending compacted bases to dest. Returns the extended slice.
|
|
|
|
|
// Stops and returns when "//" is found at the start of a line.
|
|
|
|
|
// The scanner is left positioned after the "//" line.
|
2026-03-10 16:46:53 +01:00
|
|
|
func (s *ropeScanner) extractSequence(dest []byte, UtoT bool) []byte {
	// lineStart is true while positioned at the first byte of a line;
	// skipDigits is true while still inside the leading position number
	// of an ORIGIN data line (e.g. "       61 acgt...").
	lineStart := true
	skipDigits := true

	// Walk the rope node by node; s.pos is the offset of the next
	// unread byte inside the current node.
	for s.current != nil {
		data := s.current.data[s.pos:]
		for i, b := range data {
			if lineStart {
				if b == '/' {
					// End-of-record marker "//"
					// A '/' at the start of a line inside an ORIGIN
					// section can only begin the "//" terminator.
					// Advance past this byte, then consume the rest
					// of the terminator line.
					s.pos += i + 1
					if s.pos >= len(s.current.data) {
						s.current = s.current.Next()
						s.pos = 0
					}
					s.skipToNewline()
					return dest
				}
				lineStart = false
				skipDigits = true
			}
			switch {
			case b == '\n':
				lineStart = true
			case b == '\r':
				// skip
			case skipDigits:
				// Still skipping the leading "  123 " position field;
				// the first byte that is neither a blank nor a digit
				// is the first base of the line.
				if b != ' ' && (b < '0' || b > '9') {
					skipDigits = false
					if UtoT && b == 'u' {
						b = 't'
					}
					dest = append(dest, b)
				}
			case b != ' ':
				// Regular base character; apply the optional u->t
				// rewrite used for RNA sequences.
				if UtoT && b == 'u' {
					b = 't'
				}
				dest = append(dest, b)
			}
		}
		// Current node exhausted without hitting "//": continue in the
		// next piece of the rope.
		s.current = s.current.Next()
		s.pos = 0
	}
	// Rope exhausted before the end-of-record marker: return what was
	// collected so far (truncated input).
	return dest
}
|
|
|
|
|
|
|
|
|
|
// parseLseqFromLocus extracts the declared sequence length from a LOCUS line.
|
|
|
|
|
// Format: "LOCUS <id> <length> bp ..."
|
|
|
|
|
// Returns -1 if not found or parse error.
|
|
|
|
|
func parseLseqFromLocus(line []byte) int {
	// Anything shorter than the 12-character LOCUS keyword field plus one
	// byte cannot carry an identifier, let alone a length.
	if len(line) < 13 {
		return -1
	}
	// Step over the identifier token that follows the LOCUS field.
	pos := 12
	for pos < len(line) && line[pos] != ' ' {
		pos++
	}
	// Step over the blank run separating the identifier from the length.
	for pos < len(line) && line[pos] == ' ' {
		pos++
	}
	// Measure the leading run of decimal digits: that run is the length.
	end := pos
	for end < len(line) && line[end] >= '0' && line[end] <= '9' {
		end++
	}
	if end == pos {
		// No digits where the length was expected.
		return -1
	}
	if value, err := strconv.Atoi(string(line[pos:end])); err == nil {
		return value
	}
	return -1
}
|
|
|
|
|
|
|
|
|
|
// Prefix constants for GenBank section headers (byte slices for zero-alloc comparison).
|
|
|
|
|
var (
	// Top-level entry/section keywords, anchored at the start of a line.
	gbPfxLocus = []byte("LOCUS ")
	gbPfxDefinition = []byte("DEFINITION ")
	// Continuation lines of a DEFINITION are indented.
	gbPfxContinue = []byte(" ")
	gbPfxSource = []byte("SOURCE ")
	gbPfxFeatures = []byte("FEATURES ")
	gbPfxOrigin = []byte("ORIGIN")
	gbPfxContig = []byte("CONTIG")
	// End-of-record marker.
	gbPfxEnd = []byte("//")
	// Feature qualifier carrying the NCBI taxonomy identifier.
	gbPfxDbXref = []byte(` /db_xref="taxon:`)
)
|
|
|
|
|
|
|
|
|
|
// GenbankChunkParserRope parses a GenBank FileChunk directly from the rope
|
|
|
|
|
// (PieceOfChunk linked list) without calling Pack(). This eliminates the large
|
|
|
|
|
// contiguous allocation required for chromosomal-scale sequences.
|
|
|
|
|
func GenbankChunkParserRope(source string, rope *PieceOfChunk,
	withFeatureTable, UtoT bool) (obiseq.BioSequenceSlice, error) {

	state := inHeader
	scanner := newRopeScanner(rope)
	sequences := obiseq.MakeBioSequenceSlice(100)[:0]

	// Per-record accumulators.
	// NOTE(review): id, lseq and scientificName are not reset when a
	// record is emitted; a subsequent record lacking a LOCUS or SOURCE
	// line would inherit the previous record's values — confirm this is
	// intended (the io.Reader based parser behaves the same way).
	id := ""
	lseq := -1
	scientificName := ""
	defBytes := new(bytes.Buffer)  // accumulated DEFINITION text
	featBytes := new(bytes.Buffer) // raw FEATURES table text
	var seqDest []byte             // sequence bytes, filled by extractSequence
	taxid := 1                     // default when no /db_xref="taxon:..." is seen
	nl := 0                        // line counter within the current record (error messages)

	// The inner for-!processed loop allows a line to be re-examined
	// after a state change (e.g. the first non-continuation line that
	// terminates a DEFINITION must be dispatched again as a keyword).
	for bline := scanner.ReadLine(); bline != nil; bline = scanner.ReadLine() {
		nl++
		processed := false
		for !processed {
			switch {

			case bytes.HasPrefix(bline, gbPfxLocus):
				if state != inHeader {
					log.Fatalf("Line %d - Unexpected state %d while reading LOCUS: %s", nl, state, bline)
				}
				// The entry identifier is the first token after the
				// 12-character LOCUS keyword field.
				rest := bline[12:]
				sp := bytes.IndexByte(rest, ' ')
				if sp < 0 {
					id = string(rest)
				} else {
					id = string(rest[:sp])
				}
				// Pre-size the sequence buffer from the declared length
				// (falls back to 1 KiB when the length is missing).
				lseq = parseLseqFromLocus(bline)
				cap0 := lseq + 20
				if cap0 < 1024 {
					cap0 = 1024
				}
				seqDest = make([]byte, 0, cap0)
				state = inEntry
				processed = true

			case bytes.HasPrefix(bline, gbPfxDefinition):
				if state != inEntry {
					log.Fatalf("Line %d - Unexpected state %d while reading DEFINITION: %s", nl, state, bline)
				}
				defBytes.Write(bytes.TrimSpace(bline[12:]))
				state = inDefinition
				processed = true

			case state == inDefinition:
				// Indented lines continue the DEFINITION; anything else
				// ends it and is re-dispatched through the outer switch.
				if bytes.HasPrefix(bline, gbPfxContinue) {
					defBytes.WriteByte(' ')
					defBytes.Write(bytes.TrimSpace(bline[12:]))
					processed = true
				} else {
					state = inEntry
				}

			case bytes.HasPrefix(bline, gbPfxSource):
				if state != inEntry {
					log.Fatalf("Line %d - Unexpected state %d while reading SOURCE: %s", nl, state, bline)
				}
				scientificName = string(bytes.TrimSpace(bline[12:]))
				processed = true

			case bytes.HasPrefix(bline, gbPfxFeatures):
				if state != inEntry {
					log.Fatalf("Line %d - Unexpected state %d while reading FEATURES: %s", nl, state, bline)
				}
				if withFeatureTable {
					featBytes.Write(bline)
				}
				state = inFeature
				processed = true

			case bytes.HasPrefix(bline, gbPfxOrigin):
				if state != inFeature && state != inContig {
					log.Fatalf("Line %d - Unexpected state %d while reading ORIGIN: %s", nl, state, bline)
				}
				// Use fast byte-scan to extract sequence and consume through "//"
				seqDest = scanner.extractSequence(seqDest, UtoT)
				// Emit record
				if id == "" {
					log.Warn("Empty id when parsing genbank file")
				}
				// Ownership of seqDest is transferred to the sequence.
				sequence := obiseq.NewBioSequenceOwning(id, seqDest, defBytes.String())
				sequence.SetSource(source)

				if withFeatureTable {
					sequence.SetFeatures(featBytes.Bytes())
				}

				annot := sequence.Annotations()
				annot["scientific_name"] = scientificName
				annot["taxid"] = taxid

				sequences = append(sequences, sequence)

				// Reset per-record accumulators for the next entry.
				defBytes = bytes.NewBuffer(obiseq.GetSlice(200))
				featBytes = new(bytes.Buffer)
				nl = 0
				taxid = 1
				seqDest = nil
				state = inHeader
				processed = true

			case bytes.HasPrefix(bline, gbPfxContig):
				if state != inFeature && state != inContig {
					log.Fatalf("Line %d - Unexpected state %d while reading CONTIG: %s", nl, state, bline)
				}
				state = inContig
				processed = true

			case bytes.Equal(bline, gbPfxEnd):
				// Reached for CONTIG records (no ORIGIN section)
				if state != inContig {
					log.Fatalf("Line %d - Unexpected state %d while reading end of record %s", nl, state, id)
				}
				if id == "" {
					log.Warn("Empty id when parsing genbank file")
				}
				// seqDest may be empty here: CONTIG records carry no
				// ORIGIN sequence data.
				sequence := obiseq.NewBioSequenceOwning(id, seqDest, defBytes.String())
				sequence.SetSource(source)

				if withFeatureTable {
					sequence.SetFeatures(featBytes.Bytes())
				}

				annot := sequence.Annotations()
				annot["scientific_name"] = scientificName
				annot["taxid"] = taxid

				sequences = append(sequences, sequence)

				// Reset per-record accumulators for the next entry.
				defBytes = bytes.NewBuffer(obiseq.GetSlice(200))
				featBytes = new(bytes.Buffer)
				nl = 0
				taxid = 1
				seqDest = nil
				state = inHeader
				processed = true

			default:
				switch state {
				case inFeature:
					if withFeatureTable {
						featBytes.WriteByte('\n')
						featBytes.Write(bline)
					}
					// Extract the NCBI taxid from the first
					// /db_xref="taxon:..." qualifier encountered.
					if bytes.HasPrefix(bline, gbPfxDbXref) {
						rest := bline[len(gbPfxDbXref):]
						q := bytes.IndexByte(rest, '"')
						if q >= 0 {
							taxid, _ = strconv.Atoi(string(rest[:q]))
						}
					}
					processed = true
				case inHeader, inEntry, inContig:
					// Header/metadata lines that carry no information
					// used here are skipped.
					processed = true
				default:
					log.Fatalf("Unexpected state %d while reading: %s", state, bline)
				}
			}
		}
	}

	return sequences, nil
}
|
|
|
|
|
|
2025-07-07 15:29:07 +02:00
|
|
|
func GenbankChunkParser(withFeatureTable, UtoT bool) func(string, io.Reader) (obiseq.BioSequenceSlice, error) {
|
2024-08-02 12:35:46 +02:00
|
|
|
return func(source string, input io.Reader) (obiseq.BioSequenceSlice, error) {
|
|
|
|
|
state := inHeader
|
|
|
|
|
scanner := bufio.NewReader(input)
|
|
|
|
|
sequences := obiseq.MakeBioSequenceSlice(100)[:0]
|
2022-08-23 11:04:57 +02:00
|
|
|
id := ""
|
2023-03-28 22:42:58 +07:00
|
|
|
lseq := -1
|
2022-08-23 11:04:57 +02:00
|
|
|
scientificName := ""
|
|
|
|
|
defBytes := new(bytes.Buffer)
|
|
|
|
|
featBytes := new(bytes.Buffer)
|
|
|
|
|
seqBytes := new(bytes.Buffer)
|
|
|
|
|
taxid := 1
|
2023-02-17 10:54:03 +01:00
|
|
|
nl := 0
|
|
|
|
|
sl := 0
|
2024-02-16 15:20:37 +01:00
|
|
|
var line string
|
|
|
|
|
for bline, is_prefix, err := scanner.ReadLine(); err != io.EOF; bline, is_prefix, err = scanner.ReadLine() {
|
2023-02-17 10:54:03 +01:00
|
|
|
nl++
|
2024-02-16 15:20:37 +01:00
|
|
|
line = string(bline)
|
|
|
|
|
if is_prefix || len(line) > 100 {
|
2024-08-02 12:35:46 +02:00
|
|
|
log.Fatalf("From %s:Line too long: %s", source, line)
|
2024-02-16 15:20:37 +01:00
|
|
|
}
|
|
|
|
|
processed := false
|
|
|
|
|
for !processed {
|
|
|
|
|
switch {
|
|
|
|
|
|
|
|
|
|
case strings.HasPrefix(line, "LOCUS "):
|
|
|
|
|
if state != inHeader {
|
2025-06-17 08:52:45 +02:00
|
|
|
log.Fatalf("Line %d - Unexpected state %d while reading LOCUS: %s", nl, state, line)
|
2024-02-16 15:20:37 +01:00
|
|
|
}
|
|
|
|
|
id = strings.SplitN(line[12:], " ", 2)[0]
|
|
|
|
|
match_length := _seqlenght_rx.FindStringSubmatch(line)
|
|
|
|
|
if len(match_length) > 0 {
|
|
|
|
|
lseq, err = strconv.Atoi(match_length[1])
|
|
|
|
|
if err != nil {
|
|
|
|
|
lseq = -1
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
if lseq > 0 {
|
|
|
|
|
seqBytes = bytes.NewBuffer(obiseq.GetSlice(lseq + 20))
|
|
|
|
|
} else {
|
|
|
|
|
seqBytes = new(bytes.Buffer)
|
|
|
|
|
}
|
|
|
|
|
state = inEntry
|
|
|
|
|
processed = true
|
|
|
|
|
|
|
|
|
|
case strings.HasPrefix(line, "DEFINITION "):
|
|
|
|
|
if state != inEntry {
|
2025-06-17 08:52:45 +02:00
|
|
|
log.Fatalf("Line %d - Unexpected state %d while reading DEFINITION: %s", nl, state, line)
|
2024-02-16 15:20:37 +01:00
|
|
|
}
|
|
|
|
|
defBytes.WriteString(strings.TrimSpace(line[12:]))
|
|
|
|
|
state = inDefinition
|
|
|
|
|
processed = true
|
|
|
|
|
|
|
|
|
|
case state == inDefinition:
|
|
|
|
|
if strings.HasPrefix(line, " ") {
|
|
|
|
|
defBytes.WriteByte(' ')
|
|
|
|
|
defBytes.WriteString(strings.TrimSpace(line[12:]))
|
|
|
|
|
processed = true
|
|
|
|
|
} else {
|
|
|
|
|
state = inEntry
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
case strings.HasPrefix(line, "SOURCE "):
|
|
|
|
|
if state != inEntry {
|
2025-06-17 08:52:45 +02:00
|
|
|
log.Fatalf("Line %d - Unexpected state %d while reading SOURCE: %s", nl, state, line)
|
2024-02-16 15:20:37 +01:00
|
|
|
}
|
|
|
|
|
scientificName = strings.TrimSpace(line[12:])
|
|
|
|
|
processed = true
|
|
|
|
|
|
|
|
|
|
case strings.HasPrefix(line, "FEATURES "):
|
|
|
|
|
if state != inEntry {
|
2025-06-17 08:52:45 +02:00
|
|
|
log.Fatalf("Line %d - Unexpected state %d while reading FEATURES: %s", nl, state, line)
|
2023-09-01 09:30:12 +02:00
|
|
|
}
|
2022-08-23 11:04:57 +02:00
|
|
|
featBytes.WriteString(line)
|
2024-02-16 15:20:37 +01:00
|
|
|
state = inFeature
|
|
|
|
|
processed = true
|
|
|
|
|
|
|
|
|
|
case strings.HasPrefix(line, "ORIGIN"):
|
2025-06-17 08:52:45 +02:00
|
|
|
if state != inFeature && state != inContig {
|
|
|
|
|
log.Fatalf("Line %d - Unexpected state %d while reading ORIGIN: %s", nl, state, line)
|
2024-02-16 15:20:37 +01:00
|
|
|
}
|
|
|
|
|
state = inSequence
|
|
|
|
|
processed = true
|
|
|
|
|
|
2024-02-27 07:28:56 +01:00
|
|
|
case strings.HasPrefix(line, "CONTIG"):
|
|
|
|
|
if state != inFeature && state != inContig {
|
2025-06-17 08:52:45 +02:00
|
|
|
log.Fatalf("Line %d - Unexpected state %d while reading ORIGIN: %s", nl, state, line)
|
2024-02-27 07:28:56 +01:00
|
|
|
}
|
|
|
|
|
state = inContig
|
|
|
|
|
processed = true
|
|
|
|
|
|
2024-02-16 15:20:37 +01:00
|
|
|
case line == "//":
|
|
|
|
|
|
2024-02-27 07:28:56 +01:00
|
|
|
if state != inSequence && state != inContig {
|
2025-06-17 08:52:45 +02:00
|
|
|
log.Fatalf("Line %d - Unexpected state %d while reading end of record %s", nl, state, id)
|
2024-02-16 15:20:37 +01:00
|
|
|
}
|
|
|
|
|
if id == "" {
|
|
|
|
|
log.Warn("Empty id when parsing genbank file")
|
2022-08-23 11:04:57 +02:00
|
|
|
}
|
2024-02-16 15:20:37 +01:00
|
|
|
|
|
|
|
|
sequence := obiseq.NewBioSequence(id,
|
|
|
|
|
seqBytes.Bytes(),
|
|
|
|
|
defBytes.String())
|
|
|
|
|
sequence.SetSource(source)
|
2024-02-20 13:23:07 +01:00
|
|
|
|
|
|
|
|
if withFeatureTable {
|
|
|
|
|
sequence.SetFeatures(featBytes.Bytes())
|
|
|
|
|
}
|
2024-02-16 15:20:37 +01:00
|
|
|
|
|
|
|
|
annot := sequence.Annotations()
|
|
|
|
|
annot["scientific_name"] = scientificName
|
|
|
|
|
annot["taxid"] = taxid
|
|
|
|
|
|
|
|
|
|
sequences = append(sequences, sequence)
|
|
|
|
|
|
|
|
|
|
defBytes = bytes.NewBuffer(obiseq.GetSlice(200))
|
|
|
|
|
featBytes = new(bytes.Buffer)
|
|
|
|
|
nl = 0
|
|
|
|
|
sl = 0
|
|
|
|
|
|
|
|
|
|
state = inHeader
|
|
|
|
|
processed = true
|
|
|
|
|
|
|
|
|
|
case state == inSequence:
|
2023-02-17 10:54:03 +01:00
|
|
|
sl++
|
2026-02-20 10:39:14 +01:00
|
|
|
cleanline := strings.TrimSpace(line)
|
|
|
|
|
parts := strings.SplitN(cleanline, " ", 7)
|
2022-08-23 11:04:57 +02:00
|
|
|
lparts := len(parts)
|
2026-02-20 10:39:14 +01:00
|
|
|
for i := 1; i < lparts; i++ {
|
2025-07-07 15:29:07 +02:00
|
|
|
if UtoT {
|
|
|
|
|
parts[i] = strings.ReplaceAll(parts[i], "u", "t")
|
|
|
|
|
}
|
2022-08-23 11:04:57 +02:00
|
|
|
seqBytes.WriteString(parts[i])
|
|
|
|
|
}
|
2024-02-16 15:20:37 +01:00
|
|
|
processed = true
|
|
|
|
|
|
|
|
|
|
default:
|
|
|
|
|
switch state {
|
|
|
|
|
case inFeature:
|
2024-02-20 13:23:07 +01:00
|
|
|
if withFeatureTable {
|
|
|
|
|
featBytes.WriteByte('\n')
|
|
|
|
|
featBytes.WriteString(line)
|
|
|
|
|
}
|
2024-02-16 15:20:37 +01:00
|
|
|
if strings.HasPrefix(line, ` /db_xref="taxon:`) {
|
|
|
|
|
taxid, _ = strconv.Atoi(strings.SplitN(line[37:], `"`, 2)[0])
|
|
|
|
|
}
|
|
|
|
|
processed = true
|
|
|
|
|
case inHeader:
|
|
|
|
|
processed = true
|
|
|
|
|
case inEntry:
|
|
|
|
|
processed = true
|
2024-03-11 10:53:25 +01:00
|
|
|
case inContig:
|
|
|
|
|
processed = true
|
|
|
|
|
default:
|
|
|
|
|
log.Fatalf("Unexpected state %d while reading: %s", state, line)
|
2024-02-16 15:20:37 +01:00
|
|
|
}
|
2022-08-23 11:04:57 +02:00
|
|
|
}
|
|
|
|
|
}
|
2023-02-17 10:54:03 +01:00
|
|
|
|
2022-08-23 11:04:57 +02:00
|
|
|
}
|
2024-02-16 15:20:37 +01:00
|
|
|
|
2026-03-10 15:35:23 +01:00
|
|
|
_ = sl
|
2024-08-02 12:35:46 +02:00
|
|
|
return sequences, nil
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
2024-11-29 18:15:03 +01:00
|
|
|
func _ParseGenbankFile(input ChannelFileChunk,
|
2024-08-02 12:35:46 +02:00
|
|
|
out obiiter.IBioSequence,
|
2025-07-07 15:29:07 +02:00
|
|
|
withFeatureTable, UtoT bool) {
|
2024-08-02 12:35:46 +02:00
|
|
|
|
|
|
|
|
for chunks := range input {
|
2026-03-10 15:35:23 +01:00
|
|
|
var sequences obiseq.BioSequenceSlice
|
|
|
|
|
var err error
|
|
|
|
|
|
|
|
|
|
if chunks.Rope != nil {
|
|
|
|
|
sequences, err = GenbankChunkParserRope(chunks.Source, chunks.Rope, withFeatureTable, UtoT)
|
|
|
|
|
} else {
|
|
|
|
|
parser := GenbankChunkParser(withFeatureTable, UtoT)
|
|
|
|
|
sequences, err = parser(chunks.Source, chunks.Raw)
|
|
|
|
|
}
|
2024-08-02 12:35:46 +02:00
|
|
|
|
|
|
|
|
if err != nil {
|
|
|
|
|
log.Fatalf("File %s : Cannot parse the genbank file : %v", chunks.Source, err)
|
2023-03-28 19:37:05 +07:00
|
|
|
}
|
2024-08-02 12:35:46 +02:00
|
|
|
|
|
|
|
|
out.Push(obiiter.MakeBioSequenceBatch(chunks.Source, chunks.Order, sequences))
|
2023-09-01 09:30:12 +02:00
|
|
|
}
|
2022-08-23 11:04:57 +02:00
|
|
|
|
2024-06-26 18:39:42 +02:00
|
|
|
log.Debug("End of the Genbank thread")
|
2022-08-23 11:04:57 +02:00
|
|
|
out.Done()
|
|
|
|
|
|
|
|
|
|
}
|
|
|
|
|
|
2024-08-02 12:35:46 +02:00
|
|
|
func ReadGenbank(reader io.Reader, options ...WithOption) (obiiter.IBioSequence, error) {
|
2022-08-23 11:04:57 +02:00
|
|
|
opt := MakeOptions(options)
|
|
|
|
|
|
2024-11-29 18:15:03 +01:00
|
|
|
entry_channel := ReadFileChunk(
|
2024-08-02 12:35:46 +02:00
|
|
|
opt.Source(),
|
|
|
|
|
reader,
|
2025-03-12 13:29:41 +01:00
|
|
|
1024*1024*128,
|
2024-08-02 12:35:46 +02:00
|
|
|
EndOfLastFlatFileEntry,
|
2025-03-12 13:29:41 +01:00
|
|
|
"\nLOCUS ",
|
2026-03-10 15:35:23 +01:00
|
|
|
false, // do not pack: rope-based parser avoids contiguous allocation
|
2024-08-02 12:35:46 +02:00
|
|
|
)
|
|
|
|
|
|
2023-03-07 11:12:13 +07:00
|
|
|
newIter := obiiter.MakeIBioSequence()
|
2022-08-23 11:04:57 +02:00
|
|
|
|
|
|
|
|
nworkers := opt.ParallelWorkers()
|
|
|
|
|
|
|
|
|
|
for j := 0; j < nworkers; j++ {
|
2024-05-01 00:50:23 +02:00
|
|
|
newIter.Add(1)
|
2024-08-02 12:35:46 +02:00
|
|
|
go _ParseGenbankFile(
|
|
|
|
|
entry_channel,
|
|
|
|
|
newIter,
|
2024-05-01 00:50:23 +02:00
|
|
|
opt.WithFeatureTable(),
|
2025-07-07 15:29:07 +02:00
|
|
|
opt.UtoT(),
|
2024-08-02 12:35:46 +02:00
|
|
|
)
|
2022-08-23 11:04:57 +02:00
|
|
|
}
|
|
|
|
|
|
2024-05-01 00:50:23 +02:00
|
|
|
go func() {
|
|
|
|
|
newIter.WaitAndClose()
|
2024-06-26 18:39:42 +02:00
|
|
|
log.Debug("End of the genbank file ", opt.Source())
|
2024-05-01 00:50:23 +02:00
|
|
|
}()
|
2022-08-23 11:04:57 +02:00
|
|
|
|
2024-05-01 00:50:23 +02:00
|
|
|
if opt.FullFileBatch() {
|
2023-09-01 09:30:12 +02:00
|
|
|
newIter = newIter.CompleteFileIterator()
|
2023-03-27 19:51:10 +07:00
|
|
|
}
|
|
|
|
|
|
2024-08-02 12:35:46 +02:00
|
|
|
return newIter, nil
|
2022-08-23 11:04:57 +02:00
|
|
|
}
|
|
|
|
|
|
2023-01-22 22:04:17 +01:00
|
|
|
func ReadGenbankFromFile(filename string, options ...WithOption) (obiiter.IBioSequence, error) {
|
2022-08-23 11:04:57 +02:00
|
|
|
var reader io.Reader
|
|
|
|
|
var err error
|
|
|
|
|
|
2023-03-27 19:51:10 +07:00
|
|
|
options = append(options, OptionsSource(obiutils.RemoveAllExt((path.Base(filename)))))
|
|
|
|
|
|
2025-01-24 11:47:59 +01:00
|
|
|
reader, err = obiutils.Ropen(filename)
|
2023-10-16 15:34:06 +02:00
|
|
|
|
2025-01-24 11:47:59 +01:00
|
|
|
if err == obiutils.ErrNoContent {
|
2023-10-16 15:34:06 +02:00
|
|
|
log.Infof("file %s is empty", filename)
|
|
|
|
|
return ReadEmptyFile(options...)
|
|
|
|
|
}
|
|
|
|
|
|
2022-08-23 11:04:57 +02:00
|
|
|
if err != nil {
|
|
|
|
|
log.Printf("open file error: %+v", err)
|
2023-01-22 22:04:17 +01:00
|
|
|
return obiiter.NilIBioSequence, err
|
2022-08-23 11:04:57 +02:00
|
|
|
}
|
|
|
|
|
|
2024-08-02 12:35:46 +02:00
|
|
|
return ReadGenbank(reader, options...)
|
2022-08-23 11:04:57 +02:00
|
|
|
}
|