mirror of
https://github.com/metabarcoding/obitools4.git
synced 2026-03-25 13:30:52 +00:00
Merge pull request #72 from metabarcoding/push-zsprzlqxurrp
Push zsprzlqxurrp
This commit is contained in:
69
Makefile
69
Makefile
@@ -62,10 +62,10 @@ OUTPUT:=$(shell mktemp)
|
||||
|
||||
all: install-githook obitools
|
||||
|
||||
obitools: $(patsubst %,$(OBITOOLS_PREFIX)%,$(OBITOOLS))
|
||||
obitools: $(patsubst %,$(OBITOOLS_PREFIX)%,$(OBITOOLS))
|
||||
|
||||
install-githook: $(GITHOOKS)
|
||||
|
||||
|
||||
$(GITHOOK_DIR)/%: $(GITHOOK_SRC_DIR)/%
|
||||
@echo installing $$(basename $@)...
|
||||
@mkdir -p $(GITHOOK_DIR)
|
||||
@@ -79,10 +79,10 @@ update-deps:
|
||||
test: .FORCE
|
||||
$(GOTEST) ./...
|
||||
|
||||
obitests:
|
||||
obitests:
|
||||
@for t in $$(find obitests -name test.sh -print) ; do \
|
||||
bash $${t} || exit 1;\
|
||||
done
|
||||
done
|
||||
|
||||
githubtests: obitools obitests
|
||||
|
||||
@@ -94,19 +94,60 @@ $(foreach P,$(PACKAGE_DIRS),$(eval $(call MAKE_PKG_RULE,$(P))))
|
||||
|
||||
$(foreach P,$(OBITOOLS_DIRS),$(eval $(call MAKE_OBITOOLS_RULE,$(P))))
|
||||
|
||||
pkg/obioptions/version.go: .FORCE
|
||||
ifneq ($(strip $(COMMIT_ID)),)
|
||||
@cat $@ \
|
||||
| sed -E 's/^var _Commit = "[^"]*"/var _Commit = "'$(COMMIT_ID)'"/' \
|
||||
| sed -E 's/^var _Version = "[^"]*"/var _Version = "'"$(LAST_TAG)"'"/' \
|
||||
pkg/obioptions/version.go: version.txt .FORCE
|
||||
@version=$$(cat version.txt); \
|
||||
cat $@ \
|
||||
| sed -E 's/^var _Version = "[^"]*"/var _Version = "Release '$$version'"/' \
|
||||
> $(OUTPUT)
|
||||
|
||||
@diff $@ $(OUTPUT) 2>&1 > /dev/null \
|
||||
|| echo "Update version.go : $@ to $(LAST_TAG) ($(COMMIT_ID))" \
|
||||
&& mv $(OUTPUT) $@
|
||||
|| (echo "Update version.go to $$(cat version.txt)" && mv $(OUTPUT) $@)
|
||||
|
||||
@rm -f $(OUTPUT)
|
||||
endif
|
||||
|
||||
.PHONY: all obitools update-deps obitests githubtests .FORCE
|
||||
.FORCE:
|
||||
bump-version:
|
||||
@echo "Incrementing version..."
|
||||
@current=$$(cat version.txt); \
|
||||
echo " Current version: $$current"; \
|
||||
major=$$(echo $$current | cut -d. -f1); \
|
||||
minor=$$(echo $$current | cut -d. -f2); \
|
||||
patch=$$(echo $$current | cut -d. -f3); \
|
||||
new_patch=$$((patch + 1)); \
|
||||
new_version="$$major.$$minor.$$new_patch"; \
|
||||
echo " New version: $$new_version"; \
|
||||
echo "$$new_version" > version.txt
|
||||
@echo "✓ Version updated in version.txt"
|
||||
@$(MAKE) pkg/obioptions/version.go
|
||||
|
||||
jjnew:
|
||||
@echo "$(YELLOW)→ Creating a new commit...$(NC)"
|
||||
@echo "$(BLUE)→ Documenting current commit...$(NC)"
|
||||
@jj auto-describe
|
||||
@echo "$(BLUE)→ Done.$(NC)"
|
||||
@jj new
|
||||
@echo "$(GREEN)✓ New commit created$(NC)"
|
||||
|
||||
jjpush:
|
||||
@echo "$(YELLOW)→ Pushing commit to repository...$(NC)"
|
||||
@echo "$(BLUE)→ Documenting current commit...$(NC)"
|
||||
@jj auto-describe
|
||||
@echo "$(BLUE)→ Creating new commit for version bump...$(NC)"
|
||||
@jj new
|
||||
@$(MAKE) bump-version
|
||||
@echo "$(BLUE)→ Documenting version bump commit...$(NC)"
|
||||
@jj auto-describe
|
||||
@version=$$(cat version.txt); \
|
||||
echo "$(BLUE)→ Pushing commits and creating tag v$$version...$(NC)"; \
|
||||
jj git push --change @; \
|
||||
git tag -a "v$$version" -m "Release $$version" 2>/dev/null || echo "Tag v$$version already exists"; \
|
||||
git push origin "v$$version" 2>/dev/null || echo "Tag already pushed"
|
||||
@echo "$(GREEN)✓ Commits and tag pushed to repository$(NC)"
|
||||
|
||||
jjfetch:
|
||||
@echo "$(YELLOW)→ Pulling latest commits...$(NC)"
|
||||
@jj git fetch
|
||||
@jj new master@origin
|
||||
@echo "$(GREEN)✓ Latest commits pulled$(NC)"
|
||||
|
||||
.PHONY: all obitools update-deps obitests githubtests jjnew jjpush jjfetch bump-version .FORCE
|
||||
.FORCE:
|
||||
|
||||
213
blackboard/Prospective/kmer_index_design.md
Normal file
213
blackboard/Prospective/kmer_index_design.md
Normal file
@@ -0,0 +1,213 @@
|
||||
# Index de k-mers pour génomes de grande taille
|
||||
|
||||
## Contexte et objectifs
|
||||
|
||||
### Cas d'usage
|
||||
|
||||
- Indexation de k-mers longs (k=31) pour des génomes de grande taille (< 10 Go par génome)
|
||||
- Nombre de génomes : plusieurs dizaines à quelques centaines
|
||||
- Indexation en parallèle
|
||||
- Stockage sur disque
|
||||
- Possibilité d'ajouter des génomes, mais pas de modifier un génome existant
|
||||
|
||||
### Requêtes cibles
|
||||
|
||||
- **Présence/absence** d'un k-mer dans un génome
|
||||
- **Intersection** entre génomes
|
||||
- **Distances** : Jaccard (présence/absence) et potentiellement Bray-Curtis (comptage)
|
||||
|
||||
### Ressources disponibles
|
||||
|
||||
- 128 Go de RAM
|
||||
- Stockage disque
|
||||
|
||||
---
|
||||
|
||||
## Estimation des volumes
|
||||
|
||||
### Par génome
|
||||
|
||||
- **10 Go de séquence** → ~10¹⁰ k-mers bruts (chevauchants)
|
||||
- **Après déduplication** : typiquement 10-50% de k-mers uniques → **~1-5 × 10⁹ k-mers distincts**
|
||||
|
||||
### Espace théorique
|
||||
|
||||
- **k=31** → 62 bits → ~4.6 × 10¹⁸ k-mers possibles
|
||||
- Table d'indexation directe impossible
|
||||
|
||||
---
|
||||
|
||||
## Métriques de distance
|
||||
|
||||
### Présence/absence (binaire)
|
||||
|
||||
- **Jaccard** : |A ∩ B| / |A ∪ B|
|
||||
- **Sørensen-Dice** : 2|A ∩ B| / (|A| + |B|)
|
||||
|
||||
### Comptage (abondance)
|
||||
|
||||
- **Bray-Curtis** : 1 - (2 × Σ min(aᵢ, bᵢ)) / (Σ aᵢ + Σ bᵢ)
|
||||
|
||||
Note : Pour Bray-Curtis, le stockage des comptages est nécessaire, ce qui augmente significativement la taille de l'index.
|
||||
|
||||
---
|
||||
|
||||
## Options d'indexation
|
||||
|
||||
### Option 1 : Bloom Filter par génome
|
||||
|
||||
**Principe** : Structure probabiliste pour test d'appartenance.
|
||||
|
||||
**Avantages :**
|
||||
- Très compact : ~10 bits/élément pour FPR ~1%
|
||||
- Construction rapide, streaming
|
||||
- Facile à sérialiser/désérialiser
|
||||
- Intersection et Jaccard estimables via formules analytiques
|
||||
|
||||
**Inconvénients :**
|
||||
- Faux positifs (pas de faux négatifs)
|
||||
- Distances approximatives
|
||||
|
||||
**Taille estimée** : 1-6 Go par génome (selon FPR cible)
|
||||
|
||||
#### Dimensionnement des Bloom filters
|
||||
|
||||
```
|
||||
\mathrm{FPR} ;=; \left(1 - e^{-h n / m}\right)^h
|
||||
```
|
||||
|
||||
|
||||
| Bits/élément | FPR optimal | k (hash functions) |
|
||||
|--------------|-------------|---------------------|
|
||||
| 8 | ~2% | 5-6 |
|
||||
| 10 | ~1% | 7 |
|
||||
| 12 | ~0.3% | 8 |
|
||||
| 16 | ~0.01% | 11 |
|
||||
|
||||
Formule du taux de faux positifs :
|
||||
```
|
||||
FPR ≈ (1 - e^(-kn/m))^k
|
||||
```
|
||||
Où n = nombre d'éléments, m = nombre de bits, k = nombre de hash functions.
|
||||
|
||||
### Option 2 : Ensemble trié de k-mers
|
||||
|
||||
**Principe** : Stocker les k-mers (uint64) triés, avec compression possible.
|
||||
|
||||
**Avantages :**
|
||||
- Exact (pas de faux positifs)
|
||||
- Intersection/union par merge sort O(n+m)
|
||||
- Compression efficace (delta encoding sur k-mers triés)
|
||||
|
||||
**Inconvénients :**
|
||||
- Plus volumineux : 8 octets/k-mer
|
||||
- Construction plus lente (tri nécessaire)
|
||||
|
||||
**Taille estimée** : 8-40 Go par génome (non compressé)
|
||||
|
||||
### Option 3 : MPHF (Minimal Perfect Hash Function)
|
||||
|
||||
**Principe** : Fonction de hash parfaite minimale pour les k-mers présents.
|
||||
|
||||
**Avantages :**
|
||||
- Très compact : ~3-4 bits/élément
|
||||
- Lookup O(1)
|
||||
- Exact pour les k-mers présents
|
||||
|
||||
**Inconvénients :**
|
||||
- Construction coûteuse (plusieurs passes)
|
||||
- Statique (pas d'ajout de k-mers après construction)
|
||||
- Ne distingue pas "absent" vs "jamais vu" sans structure auxiliaire
|
||||
|
||||
### Option 4 : Hybride MPHF + Bloom filter
|
||||
|
||||
- MPHF pour mapping compact des k-mers présents
|
||||
- Bloom filter pour pré-filtrage des absents
|
||||
|
||||
---
|
||||
|
||||
## Optimisation : Indexation de (k-2)-mers pour requêtes k-mers
|
||||
|
||||
### Principe
|
||||
|
||||
Au lieu d'indexer directement les 31-mers dans un Bloom filter, on indexe les 29-mers. Pour tester la présence d'un 31-mer, on vérifie que les **trois 29-mers** qu'il contient sont présents :
|
||||
|
||||
- positions 0-28
|
||||
- positions 1-29
|
||||
- positions 2-30
|
||||
|
||||
### Analyse probabiliste
|
||||
|
||||
Si le Bloom filter a un FPR de p pour un 29-mer individuel, le FPR effectif pour un 31-mer devient **p³** (les trois requêtes doivent toutes être des faux positifs).
|
||||
|
||||
| FPR 29-mer | FPR 31-mer effectif |
|
||||
|------------|---------------------|
|
||||
| 10% | 0.1% |
|
||||
| 5% | 0.0125% |
|
||||
| 1% | 0.0001% |
|
||||
|
||||
### Avantages
|
||||
|
||||
1. **Moins d'éléments à stocker** : il y a moins de 29-mers distincts que de 31-mers distincts dans un génome (deux 31-mers différents peuvent partager un même 29-mer)
|
||||
|
||||
2. **FPR drastiquement réduit** : FPR³ avec seulement 3 requêtes
|
||||
|
||||
3. **Index plus compact** : on peut utiliser moins de bits par élément (FPR plus élevé acceptable sur le 29-mer) tout en obtenant un FPR très bas sur le 31-mer
|
||||
|
||||
### Trade-off
|
||||
|
||||
Un Bloom filter à **5-6 bits/élément** pour les 29-mers donnerait un FPR effectif < 0.01% pour les 31-mers, soit environ **2× plus compact** que l'approche directe à qualité égale.
|
||||
|
||||
**Coût** : 3× plus de requêtes par lookup (mais les requêtes Bloom sont très rapides).
|
||||
|
||||
---
|
||||
|
||||
## Accélération des calculs de distance : MinHash
|
||||
|
||||
### Principe
|
||||
|
||||
Pré-calculer une "signature" compacte (sketch) de chaque génome permettant d'estimer rapidement Jaccard sans charger les index complets.
|
||||
|
||||
### Avantages
|
||||
|
||||
- Matrice de distances entre 100+ génomes en quelques secondes
|
||||
- Signature de taille fixe (ex: 1000-10000 hash values) quel que soit le génome
|
||||
- Stockage minimal
|
||||
|
||||
### Utilisation
|
||||
|
||||
1. Construction : une passe sur les k-mers de chaque génome
|
||||
2. Distance : comparaison des sketches en O(taille du sketch)
|
||||
|
||||
---
|
||||
|
||||
## Architecture recommandée
|
||||
|
||||
### Pour présence/absence + Jaccard
|
||||
|
||||
1. **Index principal** : Bloom filter de (k-2)-mers avec l'optimisation décrite
|
||||
- Compact (~3-5 Go par génome)
|
||||
- FPR très bas pour les k-mers grâce aux requêtes triples
|
||||
|
||||
2. **Sketches MinHash** : pour calcul rapide des distances entre génomes
|
||||
- Quelques Ko par génome
|
||||
- Permet exploration rapide de la matrice de distances
|
||||
|
||||
### Pour comptage + Bray-Curtis
|
||||
|
||||
1. **Index principal** : k-mers triés + comptages
|
||||
- uint64 (k-mer) + uint8/uint16 (count)
|
||||
- Compression delta possible
|
||||
- Plus volumineux mais exact
|
||||
|
||||
2. **Sketches** : variantes de MinHash pour données pondérées (ex: HyperMinHash)
|
||||
|
||||
---
|
||||
|
||||
## Prochaines étapes
|
||||
|
||||
1. Implémenter un Bloom filter optimisé pour k-mers
|
||||
2. Implémenter l'optimisation (k-2)-mer → k-mer
|
||||
3. Implémenter MinHash pour les sketches
|
||||
4. Définir le format de sérialisation sur disque
|
||||
5. Benchmarker sur des génomes réels
|
||||
4
go.mod
4
go.mod
@@ -27,10 +27,14 @@ require (
|
||||
)
|
||||
|
||||
require (
|
||||
github.com/RoaringBitmap/roaring v1.9.4 // indirect
|
||||
github.com/bits-and-blooms/bitset v1.12.0 // indirect
|
||||
github.com/davecgh/go-spew v1.1.1 // indirect
|
||||
github.com/goombaio/orderedmap v0.0.0-20180924084748-ba921b7e2419 // indirect
|
||||
github.com/kr/pretty v0.3.1 // indirect
|
||||
github.com/kr/text v0.2.0 // indirect
|
||||
github.com/mschoch/smat v0.2.0 // indirect
|
||||
github.com/pelletier/go-toml/v2 v2.2.4 // indirect
|
||||
github.com/pmezard/go-difflib v1.0.0 // indirect
|
||||
github.com/rogpeppe/go-internal v1.12.0 // indirect
|
||||
)
|
||||
|
||||
8
go.sum
8
go.sum
@@ -4,8 +4,12 @@ github.com/PaesslerAG/gval v1.2.2 h1:Y7iBzhgE09IGTt5QgGQ2IdaYYYOU134YGHBThD+wm9E
|
||||
github.com/PaesslerAG/gval v1.2.2/go.mod h1:XRFLwvmkTEdYziLdaCeCa5ImcGVrfQbeNUbVR+C6xac=
|
||||
github.com/PaesslerAG/jsonpath v0.1.0 h1:gADYeifvlqK3R3i2cR5B4DGgxLXIPb3TRTH1mGi0jPI=
|
||||
github.com/PaesslerAG/jsonpath v0.1.0/go.mod h1:4BzmtoM/PI8fPO4aQGIusjGxGir2BzcV0grWtFzq1Y8=
|
||||
github.com/RoaringBitmap/roaring v1.9.4 h1:yhEIoH4YezLYT04s1nHehNO64EKFTop/wBhxv2QzDdQ=
|
||||
github.com/RoaringBitmap/roaring v1.9.4/go.mod h1:6AXUsoIEzDTFFQCe1RbGA6uFONMhvejWj5rqITANK90=
|
||||
github.com/barkimedes/go-deepcopy v0.0.0-20220514131651-17c30cfc62df h1:GSoSVRLoBaFpOOds6QyY1L8AX7uoY+Ln3BHc22W40X0=
|
||||
github.com/barkimedes/go-deepcopy v0.0.0-20220514131651-17c30cfc62df/go.mod h1:hiVxq5OP2bUGBRNS3Z/bt/reCLFNbdcST6gISi1fiOM=
|
||||
github.com/bits-and-blooms/bitset v1.12.0 h1:U/q1fAF7xXRhFCrhROzIfffYnu+dlS38vCZtmFVPHmA=
|
||||
github.com/bits-and-blooms/bitset v1.12.0/go.mod h1:7hO7Gc7Pp1vODcmWvKMRA9BNmbv6a/7QIWpPxHddWR8=
|
||||
github.com/buger/jsonparser v1.1.1 h1:2PnMjfWD7wBILjqQbt530v576A/cAbQvEW9gGIpYMUs=
|
||||
github.com/buger/jsonparser v1.1.1/go.mod h1:6RYKKt7H4d4+iWqouImQ9R2FZql3VbhNgx27UK13J/0=
|
||||
github.com/chen3feng/stl4go v0.1.1 h1:0L1+mDw7pomftKDruM23f1mA7miavOj6C6MZeadzN2Q=
|
||||
@@ -47,8 +51,12 @@ github.com/mattn/go-runewidth v0.0.15 h1:UNAjwbU9l54TA3KzvqLGxwWjHmMgBUVhBiTjelZ
|
||||
github.com/mattn/go-runewidth v0.0.15/go.mod h1:Jdepj2loyihRzMpdS35Xk/zdY8IAYHsh153qUoGf23w=
|
||||
github.com/mitchellh/colorstring v0.0.0-20190213212951-d06e56a500db h1:62I3jR2EmQ4l5rM/4FEfDWcRD+abF5XlKShorW5LRoQ=
|
||||
github.com/mitchellh/colorstring v0.0.0-20190213212951-d06e56a500db/go.mod h1:l0dey0ia/Uv7NcFFVbCLtqEBQbrT4OCwCSKTEv6enCw=
|
||||
github.com/mschoch/smat v0.2.0 h1:8imxQsjDm8yFEAVBe7azKmKSgzSkZXDuKkSq9374khM=
|
||||
github.com/mschoch/smat v0.2.0/go.mod h1:kc9mz7DoBKqDyiRL7VZN8KvXQMWeTaVnttLRXOlotKw=
|
||||
github.com/pbnjay/memory v0.0.0-20210728143218-7b4eea64cf58 h1:onHthvaw9LFnH4t2DcNVpwGmV9E1BkGknEliJkfwQj0=
|
||||
github.com/pbnjay/memory v0.0.0-20210728143218-7b4eea64cf58/go.mod h1:DXv8WO4yhMYhSNPKjeNKa5WY9YCIEBRbNzFFPJbWO6Y=
|
||||
github.com/pelletier/go-toml/v2 v2.2.4 h1:mye9XuhQ6gvn5h28+VilKrrPoQVanw5PMw/TB0t5Ec4=
|
||||
github.com/pelletier/go-toml/v2 v2.2.4/go.mod h1:2gIqNv+qfxSVS7cM2xJQKtLSTLUE9V8t9Stt+h56mCY=
|
||||
github.com/pkg/diff v0.0.0-20210226163009-20ebb0f2a09e/go.mod h1:pJLUxLENpZxwdsKMEsNbx1VGcRFpLqf3715MtcvvzbA=
|
||||
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
|
||||
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
|
||||
|
||||
292
kmer_roaring_index/FREQUENCY_FILTER_FINAL.md
Normal file
292
kmer_roaring_index/FREQUENCY_FILTER_FINAL.md
Normal file
@@ -0,0 +1,292 @@
|
||||
# Filtre de Fréquence avec v Niveaux de Roaring Bitmaps
|
||||
|
||||
## Algorithme
|
||||
|
||||
```go
|
||||
Pour chaque k-mer rencontré dans les données:
|
||||
c = 0
|
||||
tant que (k-mer ∈ index[c] ET c < v):
|
||||
c++
|
||||
|
||||
si c < v:
|
||||
index[c].insert(k-mer)
|
||||
```
|
||||
|
||||
**Résultat** : `index[v-1]` contient les k-mers vus **≥ v fois**
|
||||
|
||||
---
|
||||
|
||||
## Exemple d'exécution (v=3)
|
||||
|
||||
```
|
||||
Données:
|
||||
Read1: kmer X
|
||||
Read2: kmer X
|
||||
Read3: kmer X (X vu 3 fois)
|
||||
Read4: kmer Y
|
||||
Read5: kmer Y (Y vu 2 fois)
|
||||
Read6: kmer Z (Z vu 1 fois)
|
||||
|
||||
Exécution:
|
||||
|
||||
Read1 (X):
|
||||
c=0: X ∉ index[0] → index[0].add(X)
|
||||
État: index[0]={X}, index[1]={}, index[2]={}
|
||||
|
||||
Read2 (X):
|
||||
c=0: X ∈ index[0] → c=1
|
||||
c=1: X ∉ index[1] → index[1].add(X)
|
||||
État: index[0]={X}, index[1]={X}, index[2]={}
|
||||
|
||||
Read3 (X):
|
||||
c=0: X ∈ index[0] → c=1
|
||||
c=1: X ∈ index[1] → c=2
|
||||
c=2: X ∉ index[2] → index[2].add(X)
|
||||
État: index[0]={X}, index[1]={X}, index[2]={X}
|
||||
|
||||
Read4 (Y):
|
||||
c=0: Y ∉ index[0] → index[0].add(Y)
|
||||
État: index[0]={X,Y}, index[1]={X}, index[2]={X}
|
||||
|
||||
Read5 (Y):
|
||||
c=0: Y ∈ index[0] → c=1
|
||||
c=1: Y ∉ index[1] → index[1].add(Y)
|
||||
État: index[0]={X,Y}, index[1]={X,Y}, index[2]={X}
|
||||
|
||||
Read6 (Z):
|
||||
c=0: Z ∉ index[0] → index[0].add(Z)
|
||||
État: index[0]={X,Y,Z}, index[1]={X,Y}, index[2]={X}
|
||||
|
||||
Résultat final:
|
||||
index[0] (freq≥1): {X, Y, Z}
|
||||
index[1] (freq≥2): {X, Y}
|
||||
index[2] (freq≥3): {X} ← K-mers filtrés ✓
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Utilisation
|
||||
|
||||
```go
|
||||
// Créer le filtre
|
||||
filter := obikmer.NewFrequencyFilter(31, 3) // k=31, minFreq=3
|
||||
|
||||
// Ajouter les séquences
|
||||
for _, read := range reads {
|
||||
filter.AddSequence(read)
|
||||
}
|
||||
|
||||
// Récupérer les k-mers filtrés (freq ≥ 3)
|
||||
filtered := filter.GetFilteredSet("filtered")
|
||||
fmt.Printf("K-mers de qualité: %d\n", filtered.Cardinality())
|
||||
|
||||
// Statistiques
|
||||
stats := filter.Stats()
|
||||
fmt.Println(stats.String())
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Performance
|
||||
|
||||
### Complexité
|
||||
|
||||
**Par k-mer** :
|
||||
- Lookups : Moyenne ~v/2, pire cas v
|
||||
- Insertions : 1 Add
|
||||
- **Pas de Remove** ✅
|
||||
|
||||
**Total pour n k-mers** :
|
||||
- Temps : O(n × v/2)
|
||||
- Mémoire : O(unique_kmers × v × 2 bytes)
|
||||
|
||||
### Early exit pour distribution skewed
|
||||
|
||||
Avec distribution typique (séquençage) :
|
||||
```
|
||||
80% singletons → 1 lookup (early exit)
|
||||
15% freq 2-3 → 2-3 lookups
|
||||
5% freq ≥4 → jusqu'à v lookups
|
||||
|
||||
Moyenne réelle : ~2 lookups/kmer (au lieu de v/2)
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Mémoire
|
||||
|
||||
### Pour 10^8 k-mers uniques
|
||||
|
||||
| v (minFreq) | Nombre bitmaps | Mémoire | vs map simple |
|
||||
|-------------|----------------|---------|---------------|
|
||||
| v=2 | 2 | ~400 MB | 6x moins |
|
||||
| v=3 | 3 | ~600 MB | 4x moins |
|
||||
| v=5 | 5 | ~1 GB | 2.4x moins |
|
||||
| v=10 | 10 | ~2 GB | 1.2x moins |
|
||||
| v=20 | 20 | ~4 GB | ~égal |
|
||||
|
||||
**Note** : Avec distribution skewed (beaucoup de singletons), la mémoire réelle est bien plus faible car les niveaux hauts ont peu d'éléments.
|
||||
|
||||
### Exemple réaliste (séquençage)
|
||||
|
||||
Pour 10^8 k-mers totaux, v=3 :
|
||||
```
|
||||
Distribution:
|
||||
80% singletons → 80M dans index[0]
|
||||
15% freq 2-3 → 15M dans index[1]
|
||||
5% freq ≥3 → 5M dans index[2]
|
||||
|
||||
Mémoire:
|
||||
index[0]: 80M × 2 bytes = 160 MB
|
||||
index[1]: 15M × 2 bytes = 30 MB
|
||||
index[2]: 5M × 2 bytes = 10 MB
|
||||
Total: ~200 MB ✅
|
||||
|
||||
vs map simple: 80M × 24 bytes = ~2 GB
|
||||
Réduction: 10x
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Comparaison des approches
|
||||
|
||||
| Approche | Mémoire (10^8 kmers) | Passes | Lookups/kmer | Quand utiliser |
|
||||
|----------|----------------------|--------|--------------|----------------|
|
||||
| **v-Bitmaps** | **200-600 MB** | **1** | **~2 (avg)** | **Standard** ✅ |
|
||||
| Map simple | 2.4 GB | 1 | 1 | Si RAM illimitée |
|
||||
| Multi-pass | 400 MB | v | v | Si I/O pas cher |
|
||||
|
||||
---
|
||||
|
||||
## Avantages de v-Bitmaps
|
||||
|
||||
✅ **Une seule passe** sur les données
|
||||
✅ **Mémoire optimale** avec Roaring bitmaps
|
||||
✅ **Pas de Remove** (seulement Contains + Add)
|
||||
✅ **Early exit** efficace sur singletons
|
||||
✅ **Scalable** jusqu'à v~10-20
|
||||
✅ **Simple** à implémenter et comprendre
|
||||
|
||||
---
|
||||
|
||||
## Cas d'usage typiques
|
||||
|
||||
### 1. Éliminer erreurs de séquençage
|
||||
|
||||
```go
|
||||
filter := obikmer.NewFrequencyFilter(31, 3)
|
||||
|
||||
// Traiter FASTQ
|
||||
for read := range StreamFastq("sample.fastq") {
|
||||
filter.AddSequence(read)
|
||||
}
|
||||
|
||||
// K-mers de qualité (pas d'erreurs)
|
||||
cleaned := filter.GetFilteredSet("cleaned")
|
||||
```
|
||||
|
||||
**Résultat** : Élimine 70-80% des k-mers (erreurs)
|
||||
|
||||
### 2. Assemblage de génome
|
||||
|
||||
```go
|
||||
filter := obikmer.NewFrequencyFilter(31, 2)
|
||||
|
||||
// Filtrer avant l'assemblage
|
||||
for read := range reads {
|
||||
filter.AddSequence(read)
|
||||
}
|
||||
|
||||
solidKmers := filter.GetFilteredSet("solid")
|
||||
// Utiliser solidKmers pour le graphe de Bruijn
|
||||
```
|
||||
|
||||
### 3. Comparaison de génomes
|
||||
|
||||
```go
|
||||
collection := obikmer.NewKmerSetCollection(31)
|
||||
|
||||
for _, genome := range genomes {
|
||||
filter := obikmer.NewFrequencyFilter(31, 3)
|
||||
filter.AddSequences(genome.Reads)
|
||||
|
||||
cleaned := filter.GetFilteredSet(genome.ID)
|
||||
collection.Add(cleaned)
|
||||
}
|
||||
|
||||
// Analyses comparatives sur k-mers de qualité
|
||||
matrix := collection.ParallelPairwiseJaccard(8)
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Limites
|
||||
|
||||
**Pour v > 20** :
|
||||
- Trop de lookups (v lookups/kmer)
|
||||
- Mémoire importante (v × 200MB pour 10^8 kmers)
|
||||
|
||||
**Solutions alternatives pour v > 20** :
|
||||
- Utiliser map simple (9 bytes/kmer) si RAM disponible
|
||||
- Algorithme différent (sketch, probabiliste)
|
||||
|
||||
---
|
||||
|
||||
## Optimisations possibles
|
||||
|
||||
### 1. Parallélisation
|
||||
|
||||
```go
|
||||
// Traiter plusieurs fichiers en parallèle
|
||||
filters := make([]*FrequencyFilter, numFiles)
|
||||
|
||||
var wg sync.WaitGroup
|
||||
for i, file := range files {
|
||||
wg.Add(1)
|
||||
go func(idx int, f string) {
|
||||
defer wg.Done()
|
||||
filters[idx] = ProcessFile(f, k, minFreq)
|
||||
}(i, file)
|
||||
}
|
||||
wg.Wait()
|
||||
|
||||
// Merger les résultats
|
||||
merged := MergeFilters(filters)
|
||||
```
|
||||
|
||||
### 2. Streaming avec seuil adaptatif
|
||||
|
||||
```go
|
||||
// Commencer avec v=5, réduire progressivement
|
||||
filter := obikmer.NewFrequencyFilter(31, 5)
|
||||
|
||||
// ... traitement ...
|
||||
|
||||
// Si trop de mémoire, réduire à v=3
|
||||
if filter.MemoryUsage() > threshold {
|
||||
filter = ConvertToLowerThreshold(filter, 3)
|
||||
}
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Récapitulatif final
|
||||
|
||||
**Pour filtrer les k-mers par fréquence ≥ v :**
|
||||
|
||||
1. **Créer** : `filter := NewFrequencyFilter(k, v)`
|
||||
2. **Traiter** : `filter.AddSequence(read)` pour chaque read
|
||||
3. **Résultat** : `filtered := filter.GetFilteredSet(id)`
|
||||
|
||||
**Mémoire** : ~2v MB par million de k-mers uniques
|
||||
**Temps** : Une seule passe, ~2 lookups/kmer en moyenne
|
||||
**Optimal pour** : v ≤ 20, distribution skewed (séquençage)
|
||||
|
||||
---
|
||||
|
||||
## Code fourni
|
||||
|
||||
1. **frequency_filter.go** - Implémentation complète
|
||||
2. **examples_frequency_filter_final.go** - Exemples d'utilisation
|
||||
|
||||
**Tout est prêt à utiliser !** 🚀
|
||||
320
kmer_roaring_index/examples_frequency_filter_final.go
Normal file
320
kmer_roaring_index/examples_frequency_filter_final.go
Normal file
@@ -0,0 +1,320 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"obikmer"
|
||||
)
|
||||
|
||||
func main() {
|
||||
// ==========================================
|
||||
// EXEMPLE 1 : Utilisation basique
|
||||
// ==========================================
|
||||
fmt.Println("=== EXEMPLE 1 : Utilisation basique ===\n")
|
||||
|
||||
k := 31
|
||||
minFreq := 3 // Garder les k-mers vus ≥3 fois
|
||||
|
||||
// Créer le filtre
|
||||
filter := obikmer.NewFrequencyFilter(k, minFreq)
|
||||
|
||||
// Simuler des séquences avec différentes fréquences
|
||||
sequences := [][]byte{
|
||||
[]byte("ACGTACGTACGTACGTACGTACGTACGTACG"), // Kmer X
|
||||
[]byte("ACGTACGTACGTACGTACGTACGTACGTACG"), // Kmer X (freq=2)
|
||||
[]byte("ACGTACGTACGTACGTACGTACGTACGTACG"), // Kmer X (freq=3) ✓
|
||||
[]byte("TTTTTTTTTTTTTTTTTTTTTTTTTTTTTTTT"), // Kmer Y
|
||||
[]byte("TTTTTTTTTTTTTTTTTTTTTTTTTTTTTTTT"), // Kmer Y (freq=2) ✗
|
||||
[]byte("GGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGG"), // Kmer Z (freq=1) ✗
|
||||
}
|
||||
|
||||
fmt.Printf("Traitement de %d séquences...\n", len(sequences))
|
||||
for _, seq := range sequences {
|
||||
filter.AddSequence(seq)
|
||||
}
|
||||
|
||||
// Récupérer les k-mers filtrés
|
||||
filtered := filter.GetFilteredSet("filtered")
|
||||
fmt.Printf("\nK-mers avec freq ≥ %d: %d\n", minFreq, filtered.Cardinality())
|
||||
|
||||
// Statistiques
|
||||
stats := filter.Stats()
|
||||
fmt.Println("\n" + stats.String())
|
||||
|
||||
// ==========================================
|
||||
// EXEMPLE 2 : Vérifier les niveaux
|
||||
// ==========================================
|
||||
fmt.Println("\n=== EXEMPLE 2 : Inspection des niveaux ===\n")
|
||||
|
||||
// Vérifier chaque niveau
|
||||
for level := 0; level < minFreq; level++ {
|
||||
levelSet := filter.GetKmersAtLevel(level)
|
||||
fmt.Printf("Niveau %d (freq≥%d): %d k-mers\n",
|
||||
level+1, level+1, levelSet.Cardinality())
|
||||
}
|
||||
|
||||
// ==========================================
|
||||
// EXEMPLE 3 : Données réalistes
|
||||
// ==========================================
|
||||
fmt.Println("\n=== EXEMPLE 3 : Simulation données séquençage ===\n")
|
||||
|
||||
filter2 := obikmer.NewFrequencyFilter(31, 3)
|
||||
|
||||
// Simuler un dataset réaliste :
|
||||
// - 1000 reads
|
||||
// - 80% contiennent des erreurs (singletons)
|
||||
// - 15% vrais k-mers à basse fréquence
|
||||
// - 5% vrais k-mers à haute fréquence
|
||||
|
||||
// Vraie séquence répétée
|
||||
trueSeq := []byte("ACGTACGTACGTACGTACGTACGTACGTACG")
|
||||
for i := 0; i < 50; i++ {
|
||||
filter2.AddSequence(trueSeq)
|
||||
}
|
||||
|
||||
// Séquence à fréquence moyenne
|
||||
mediumSeq := []byte("CCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCC")
|
||||
for i := 0; i < 5; i++ {
|
||||
filter2.AddSequence(mediumSeq)
|
||||
}
|
||||
|
||||
// Erreurs de séquençage (singletons)
|
||||
for i := 0; i < 100; i++ {
|
||||
errorSeq := []byte(fmt.Sprintf("TTTTTTTTTTTTTTTTTTTTTTTTTTTT%03d", i))
|
||||
filter2.AddSequence(errorSeq)
|
||||
}
|
||||
|
||||
stats2 := filter2.Stats()
|
||||
fmt.Println(stats2.String())
|
||||
|
||||
fmt.Println("Distribution attendue:")
|
||||
fmt.Println(" - Beaucoup de singletons (erreurs)")
|
||||
fmt.Println(" - Peu de k-mers à haute fréquence (signal)")
|
||||
fmt.Println(" → Filtrage efficace !")
|
||||
|
||||
// ==========================================
|
||||
// EXEMPLE 4 : Tester différents seuils
|
||||
// ==========================================
|
||||
fmt.Println("\n=== EXEMPLE 4 : Comparaison de seuils ===\n")
|
||||
|
||||
testSeqs := [][]byte{
|
||||
[]byte("ACGTACGTACGTACGTACGTACGTACGTACG"),
|
||||
[]byte("ACGTACGTACGTACGTACGTACGTACGTACG"),
|
||||
[]byte("ACGTACGTACGTACGTACGTACGTACGTACG"),
|
||||
[]byte("ACGTACGTACGTACGTACGTACGTACGTACG"),
|
||||
[]byte("ACGTACGTACGTACGTACGTACGTACGTACG"), // freq=5
|
||||
[]byte("TTTTTTTTTTTTTTTTTTTTTTTTTTTTTTTT"),
|
||||
[]byte("TTTTTTTTTTTTTTTTTTTTTTTTTTTTTTTT"),
|
||||
[]byte("TTTTTTTTTTTTTTTTTTTTTTTTTTTTTTTT"), // freq=3
|
||||
[]byte("GGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGG"), // freq=1
|
||||
}
|
||||
|
||||
for _, minFreq := range []int{2, 3, 5} {
|
||||
f := obikmer.NewFrequencyFilter(31, minFreq)
|
||||
f.AddSequences(testSeqs)
|
||||
|
||||
fmt.Printf("minFreq=%d: %d k-mers retenus (%.2f MB)\n",
|
||||
minFreq,
|
||||
f.Cardinality(),
|
||||
float64(f.MemoryUsage())/1024/1024)
|
||||
}
|
||||
|
||||
// ==========================================
|
||||
// EXEMPLE 5 : Comparaison mémoire
|
||||
// ==========================================
|
||||
fmt.Println("\n=== EXEMPLE 5 : Comparaison mémoire ===\n")
|
||||
|
||||
filter3 := obikmer.NewFrequencyFilter(31, 3)
|
||||
|
||||
// Simuler 10000 séquences
|
||||
for i := 0; i < 10000; i++ {
|
||||
seq := make([]byte, 100)
|
||||
for j := range seq {
|
||||
seq[j] = "ACGT"[(i+j)%4]
|
||||
}
|
||||
filter3.AddSequence(seq)
|
||||
}
|
||||
|
||||
fmt.Println(filter3.CompareWithSimpleMap())
|
||||
|
||||
// ==========================================
|
||||
// EXEMPLE 6 : Workflow complet
|
||||
// ==========================================
|
||||
fmt.Println("\n=== EXEMPLE 6 : Workflow complet ===\n")
|
||||
|
||||
fmt.Println("1. Créer le filtre")
|
||||
finalFilter := obikmer.NewFrequencyFilter(31, 3)
|
||||
|
||||
fmt.Println("2. Traiter les données (simulation)")
|
||||
// En pratique : lire depuis FASTQ
|
||||
// for read := range ReadFastq("data.fastq") {
|
||||
// finalFilter.AddSequence(read)
|
||||
// }
|
||||
|
||||
// Simulation
|
||||
for i := 0; i < 1000; i++ {
|
||||
seq := []byte("ACGTACGTACGTACGTACGTACGTACGTACG")
|
||||
finalFilter.AddSequence(seq)
|
||||
}
|
||||
|
||||
fmt.Println("3. Récupérer les k-mers filtrés")
|
||||
result := finalFilter.GetFilteredSet("final")
|
||||
|
||||
fmt.Println("4. Utiliser le résultat")
|
||||
fmt.Printf(" K-mers de qualité: %d\n", result.Cardinality())
|
||||
fmt.Printf(" Mémoire utilisée: %.2f MB\n", float64(finalFilter.MemoryUsage())/1024/1024)
|
||||
|
||||
fmt.Println("5. Sauvegarder (optionnel)")
|
||||
// result.Save("filtered_kmers.bin")
|
||||
|
||||
// ==========================================
|
||||
// EXEMPLE 7 : Vérification individuelle
|
||||
// ==========================================
|
||||
fmt.Println("\n=== EXEMPLE 7 : Vérification de k-mers spécifiques ===\n")
|
||||
|
||||
checkFilter := obikmer.NewFrequencyFilter(31, 3)
|
||||
|
||||
testSeq := []byte("ACGTACGTACGTACGTACGTACGTACGTACG")
|
||||
for i := 0; i < 5; i++ {
|
||||
checkFilter.AddSequence(testSeq)
|
||||
}
|
||||
|
||||
var kmers []uint64
|
||||
kmers = obikmer.EncodeKmers(testSeq, 31, &kmers)
|
||||
|
||||
if len(kmers) > 0 {
|
||||
testKmer := kmers[0]
|
||||
|
||||
fmt.Printf("K-mer test: 0x%016X\n", testKmer)
|
||||
fmt.Printf(" Présent dans filtre: %v\n", checkFilter.Contains(testKmer))
|
||||
fmt.Printf(" Fréquence approx: %d\n", checkFilter.GetFrequency(testKmer))
|
||||
}
|
||||
|
||||
// ==========================================
|
||||
// EXEMPLE 8 : Intégration avec collection
|
||||
// ==========================================
|
||||
fmt.Println("\n=== EXEMPLE 8 : Intégration avec KmerSetCollection ===\n")
|
||||
|
||||
// Créer une collection de génomes filtrés
|
||||
collection := obikmer.NewKmerSetCollection(31)
|
||||
|
||||
genomes := map[string][][]byte{
|
||||
"Genome1": {
|
||||
[]byte("ACGTACGTACGTACGTACGTACGTACGTACG"),
|
||||
[]byte("ACGTACGTACGTACGTACGTACGTACGTACG"),
|
||||
[]byte("ACGTACGTACGTACGTACGTACGTACGTACG"),
|
||||
[]byte("TTTTTTTTTTTTTTTTTTTTTTTTTTTTTTTT"), // Erreur
|
||||
},
|
||||
"Genome2": {
|
||||
[]byte("ACGTACGTACGTACGTACGTACGTACGTACG"),
|
||||
[]byte("ACGTACGTACGTACGTACGTACGTACGTACG"),
|
||||
[]byte("ACGTACGTACGTACGTACGTACGTACGTACG"),
|
||||
[]byte("GGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGG"), // Erreur
|
||||
},
|
||||
}
|
||||
|
||||
for id, sequences := range genomes {
|
||||
// Filtrer chaque génome
|
||||
genomeFilter := obikmer.NewFrequencyFilter(31, 3)
|
||||
genomeFilter.AddSequences(sequences)
|
||||
|
||||
// Ajouter à la collection
|
||||
filteredSet := genomeFilter.GetFilteredSet(id)
|
||||
collection.Add(filteredSet)
|
||||
|
||||
fmt.Printf("%s: %d k-mers de qualité\n", id, filteredSet.Cardinality())
|
||||
}
|
||||
|
||||
// Analyser la collection
|
||||
fmt.Println("\nAnalyse comparative:")
|
||||
collectionStats := collection.ComputeStats()
|
||||
fmt.Printf(" Core genome: %d k-mers\n", collectionStats.CoreSize)
|
||||
fmt.Printf(" Pan genome: %d k-mers\n", collectionStats.PanGenomeSize)
|
||||
|
||||
// ==========================================
|
||||
// RÉSUMÉ
|
||||
// ==========================================
|
||||
fmt.Println("\n=== RÉSUMÉ ===\n")
|
||||
fmt.Println("Le FrequencyFilter permet de:")
|
||||
fmt.Println(" ✓ Filtrer les k-mers par fréquence minimale")
|
||||
fmt.Println(" ✓ Utiliser une mémoire optimale avec Roaring bitmaps")
|
||||
fmt.Println(" ✓ Une seule passe sur les données")
|
||||
fmt.Println(" ✓ Éliminer efficacement les erreurs de séquençage")
|
||||
fmt.Println("")
|
||||
fmt.Println("Workflow typique:")
|
||||
fmt.Println(" 1. filter := NewFrequencyFilter(k, minFreq)")
|
||||
fmt.Println(" 2. for each sequence: filter.AddSequence(seq)")
|
||||
fmt.Println(" 3. filtered := filter.GetFilteredSet(id)")
|
||||
fmt.Println(" 4. Utiliser filtered dans vos analyses")
|
||||
}
|
||||
|
||||
// ==================================
|
||||
// FONCTION HELPER POUR BENCHMARKS
|
||||
// ==================================
|
||||
|
||||
func BenchmarkFrequencyFilter() {
|
||||
k := 31
|
||||
minFreq := 3
|
||||
|
||||
// Test avec différentes tailles
|
||||
sizes := []int{1000, 10000, 100000}
|
||||
|
||||
fmt.Println("\n=== BENCHMARK ===\n")
|
||||
|
||||
for _, size := range sizes {
|
||||
filter := obikmer.NewFrequencyFilter(k, minFreq)
|
||||
|
||||
// Générer des séquences
|
||||
for i := 0; i < size; i++ {
|
||||
seq := make([]byte, 100)
|
||||
for j := range seq {
|
||||
seq[j] = "ACGT"[(i+j)%4]
|
||||
}
|
||||
filter.AddSequence(seq)
|
||||
}
|
||||
|
||||
fmt.Printf("Size=%d reads:\n", size)
|
||||
fmt.Printf(" Filtered k-mers: %d\n", filter.Cardinality())
|
||||
fmt.Printf(" Memory: %.2f MB\n", float64(filter.MemoryUsage())/1024/1024)
|
||||
fmt.Println()
|
||||
}
|
||||
}
|
||||
|
||||
// ==================================
|
||||
// FONCTION POUR DONNÉES RÉELLES
|
||||
// ==================================
|
||||
|
||||
func ProcessRealData() {
|
||||
// Exemple pour traiter de vraies données FASTQ
|
||||
|
||||
k := 31
|
||||
minFreq := 3
|
||||
|
||||
filter := obikmer.NewFrequencyFilter(k, minFreq)
|
||||
|
||||
// Pseudo-code pour lire un FASTQ
|
||||
/*
|
||||
fastqFile := "sample.fastq"
|
||||
reader := NewFastqReader(fastqFile)
|
||||
|
||||
for reader.HasNext() {
|
||||
read := reader.Next()
|
||||
filter.AddSequence(read.Sequence)
|
||||
}
|
||||
|
||||
// Récupérer le résultat
|
||||
filtered := filter.GetFilteredSet("sample_filtered")
|
||||
filtered.Save("sample_filtered_kmers.bin")
|
||||
|
||||
// Stats
|
||||
stats := filter.Stats()
|
||||
fmt.Println(stats.String())
|
||||
*/
|
||||
|
||||
fmt.Println("Workflow pour données réelles:")
|
||||
fmt.Println(" 1. Créer le filtre avec minFreq approprié (2-5 typique)")
|
||||
fmt.Println(" 2. Stream les reads depuis FASTQ")
|
||||
fmt.Println(" 3. Récupérer les k-mers filtrés")
|
||||
fmt.Println(" 4. Utiliser pour assemblage/comparaison/etc.")
|
||||
|
||||
_ = filter // unused
|
||||
}
|
||||
272
pkg/obidist/dist_matrix.go
Normal file
272
pkg/obidist/dist_matrix.go
Normal file
@@ -0,0 +1,272 @@
|
||||
package obidist
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
)
|
||||
|
||||
// DistMatrix represents a symmetric matrix stored as a triangular matrix.
// The diagonal has a constant value (typically 0 for distances, 1 for
// similarities) and is never stored explicitly.
// Only the upper triangle (i < j) is stored, row by row, to save memory:
// an n×n matrix keeps exactly n(n-1)/2 float64 values.
type DistMatrix struct {
	n             int       // Number of elements (matrix dimension)
	data          []float64 // Triangular storage: upper triangle only, row by row
	labels        []string  // Optional labels for rows/columns (length n)
	diagonalValue float64   // Constant value returned by Get(i, i)
}
|
||||
|
||||
// NewDistMatrix creates a new distance matrix of size n×n.
|
||||
// All distances are initialized to 0.0, diagonal is 0.0.
|
||||
func NewDistMatrix(n int) *DistMatrix {
|
||||
if n < 0 {
|
||||
panic("matrix size must be non-negative")
|
||||
}
|
||||
|
||||
// Number of elements in upper triangle: n(n-1)/2
|
||||
size := n * (n - 1) / 2
|
||||
|
||||
return &DistMatrix{
|
||||
n: n,
|
||||
data: make([]float64, size),
|
||||
labels: make([]string, n),
|
||||
diagonalValue: 0.0,
|
||||
}
|
||||
}
|
||||
|
||||
// NewDistMatrixWithLabels creates a new distance matrix with labels.
|
||||
// Diagonal is 0.0 by default.
|
||||
func NewDistMatrixWithLabels(labels []string) *DistMatrix {
|
||||
dm := NewDistMatrix(len(labels))
|
||||
copy(dm.labels, labels)
|
||||
return dm
|
||||
}
|
||||
|
||||
// NewSimilarityMatrix creates a new similarity matrix of size n×n.
|
||||
// All off-diagonal values are initialized to 0.0, diagonal is 1.0.
|
||||
func NewSimilarityMatrix(n int) *DistMatrix {
|
||||
if n < 0 {
|
||||
panic("matrix size must be non-negative")
|
||||
}
|
||||
|
||||
// Number of elements in upper triangle: n(n-1)/2
|
||||
size := n * (n - 1) / 2
|
||||
|
||||
return &DistMatrix{
|
||||
n: n,
|
||||
data: make([]float64, size),
|
||||
labels: make([]string, n),
|
||||
diagonalValue: 1.0,
|
||||
}
|
||||
}
|
||||
|
||||
// NewSimilarityMatrixWithLabels creates a new similarity matrix with labels.
|
||||
// Diagonal is 1.0.
|
||||
func NewSimilarityMatrixWithLabels(labels []string) *DistMatrix {
|
||||
dm := NewSimilarityMatrix(len(labels))
|
||||
copy(dm.labels, labels)
|
||||
return dm
|
||||
}
|
||||
|
||||
// Size returns the dimension of the matrix (n for an n×n matrix).
|
||||
func (dm *DistMatrix) Size() int {
|
||||
return dm.n
|
||||
}
|
||||
|
||||
// indexFor computes the index in the data array for element (i, j).
|
||||
// Assumes i < j (upper triangle).
|
||||
//
|
||||
// The upper triangle is stored row by row:
|
||||
// (0,1), (0,2), ..., (0,n-1), (1,2), (1,3), ..., (1,n-1), (2,3), ...
|
||||
//
|
||||
// For element (i, j) where i < j:
|
||||
// index = i*(n-1) + j - 1 - i*(i+1)/2
|
||||
//
|
||||
// This can be simplified to:
|
||||
// index = i*n - i*(i+1)/2 + j - i - 1
|
||||
// = i*(n - (i+1)/2 - 1) + j - 1
|
||||
// = i*(n - 1 - i/2 - 1/2) + j - 1
|
||||
//
|
||||
// But the clearest formula is:
|
||||
// index = i*n - i*(i+3)/2 + j - 1
|
||||
func (dm *DistMatrix) indexFor(i, j int) int {
|
||||
if i >= j {
|
||||
panic(fmt.Sprintf("indexFor expects i < j, got i=%d, j=%d", i, j))
|
||||
}
|
||||
// Formula: number of elements in previous rows + position in current row
|
||||
// Previous rows (0 to i-1): sum from k=0 to i-1 of (n-1-k) = i*n - i*(i+1)/2
|
||||
// Current row position: j - i - 1
|
||||
return i*dm.n - i*(i+1)/2 + j - i - 1
|
||||
}
|
||||
|
||||
// Get returns the value at position (i, j).
|
||||
// The matrix is symmetric, so Get(i, j) == Get(j, i).
|
||||
// The diagonal returns the diagonalValue (0.0 for distances, 1.0 for similarities).
|
||||
func (dm *DistMatrix) Get(i, j int) float64 {
|
||||
if i < 0 || i >= dm.n || j < 0 || j >= dm.n {
|
||||
panic(fmt.Sprintf("indices out of bounds: i=%d, j=%d, n=%d", i, j, dm.n))
|
||||
}
|
||||
|
||||
// Diagonal: return the diagonal value
|
||||
if i == j {
|
||||
return dm.diagonalValue
|
||||
}
|
||||
|
||||
// Ensure i < j for indexing
|
||||
if i > j {
|
||||
i, j = j, i
|
||||
}
|
||||
|
||||
return dm.data[dm.indexFor(i, j)]
|
||||
}
|
||||
|
||||
// Set sets the value at position (i, j).
|
||||
// The matrix is symmetric, so Set(i, j, v) also sets (j, i) to v.
|
||||
// Setting the diagonal (i == j) is ignored (diagonal has a fixed value).
|
||||
func (dm *DistMatrix) Set(i, j int, value float64) {
|
||||
if i < 0 || i >= dm.n || j < 0 || j >= dm.n {
|
||||
panic(fmt.Sprintf("indices out of bounds: i=%d, j=%d, n=%d", i, j, dm.n))
|
||||
}
|
||||
|
||||
// Ignore diagonal assignments (diagonal has a fixed value)
|
||||
if i == j {
|
||||
return
|
||||
}
|
||||
|
||||
// Ensure i < j for indexing
|
||||
if i > j {
|
||||
i, j = j, i
|
||||
}
|
||||
|
||||
dm.data[dm.indexFor(i, j)] = value
|
||||
}
|
||||
|
||||
// GetLabel returns the label for element i.
|
||||
func (dm *DistMatrix) GetLabel(i int) string {
|
||||
if i < 0 || i >= dm.n {
|
||||
panic(fmt.Sprintf("index out of bounds: i=%d, n=%d", i, dm.n))
|
||||
}
|
||||
return dm.labels[i]
|
||||
}
|
||||
|
||||
// SetLabel sets the label for element i.
|
||||
func (dm *DistMatrix) SetLabel(i int, label string) {
|
||||
if i < 0 || i >= dm.n {
|
||||
panic(fmt.Sprintf("index out of bounds: i=%d, n=%d", i, dm.n))
|
||||
}
|
||||
dm.labels[i] = label
|
||||
}
|
||||
|
||||
// Labels returns a copy of all labels.
|
||||
func (dm *DistMatrix) Labels() []string {
|
||||
labels := make([]string, dm.n)
|
||||
copy(labels, dm.labels)
|
||||
return labels
|
||||
}
|
||||
|
||||
// GetRow returns the i-th row of the distance matrix.
|
||||
// The returned slice is a copy.
|
||||
func (dm *DistMatrix) GetRow(i int) []float64 {
|
||||
if i < 0 || i >= dm.n {
|
||||
panic(fmt.Sprintf("index out of bounds: i=%d, n=%d", i, dm.n))
|
||||
}
|
||||
|
||||
row := make([]float64, dm.n)
|
||||
for j := 0; j < dm.n; j++ {
|
||||
row[j] = dm.Get(i, j)
|
||||
}
|
||||
return row
|
||||
}
|
||||
|
||||
// GetColumn returns the j-th column of the distance matrix.
|
||||
// Since the matrix is symmetric, GetColumn(j) == GetRow(j).
|
||||
// The returned slice is a copy.
|
||||
func (dm *DistMatrix) GetColumn(j int) []float64 {
|
||||
return dm.GetRow(j)
|
||||
}
|
||||
|
||||
// MinDistance returns the smallest off-diagonal distance in the matrix
// (zero-valued distances included; the original doc said "non-zero", but
// the scan below accepts any value), along with the indices (i, j), i < j,
// where it first occurs.
// Returns (0.0, -1, -1) when the matrix has fewer than two elements.
func (dm *DistMatrix) MinDistance() (float64, int, int) {
	if dm.n <= 1 {
		return 0.0, -1, -1
	}

	// Sentinel: a negative value marks "nothing found yet"; the first
	// scanned distance always replaces it.
	minDist := -1.0
	minI, minJ := -1, -1

	// Scan the upper triangle only (the matrix is symmetric).
	for i := 0; i < dm.n-1; i++ {
		for j := i + 1; j < dm.n; j++ {
			dist := dm.Get(i, j)
			if minDist < 0 || dist < minDist {
				minDist = dist
				minI = i
				minJ = j
			}
		}
	}

	// Defensive: unreachable for n >= 2, since the loop always visits
	// at least one pair.
	if minI < 0 {
		return 0.0, -1, -1
	}

	return minDist, minI, minJ
}
|
||||
|
||||
// MaxDistance returns the maximum distance in the matrix,
|
||||
// along with the indices (i, j) where it occurs.
|
||||
// Returns (0.0, -1, -1) if the matrix is empty.
|
||||
func (dm *DistMatrix) MaxDistance() (float64, int, int) {
|
||||
if dm.n <= 1 {
|
||||
return 0.0, -1, -1
|
||||
}
|
||||
|
||||
maxDist := -1.0
|
||||
maxI, maxJ := -1, -1
|
||||
|
||||
for i := 0; i < dm.n-1; i++ {
|
||||
for j := i + 1; j < dm.n; j++ {
|
||||
dist := dm.Get(i, j)
|
||||
if maxDist < 0 || dist > maxDist {
|
||||
maxDist = dist
|
||||
maxI = i
|
||||
maxJ = j
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if maxI < 0 {
|
||||
return 0.0, -1, -1
|
||||
}
|
||||
|
||||
return maxDist, maxI, maxJ
|
||||
}
|
||||
|
||||
// Copy creates a deep copy of the matrix.
|
||||
func (dm *DistMatrix) Copy() *DistMatrix {
|
||||
newDM := &DistMatrix{
|
||||
n: dm.n,
|
||||
data: make([]float64, len(dm.data)),
|
||||
labels: make([]string, dm.n),
|
||||
diagonalValue: dm.diagonalValue,
|
||||
}
|
||||
|
||||
copy(newDM.data, dm.data)
|
||||
copy(newDM.labels, dm.labels)
|
||||
|
||||
return newDM
|
||||
}
|
||||
|
||||
// ToFullMatrix returns a full n×n matrix representation.
|
||||
// This allocates n² values, so use only when needed.
|
||||
func (dm *DistMatrix) ToFullMatrix() [][]float64 {
|
||||
matrix := make([][]float64, dm.n)
|
||||
for i := 0; i < dm.n; i++ {
|
||||
matrix[i] = make([]float64, dm.n)
|
||||
for j := 0; j < dm.n; j++ {
|
||||
matrix[i][j] = dm.Get(i, j)
|
||||
}
|
||||
}
|
||||
return matrix
|
||||
}
|
||||
386
pkg/obidist/dist_matrix_test.go
Normal file
386
pkg/obidist/dist_matrix_test.go
Normal file
@@ -0,0 +1,386 @@
|
||||
package obidist
|
||||
|
||||
import (
|
||||
"math"
|
||||
"testing"
|
||||
)
|
||||
|
||||
// TestNewDistMatrix verifies construction: reported size and that every
// cell (diagonal included) starts at 0.0.
func TestNewDistMatrix(t *testing.T) {
	dm := NewDistMatrix(5)

	if dm.Size() != 5 {
		t.Errorf("Expected size 5, got %d", dm.Size())
	}

	// Check that all values are initialized to 0
	for i := 0; i < 5; i++ {
		for j := 0; j < 5; j++ {
			if dm.Get(i, j) != 0.0 {
				t.Errorf("Expected 0.0 at (%d, %d), got %f", i, j, dm.Get(i, j))
			}
		}
	}
}

// TestDistMatrixDiagonal verifies that the diagonal of a distance matrix
// is always 0.0 and that writing to it is a silent no-op.
func TestDistMatrixDiagonal(t *testing.T) {
	dm := NewDistMatrix(5)

	// Diagonal should always be 0
	for i := 0; i < 5; i++ {
		if dm.Get(i, i) != 0.0 {
			t.Errorf("Expected diagonal element (%d, %d) to be 0.0, got %f", i, i, dm.Get(i, i))
		}
	}

	// Try to set diagonal (should be ignored)
	dm.Set(2, 2, 5.0)
	if dm.Get(2, 2) != 0.0 {
		t.Errorf("Diagonal should remain 0.0 even after Set, got %f", dm.Get(2, 2))
	}
}

// TestDistMatrixSymmetry verifies Get(i, j) == Get(j, i) for a few
// written pairs.
func TestDistMatrixSymmetry(t *testing.T) {
	dm := NewDistMatrix(4)

	dm.Set(0, 1, 1.5)
	dm.Set(0, 2, 2.5)
	dm.Set(1, 3, 3.5)

	// Check symmetry
	if dm.Get(0, 1) != dm.Get(1, 0) {
		t.Errorf("Matrix not symmetric: Get(0,1)=%f, Get(1,0)=%f", dm.Get(0, 1), dm.Get(1, 0))
	}

	if dm.Get(0, 2) != dm.Get(2, 0) {
		t.Errorf("Matrix not symmetric: Get(0,2)=%f, Get(2,0)=%f", dm.Get(0, 2), dm.Get(2, 0))
	}

	if dm.Get(1, 3) != dm.Get(3, 1) {
		t.Errorf("Matrix not symmetric: Get(1,3)=%f, Get(3,1)=%f", dm.Get(1, 3), dm.Get(3, 1))
	}
}
|
||||
|
||||
// TestDistMatrixSetGet round-trips every upper-triangle cell of a 4×4
// matrix through Set/Get and checks the symmetric access path too.
func TestDistMatrixSetGet(t *testing.T) {
	dm := NewDistMatrix(4)

	testCases := []struct {
		i     int
		j     int
		value float64
	}{
		{0, 1, 1.5},
		{0, 2, 2.5},
		{0, 3, 3.5},
		{1, 2, 4.5},
		{1, 3, 5.5},
		{2, 3, 6.5},
	}

	for _, tc := range testCases {
		dm.Set(tc.i, tc.j, tc.value)
	}

	for _, tc := range testCases {
		got := dm.Get(tc.i, tc.j)
		if math.Abs(got-tc.value) > 1e-10 {
			t.Errorf("Get(%d, %d): expected %f, got %f", tc.i, tc.j, tc.value, got)
		}

		// Check symmetry
		got = dm.Get(tc.j, tc.i)
		if math.Abs(got-tc.value) > 1e-10 {
			t.Errorf("Get(%d, %d) (symmetric): expected %f, got %f", tc.j, tc.i, tc.value, got)
		}
	}
}

// TestDistMatrixLabels covers label construction, GetLabel/SetLabel, and
// that Labels() returns an independent copy.
func TestDistMatrixLabels(t *testing.T) {
	labels := []string{"A", "B", "C", "D"}
	dm := NewDistMatrixWithLabels(labels)

	if dm.Size() != 4 {
		t.Errorf("Expected size 4, got %d", dm.Size())
	}

	for i, label := range labels {
		if dm.GetLabel(i) != label {
			t.Errorf("Expected label %s at index %d, got %s", label, i, dm.GetLabel(i))
		}
	}

	// Modify a label
	dm.SetLabel(1, "Modified")
	if dm.GetLabel(1) != "Modified" {
		t.Errorf("Expected label 'Modified' at index 1, got %s", dm.GetLabel(1))
	}

	// Check Labels() returns a copy
	labelsCopy := dm.Labels()
	labelsCopy[0] = "ChangedCopy"
	if dm.GetLabel(0) != "A" {
		t.Errorf("Modifying Labels() return value should not affect original labels")
	}
}
|
||||
|
||||
// TestDistMatrixMinDistance checks that MinDistance finds the smallest
// stored distance and reports its coordinates (in either orientation).
func TestDistMatrixMinDistance(t *testing.T) {
	dm := NewDistMatrix(4)

	dm.Set(0, 1, 2.5)
	dm.Set(0, 2, 1.5) // minimum
	dm.Set(0, 3, 3.5)
	dm.Set(1, 2, 4.5)
	dm.Set(1, 3, 5.5)
	dm.Set(2, 3, 6.5)

	minDist, minI, minJ := dm.MinDistance()

	if math.Abs(minDist-1.5) > 1e-10 {
		t.Errorf("Expected min distance 1.5, got %f", minDist)
	}

	if (minI != 0 || minJ != 2) && (minI != 2 || minJ != 0) {
		t.Errorf("Expected min at (0, 2) or (2, 0), got (%d, %d)", minI, minJ)
	}
}

// TestDistMatrixMaxDistance checks that MaxDistance finds the largest
// stored distance and reports its coordinates (in either orientation).
func TestDistMatrixMaxDistance(t *testing.T) {
	dm := NewDistMatrix(4)

	dm.Set(0, 1, 2.5)
	dm.Set(0, 2, 1.5)
	dm.Set(0, 3, 3.5)
	dm.Set(1, 2, 4.5)
	dm.Set(1, 3, 5.5)
	dm.Set(2, 3, 6.5) // maximum

	maxDist, maxI, maxJ := dm.MaxDistance()

	if math.Abs(maxDist-6.5) > 1e-10 {
		t.Errorf("Expected max distance 6.5, got %f", maxDist)
	}

	if (maxI != 2 || maxJ != 3) && (maxI != 3 || maxJ != 2) {
		t.Errorf("Expected max at (2, 3) or (3, 2), got (%d, %d)", maxI, maxJ)
	}
}

// TestDistMatrixGetRow checks dense row extraction, including the implicit
// 0.0 diagonal entry and symmetric values below the diagonal.
func TestDistMatrixGetRow(t *testing.T) {
	dm := NewDistMatrix(3)

	dm.Set(0, 1, 1.0)
	dm.Set(0, 2, 2.0)
	dm.Set(1, 2, 3.0)

	row0 := dm.GetRow(0)
	expected0 := []float64{0.0, 1.0, 2.0}

	for i, val := range expected0 {
		if math.Abs(row0[i]-val) > 1e-10 {
			t.Errorf("Row 0[%d]: expected %f, got %f", i, val, row0[i])
		}
	}

	row1 := dm.GetRow(1)
	expected1 := []float64{1.0, 0.0, 3.0}

	for i, val := range expected1 {
		if math.Abs(row1[i]-val) > 1e-10 {
			t.Errorf("Row 1[%d]: expected %f, got %f", i, val, row1[i])
		}
	}
}
|
||||
|
||||
// TestDistMatrixCopy verifies that Copy duplicates values and labels and
// that mutating the copy leaves the original untouched (deep copy).
func TestDistMatrixCopy(t *testing.T) {
	dm := NewDistMatrixWithLabels([]string{"A", "B", "C"})
	dm.Set(0, 1, 1.5)
	dm.Set(0, 2, 2.5)
	dm.Set(1, 2, 3.5)

	dmCopy := dm.Copy()

	// Check values are copied
	if dmCopy.Get(0, 1) != dm.Get(0, 1) {
		t.Errorf("Copy has different value at (0, 1)")
	}

	// Check labels are copied
	if dmCopy.GetLabel(0) != dm.GetLabel(0) {
		t.Errorf("Copy has different label at index 0")
	}

	// Modify copy and ensure original unchanged
	dmCopy.Set(0, 1, 99.9)
	if dm.Get(0, 1) == 99.9 {
		t.Errorf("Modifying copy affected original matrix")
	}

	dmCopy.SetLabel(0, "Modified")
	if dm.GetLabel(0) == "Modified" {
		t.Errorf("Modifying copy label affected original matrix")
	}
}

// TestDistMatrixToFullMatrix verifies the dense n×n expansion against a
// hand-written expected matrix (symmetric, zero diagonal).
func TestDistMatrixToFullMatrix(t *testing.T) {
	dm := NewDistMatrix(3)
	dm.Set(0, 1, 1.0)
	dm.Set(0, 2, 2.0)
	dm.Set(1, 2, 3.0)

	full := dm.ToFullMatrix()

	expected := [][]float64{
		{0.0, 1.0, 2.0},
		{1.0, 0.0, 3.0},
		{2.0, 3.0, 0.0},
	}

	for i := 0; i < 3; i++ {
		for j := 0; j < 3; j++ {
			if math.Abs(full[i][j]-expected[i][j]) > 1e-10 {
				t.Errorf("Full matrix[%d][%d]: expected %f, got %f",
					i, j, expected[i][j], full[i][j])
			}
		}
	}
}

// TestDistMatrixBoundsChecking verifies that out-of-range accessors panic.
// testPanic recovers the expected panic and fails the test if none occurs.
func TestDistMatrixBoundsChecking(t *testing.T) {
	dm := NewDistMatrix(3)

	// Test Get out of bounds
	testPanic := func(f func()) {
		defer func() {
			if r := recover(); r == nil {
				t.Errorf("Expected panic, but didn't get one")
			}
		}()
		f()
	}

	testPanic(func() { dm.Get(-1, 0) })
	testPanic(func() { dm.Get(0, 3) })
	testPanic(func() { dm.Set(3, 0, 1.0) })
	testPanic(func() { dm.GetLabel(-1) })
	testPanic(func() { dm.SetLabel(3, "Invalid") })
	testPanic(func() { dm.GetRow(3) })
}
|
||||
|
||||
// TestDistMatrixEmptyMatrix verifies the degenerate n=0 case: size 0 and
// the (0.0, -1, -1) sentinel from MinDistance/MaxDistance.
func TestDistMatrixEmptyMatrix(t *testing.T) {
	dm := NewDistMatrix(0)

	if dm.Size() != 0 {
		t.Errorf("Expected size 0, got %d", dm.Size())
	}

	minDist, minI, minJ := dm.MinDistance()
	if minDist != 0.0 || minI != -1 || minJ != -1 {
		t.Errorf("Empty matrix MinDistance should return (0.0, -1, -1), got (%f, %d, %d)",
			minDist, minI, minJ)
	}

	maxDist, maxI, maxJ := dm.MaxDistance()
	if maxDist != 0.0 || maxI != -1 || maxJ != -1 {
		t.Errorf("Empty matrix MaxDistance should return (0.0, -1, -1), got (%f, %d, %d)",
			maxDist, maxI, maxJ)
	}
}

// TestDistMatrixSingleElement verifies the n=1 case: only the diagonal
// exists (fixed at 0.0) and MinDistance returns the sentinel.
func TestDistMatrixSingleElement(t *testing.T) {
	dm := NewDistMatrix(1)

	if dm.Size() != 1 {
		t.Errorf("Expected size 1, got %d", dm.Size())
	}

	// Only element is diagonal (always 0)
	if dm.Get(0, 0) != 0.0 {
		t.Errorf("Expected 0.0 at (0, 0), got %f", dm.Get(0, 0))
	}

	minDist, minI, minJ := dm.MinDistance()
	if minDist != 0.0 || minI != -1 || minJ != -1 {
		t.Errorf("Single element matrix MinDistance should return (0.0, -1, -1), got (%f, %d, %d)",
			minDist, minI, minJ)
	}
}
|
||||
|
||||
// TestNewSimilarityMatrix verifies that a similarity matrix has a fixed
// 1.0 diagonal and zero-initialised off-diagonal values.
func TestNewSimilarityMatrix(t *testing.T) {
	sm := NewSimilarityMatrix(4)

	if sm.Size() != 4 {
		t.Errorf("Expected size 4, got %d", sm.Size())
	}

	// Check diagonal is 1.0
	for i := 0; i < 4; i++ {
		if sm.Get(i, i) != 1.0 {
			t.Errorf("Expected diagonal element (%d, %d) to be 1.0, got %f", i, i, sm.Get(i, i))
		}
	}

	// Check off-diagonal is 0.0
	if sm.Get(0, 1) != 0.0 {
		t.Errorf("Expected off-diagonal to be 0.0, got %f", sm.Get(0, 1))
	}
}

// TestNewSimilarityMatrixWithLabels covers labelled construction, the 1.0
// diagonal, and symmetric Set/Get of similarity values.
func TestNewSimilarityMatrixWithLabels(t *testing.T) {
	labels := []string{"A", "B", "C"}
	sm := NewSimilarityMatrixWithLabels(labels)

	if sm.Size() != 3 {
		t.Errorf("Expected size 3, got %d", sm.Size())
	}

	// Check labels
	for i, label := range labels {
		if sm.GetLabel(i) != label {
			t.Errorf("Expected label %s at index %d, got %s", label, i, sm.GetLabel(i))
		}
	}

	// Check diagonal is 1.0
	for i := 0; i < 3; i++ {
		if sm.Get(i, i) != 1.0 {
			t.Errorf("Expected diagonal element (%d, %d) to be 1.0, got %f", i, i, sm.Get(i, i))
		}
	}

	// Set some similarities
	sm.Set(0, 1, 0.8)
	sm.Set(0, 2, 0.6)
	sm.Set(1, 2, 0.7)

	// Check values
	if math.Abs(sm.Get(0, 1)-0.8) > 1e-10 {
		t.Errorf("Expected 0.8 at (0, 1), got %f", sm.Get(0, 1))
	}

	if math.Abs(sm.Get(1, 0)-0.8) > 1e-10 {
		t.Errorf("Expected 0.8 at (1, 0) (symmetry), got %f", sm.Get(1, 0))
	}
}

// TestSimilarityMatrixCopy verifies Copy preserves the 1.0 diagonal and
// values, and that the copy is independent of the original.
func TestSimilarityMatrixCopy(t *testing.T) {
	sm := NewSimilarityMatrix(3)
	sm.Set(0, 1, 0.9)
	sm.Set(0, 2, 0.7)

	smCopy := sm.Copy()

	// Check diagonal is preserved
	if smCopy.Get(0, 0) != 1.0 {
		t.Errorf("Copied similarity matrix should have diagonal 1.0, got %f", smCopy.Get(0, 0))
	}

	// Check values are preserved
	if math.Abs(smCopy.Get(0, 1)-0.9) > 1e-10 {
		t.Errorf("Copy should preserve values, expected 0.9, got %f", smCopy.Get(0, 1))
	}

	// Modify copy and ensure original unchanged
	smCopy.Set(0, 1, 0.5)
	if math.Abs(sm.Get(0, 1)-0.9) > 1e-10 {
		t.Errorf("Modifying copy should not affect original, expected 0.9, got %f", sm.Get(0, 1))
	}
}
|
||||
@@ -5,6 +5,11 @@ import (
|
||||
"git.metabarcoding.org/obitools/obitools4/obitools4/pkg/obiutils"
|
||||
)
|
||||
|
||||
// __single_base_code__ encodes DNA bases to 2-bit values.
|
||||
// Standard bases: A=0, C=1, G=2, T/U=3
|
||||
// Ambiguous bases (N, R, Y, W, S, K, M, B, D, H, V) and other characters: encoded as 0 (A)
|
||||
// Note: For error detection with ambiguous bases, use __single_base_code_err__ in encodekmer.go
|
||||
|
||||
var __single_base_code__ = []byte{0,
|
||||
// A, B, C, D,
|
||||
0, 0, 1, 0,
|
||||
|
||||
903
pkg/obikmer/encodekmer.go
Normal file
903
pkg/obikmer/encodekmer.go
Normal file
@@ -0,0 +1,903 @@
|
||||
package obikmer
|
||||
|
||||
import "iter"
|
||||
|
||||
// __single_base_code_err__ maps an ASCII letter (indexed by byte&31) to
// its 2-bit nucleotide code, with ambiguity detection:
//   - A=0, C=1, G=2, T=3, U=3 (U treated as T)
//   - every other letter (ambiguity codes such as N, R, Y, … and
//     non-nucleotide characters) maps to 0xFF (ambiguousBaseCode)
//
// The leading 0 pads index 0 so that 'A'&31 == 1 lands on the first row;
// the table holds 32 entries, so any byte masked with &31 is in range.
var __single_base_code_err__ = []byte{0,
	// A, B, C, D,
	0, 0xFF, 1, 0xFF,
	// E, F, G, H,
	0xFF, 0xFF, 2, 0xFF,
	// I, J, K, L,
	0xFF, 0xFF, 0xFF, 0xFF,
	// M, N, O, P,
	0xFF, 0xFF, 0xFF, 0xFF,
	// Q, R, S, T,
	0xFF, 0xFF, 0xFF, 3,
	// U, V, W, X,
	3, 0xFF, 0xFF, 0xFF,
	// Y, Z, then trailing non-letter slots
	0xFF, 0xFF, 0xFF, 0xFF,
	0xFF, 0xFF, 0xFF,
}

// ambiguousBaseCode is the sentinel stored in __single_base_code_err__ for
// any character that is not an unambiguous A/C/G/T/U.
const ambiguousBaseCode = byte(0xFF)
|
||||
|
||||
// Error markers for k-mers of odd length <= 31.
//
// An odd k <= 31 uses at most 62 bits of the uint64, leaving the two top
// bits (62-63) free to carry an error code:
//
//	0 = no error, 1 = error type 1, 2 = error type 2, 3 = error type 3
//
// SetKmerError, GetKmerError and ClearKmerError manipulate those bits.
const (
	KmerErrorMask    uint64 = 0b11 << 62     // bits 62-63: error code
	KmerSequenceMask uint64 = ^KmerErrorMask // bits 0-61: sequence payload
)

// SetKmerError returns kmer with its error bits replaced by errorCode.
// Only the low two bits of errorCode are used (valid codes are 0-3).
// Meaningful only for odd k-mer sizes <= 31, where bits 62-63 are unused.
func SetKmerError(kmer uint64, errorCode uint64) uint64 {
	payload := kmer & KmerSequenceMask
	return payload | ((errorCode & 0b11) << 62)
}

// GetKmerError returns the error code (0-3) stored in bits 62-63 of kmer,
// shifted down to a plain small integer.
func GetKmerError(kmer uint64) uint64 {
	return (kmer & KmerErrorMask) >> 62
}

// ClearKmerError returns kmer with its error bits zeroed, leaving only the
// sequence encoding.
func ClearKmerError(kmer uint64) uint64 {
	return kmer & KmerSequenceMask
}
|
||||
|
||||
// EncodeKmer encodes a single k-mer sequence to uint64.
|
||||
// This is the optimal zero-allocation function for encoding a single k-mer.
|
||||
//
|
||||
// Each nucleotide is encoded on 2 bits according to __single_base_code__:
|
||||
// - A = 0 (00)
|
||||
// - C = 1 (01)
|
||||
// - G = 2 (10)
|
||||
// - T/U = 3 (11)
|
||||
//
|
||||
// The maximum k-mer size is 31 (using 62 bits), leaving the top 2 bits
|
||||
// available for error markers if needed.
|
||||
//
|
||||
// Parameters:
|
||||
// - seq: DNA sequence as a byte slice (case insensitive, supports A, C, G, T, U)
|
||||
// - k: k-mer size (must be between 1 and 31)
|
||||
//
|
||||
// Returns:
|
||||
// - encoded k-mer as uint64
|
||||
// - panics if len(seq) != k or k is invalid
|
||||
//
|
||||
// Example:
|
||||
//
|
||||
// kmer := EncodeKmer([]byte("ACGT"), 4)
|
||||
func EncodeKmer(seq []byte, k int) uint64 {
|
||||
if k < 1 || k > 31 {
|
||||
panic("k must be between 1 and 31")
|
||||
}
|
||||
if len(seq) != k {
|
||||
panic("sequence length must equal k")
|
||||
}
|
||||
|
||||
var kmer uint64
|
||||
for i := 0; i < k; i++ {
|
||||
kmer <<= 2
|
||||
kmer |= uint64(__single_base_code__[seq[i]&31])
|
||||
}
|
||||
return kmer
|
||||
}
|
||||
|
||||
// EncodeCanonicalKmer encodes a single k-mer sequence to its canonical form (uint64).
|
||||
// Returns the lexicographically smaller of the k-mer and its reverse complement.
|
||||
// This is the optimal zero-allocation function for encoding a single canonical k-mer.
|
||||
//
|
||||
// Parameters:
|
||||
// - seq: DNA sequence as a byte slice (case insensitive, supports A, C, G, T, U)
|
||||
// - k: k-mer size (must be between 1 and 31)
|
||||
//
|
||||
// Returns:
|
||||
// - canonical k-mer as uint64
|
||||
// - panics if len(seq) != k or k is invalid
|
||||
//
|
||||
// Example:
|
||||
//
|
||||
// canonical := EncodeCanonicalKmer([]byte("ACGT"), 4)
|
||||
func EncodeCanonicalKmer(seq []byte, k int) uint64 {
|
||||
if k < 1 || k > 31 {
|
||||
panic("k must be between 1 and 31")
|
||||
}
|
||||
if len(seq) != k {
|
||||
panic("sequence length must equal k")
|
||||
}
|
||||
|
||||
rcShift := uint((k - 1) * 2)
|
||||
|
||||
var fwd, rvc uint64
|
||||
for i := 0; i < k; i++ {
|
||||
code := uint64(__single_base_code__[seq[i]&31])
|
||||
fwd <<= 2
|
||||
fwd |= code
|
||||
rvc >>= 2
|
||||
rvc |= (code ^ 3) << rcShift
|
||||
}
|
||||
|
||||
if fwd <= rvc {
|
||||
return fwd
|
||||
}
|
||||
return rvc
|
||||
}
|
||||
|
||||
// DecodeKmer converts an encoded k-mer back into its DNA sequence
// (lowercase "acgt"). When buffer is non-nil and holds at least k bytes,
// the result is written into buffer[:k] with no allocation; otherwise a
// fresh slice is allocated.
//
// Example:
//
//	var buf [32]byte
//	seq := DecodeKmer(kmer, 21, buf[:])
func DecodeKmer(kmer uint64, k int, buffer []byte) []byte {
	out := buffer
	if out == nil || len(out) < k {
		out = make([]byte, k)
	} else {
		out = out[:k]
	}

	const alphabet = "acgt"
	// Fill backwards: the low two bits hold the final base.
	for pos := k - 1; pos >= 0; pos-- {
		out[pos] = alphabet[kmer&3]
		kmer >>= 2
	}
	return out
}
|
||||
|
||||
// EncodeKmers converts a DNA sequence to a slice of encoded k-mers.
|
||||
// Each nucleotide is encoded on 2 bits according to __single_base_code__:
|
||||
// - A = 0 (00)
|
||||
// - C = 1 (01)
|
||||
// - G = 2 (10)
|
||||
// - T/U = 3 (11)
|
||||
//
|
||||
// The function returns overlapping k-mers of size k encoded as uint64.
|
||||
// For a sequence of length n, it returns n-k+1 k-mers.
|
||||
//
|
||||
// The maximum k-mer size is 31 (using 62 bits), leaving the top 2 bits
|
||||
// available for error markers (see SetKmerError).
|
||||
//
|
||||
// Parameters:
|
||||
// - seq: DNA sequence as a byte slice (case insensitive, supports A, C, G, T, U)
|
||||
// - k: k-mer size (must be between 1 and 31)
|
||||
// - buffer: optional pre-allocated buffer for results. If nil, a new slice is created.
|
||||
//
|
||||
// Returns:
|
||||
// - slice of uint64 encoded k-mers
|
||||
// - nil if sequence is shorter than k or k is invalid
|
||||
func EncodeKmers(seq []byte, k int, buffer *[]uint64) []uint64 {
|
||||
if k < 1 || k > 31 || len(seq) < k {
|
||||
return nil
|
||||
}
|
||||
|
||||
var result []uint64
|
||||
if buffer == nil {
|
||||
result = make([]uint64, 0, len(seq)-k+1)
|
||||
} else {
|
||||
result = (*buffer)[:0]
|
||||
}
|
||||
|
||||
for kmer := range IterKmers(seq, k) {
|
||||
result = append(result, kmer)
|
||||
}
|
||||
|
||||
return result
|
||||
}
|
||||
|
||||
// IterKmers returns an iterator over all k-mers in the sequence.
|
||||
// No intermediate slice is allocated, making it memory-efficient for
|
||||
// processing k-mers one by one (e.g., adding to a Roaring Bitmap).
|
||||
//
|
||||
// Parameters:
|
||||
// - seq: DNA sequence as a byte slice (case insensitive, supports A, C, G, T, U)
|
||||
// - k: k-mer size (must be between 1 and 31)
|
||||
//
|
||||
// Returns:
|
||||
// - iterator yielding uint64 encoded k-mers
|
||||
//
|
||||
// Example:
|
||||
//
|
||||
// for kmer := range IterKmers(seq, 21) {
|
||||
// bitmap.Add(kmer)
|
||||
// }
|
||||
func IterKmers(seq []byte, k int) iter.Seq[uint64] {
|
||||
return func(yield func(uint64) bool) {
|
||||
if k < 1 || k > 31 || len(seq) < k {
|
||||
return
|
||||
}
|
||||
|
||||
mask := uint64(1)<<(k*2) - 1
|
||||
|
||||
var kmer uint64
|
||||
for i := 0; i < k; i++ {
|
||||
kmer <<= 2
|
||||
kmer |= uint64(__single_base_code__[seq[i]&31])
|
||||
}
|
||||
|
||||
if !yield(kmer) {
|
||||
return
|
||||
}
|
||||
|
||||
for i := k; i < len(seq); i++ {
|
||||
kmer <<= 2
|
||||
kmer |= uint64(__single_base_code__[seq[i]&31])
|
||||
kmer &= mask
|
||||
|
||||
if !yield(kmer) {
|
||||
return
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// IterCanonicalKmersWithErrors returns an iterator over all canonical k-mers
// with error markers for ambiguous bases. No intermediate slice is allocated.
//
// Ambiguous bases (N, R, Y, W, S, K, M, B, D, H, V) are encoded as 0xFF and detected
// during k-mer construction. The error code in bits 62-63 indicates the number of
// ambiguous bases in each k-mer (0=clean, 1-3=error count).
//
// Only valid for odd k ≤ 31 where 2 bits remain unused for error markers.
//
// Parameters:
//   - seq: DNA sequence as a byte slice (case insensitive, supports A, C, G, T, U, and ambiguous bases)
//   - k: k-mer size (must be odd, between 1 and 31)
//
// Returns:
//   - iterator yielding uint64 canonical k-mers with error markers
//
// Example:
//
//	for kmer := range IterCanonicalKmersWithErrors(seq, 21) {
//		if GetKmerError(kmer) == 0 {
//			bitmap.Add(kmer) // Only add clean k-mers
//		}
//	}
func IterCanonicalKmersWithErrors(seq []byte, k int) iter.Seq[uint64] {
	return func(yield func(uint64) bool) {
		// Even k is rejected: canonical tie-breaking and the 2 spare error
		// bits require odd k ≤ 31.
		if k < 1 || k > 31 || k%2 == 0 || len(seq) < k {
			return
		}

		// Keep only the 2k low bits of the forward encoding.
		mask := uint64(1)<<(k*2) - 1

		// Incoming reverse-complement codes enter at the high end of the k-mer.
		rcShift := uint((k - 1) * 2)

		// Running count of ambiguous bases currently inside the window.
		ambiguousCount := 0
		const ambiguousCode = byte(0xFF)

		// Build the first window: fwd is the forward encoding, rvc the
		// encoding of the reverse complement, maintained simultaneously.
		var fwd, rvc uint64
		hasError := false
		for i := 0; i < k; i++ {
			code := __single_base_code_err__[seq[i]&31]

			if code == ambiguousCode {
				ambiguousCount++
				hasError = true
				// Substitute A (code 0) so the ambiguous base still occupies
				// 2 bits; the error marker records that it was ambiguous.
				code = 0
			}

			codeUint := uint64(code)
			fwd <<= 2
			fwd |= codeUint
			rvc >>= 2
			rvc |= (codeUint ^ 3) << rcShift
		}

		// Canonical form: the smaller of forward and reverse complement.
		var canonical uint64
		if fwd <= rvc {
			canonical = fwd
		} else {
			canonical = rvc
		}

		// Stamp the error count (saturated at 3) into bits 62-63.
		if hasError {
			errorCode := uint64(ambiguousCount)
			if errorCode > 3 {
				errorCode = 3
			}
			canonical = SetKmerError(canonical, errorCode)
		}

		if !yield(canonical) {
			return
		}

		// Roll the window over the rest of the sequence.
		for i := k; i < len(seq); i++ {
			// The base leaving the window releases its ambiguity count;
			// its substituted bits are shifted out naturally below.
			outgoingCode := __single_base_code_err__[seq[i-k]&31]
			if outgoingCode == ambiguousCode {
				ambiguousCount--
			}

			code := __single_base_code_err__[seq[i]&31]
			if code == ambiguousCode {
				ambiguousCount++
				code = 0
			}

			codeUint := uint64(code)

			fwd <<= 2
			fwd |= codeUint
			fwd &= mask

			rvc >>= 2
			rvc |= (codeUint ^ 3) << rcShift

			if fwd <= rvc {
				canonical = fwd
			} else {
				canonical = rvc
			}

			if ambiguousCount > 0 {
				errorCode := uint64(ambiguousCount)
				if errorCode > 3 {
					errorCode = 3
				}
				canonical = SetKmerError(canonical, errorCode)
			}

			if !yield(canonical) {
				return
			}
		}
	}
}
|
||||
|
||||
// IterCanonicalKmers returns an iterator over all canonical k-mers.
|
||||
// No intermediate slice is allocated, making it memory-efficient.
|
||||
//
|
||||
// Parameters:
|
||||
// - seq: DNA sequence as a byte slice (case insensitive, supports A, C, G, T, U)
|
||||
// - k: k-mer size (must be between 1 and 31)
|
||||
//
|
||||
// Returns:
|
||||
// - iterator yielding uint64 canonical k-mers
|
||||
//
|
||||
// Example:
|
||||
//
|
||||
// for canonical := range IterCanonicalKmers(seq, 21) {
|
||||
// bitmap.Add(canonical)
|
||||
// }
|
||||
func IterCanonicalKmers(seq []byte, k int) iter.Seq[uint64] {
|
||||
return func(yield func(uint64) bool) {
|
||||
if k < 1 || k > 31 || len(seq) < k {
|
||||
return
|
||||
}
|
||||
|
||||
mask := uint64(1)<<(k*2) - 1
|
||||
|
||||
rcShift := uint((k - 1) * 2)
|
||||
|
||||
var fwd, rvc uint64
|
||||
for i := 0; i < k; i++ {
|
||||
code := uint64(__single_base_code__[seq[i]&31])
|
||||
fwd <<= 2
|
||||
fwd |= code
|
||||
rvc >>= 2
|
||||
rvc |= (code ^ 3) << rcShift
|
||||
}
|
||||
|
||||
var canonical uint64
|
||||
if fwd <= rvc {
|
||||
canonical = fwd
|
||||
} else {
|
||||
canonical = rvc
|
||||
}
|
||||
|
||||
if !yield(canonical) {
|
||||
return
|
||||
}
|
||||
|
||||
for i := k; i < len(seq); i++ {
|
||||
code := uint64(__single_base_code__[seq[i]&31])
|
||||
|
||||
fwd <<= 2
|
||||
fwd |= code
|
||||
fwd &= mask
|
||||
|
||||
rvc >>= 2
|
||||
rvc |= (code ^ 3) << rcShift
|
||||
|
||||
if fwd <= rvc {
|
||||
canonical = fwd
|
||||
} else {
|
||||
canonical = rvc
|
||||
}
|
||||
|
||||
if !yield(canonical) {
|
||||
return
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// SuperKmer represents a maximal subsequence where all consecutive k-mers
// share the same minimizer. A minimizer is the smallest canonical m-mer
// among the (k-m+1) m-mers contained in a k-mer.
type SuperKmer struct {
	Minimizer uint64 // The canonical minimizer value (normalized m-mer)
	Start     int    // Starting position in the original sequence (0-indexed)
	End       int    // Ending position (exclusive, like Go slice notation)
	Sequence  []byte // The actual DNA subsequence seq[Start:End]; shares the input's backing array
}
|
||||
|
||||
// dequeItem represents an element in the monotone deque used for
// tracking minimizers in a sliding window (see ExtractSuperKmers).
// The deque is kept increasing by canonical value, so its front is
// always the current window's minimizer.
type dequeItem struct {
	position  int    // Position of the m-mer in the sequence
	canonical uint64 // Canonical (normalized) m-mer value
}
|
||||
|
||||
// ExtractSuperKmers extracts super k-mers from a DNA sequence.
// A super k-mer is a maximal subsequence where all consecutive k-mers
// share the same minimizer. The minimizer of a k-mer is the smallest
// canonical m-mer among its (k-m+1) constituent m-mers.
//
// The algorithm uses:
//   - Simultaneous forward/reverse m-mer encoding for O(1) canonical m-mer computation
//   - Monotone deque for O(1) amortized minimizer tracking per position
//
// The maximum k-mer size is 31 (using 62 bits), leaving the top 2 bits
// available for error markers if needed.
//
// Parameters:
//   - seq: DNA sequence as a byte slice (case insensitive, supports A, C, G, T, U)
//   - k: k-mer size (must be between m+1 and 31)
//   - m: minimizer size (must be between 1 and k-1)
//   - buffer: optional pre-allocated buffer for results. If nil, a new slice is created.
//
// Returns:
//   - slice of SuperKmer structs representing maximal subsequences
//   - nil if parameters are invalid or sequence is too short
//
// Time complexity: O(n) where n is the sequence length
// Space complexity: O(k-m+1) for the deque + O(number of super k-mers) for results
func ExtractSuperKmers(seq []byte, k int, m int, buffer *[]SuperKmer) []SuperKmer {
	if m < 1 || m >= k || k < 2 || k > 31 || len(seq) < k {
		return nil
	}

	// Result buffer: reuse the caller's if provided, otherwise guess one
	// super k-mer per k bases as a starting capacity.
	var result []SuperKmer
	if buffer == nil {
		estimatedSize := len(seq) / k
		if estimatedSize < 1 {
			estimatedSize = 1
		}
		result = make([]SuperKmer, 0, estimatedSize)
	} else {
		result = (*buffer)[:0]
	}

	// Monotone deque over the (k-m+1) m-mers of the current k-mer window.
	deque := make([]dequeItem, 0, k-m+1)

	mMask := uint64(1)<<(m*2) - 1
	rcShift := uint((m - 1) * 2)

	// Pre-load the first m-1 bases so the main loop can emit one complete
	// m-mer per position starting at pos = m-1.
	var fwdMmer, rvcMmer uint64
	for i := 0; i < m-1 && i < len(seq); i++ {
		code := uint64(__single_base_code__[seq[i]&31])
		fwdMmer = (fwdMmer << 2) | code
		rvcMmer = (rvcMmer >> 2) | ((code ^ 3) << rcShift)
	}

	superKmerStart := 0
	var currentMinimizer uint64
	firstKmer := true

	for pos := m - 1; pos < len(seq); pos++ {
		// Roll the forward and reverse-complement m-mer encodings.
		code := uint64(__single_base_code__[seq[pos]&31])
		fwdMmer = ((fwdMmer << 2) | code) & mMask
		rvcMmer = (rvcMmer >> 2) | ((code ^ 3) << rcShift)

		// Canonical m-mer: the smaller of the two strand encodings.
		canonical := fwdMmer
		if rvcMmer < fwdMmer {
			canonical = rvcMmer
		}

		// Start position of the m-mer that just completed.
		mmerPos := pos - m + 1

		// Drop m-mers that have slid out of the current k-mer window.
		if pos >= k-1 {
			windowStart := pos - k + 1
			for len(deque) > 0 && deque[0].position < windowStart {
				deque = deque[1:]
			}
		}

		// Maintain monotonicity: pop every tail entry that is not strictly
		// smaller than the incoming m-mer (>= keeps the rightmost of equals).
		for len(deque) > 0 && deque[len(deque)-1].canonical >= canonical {
			deque = deque[:len(deque)-1]
		}

		deque = append(deque, dequeItem{position: mmerPos, canonical: canonical})

		// Once a full k-mer is in view, its minimizer is the deque front.
		if pos >= k-1 {
			newMinimizer := deque[0].canonical
			kmerStart := pos - k + 1

			if firstKmer {
				currentMinimizer = newMinimizer
				firstKmer = false
			} else if newMinimizer != currentMinimizer {
				// Minimizer changed: close the current super k-mer.
				// It ends just before the new k-mer's last base, so the new
				// k-mer starts a fresh super k-mer.
				endPos := kmerStart + k - 1
				superKmer := SuperKmer{
					Minimizer: currentMinimizer,
					Start:     superKmerStart,
					End:       endPos,
					Sequence:  seq[superKmerStart:endPos],
				}
				result = append(result, superKmer)

				superKmerStart = kmerStart
				currentMinimizer = newMinimizer
			}
		}
	}

	// Flush the last open super k-mer (if at least one k-mer was seen);
	// it always extends to the end of the sequence.
	if !firstKmer {
		superKmer := SuperKmer{
			Minimizer: currentMinimizer,
			Start:     superKmerStart,
			End:       len(seq),
			Sequence:  seq[superKmerStart:],
		}
		result = append(result, superKmer)
	}

	return result
}
|
||||
|
||||
// ReverseComplement computes the reverse complement of an encoded k-mer.
// The k-mer is encoded with 2 bits per nucleotide (A=00, C=01, G=10, T=11).
// The complement is: A↔T (00↔11), C↔G (01↔10), which is simply XOR with 11.
// The reverse swaps the order of 2-bit pairs.
//
// For k-mers with error markers (top 2 bits), the error bits are preserved
// and transferred to the reverse complement.
//
// Parameters:
//   - kmer: the encoded k-mer (possibly with error bits in positions 62-63)
//   - k: the k-mer size (number of nucleotides)
//
// Returns:
//   - the reverse complement of the k-mer with error bits preserved
func ReverseComplement(kmer uint64, k int) uint64 {
	// Save the error marker bits so they can be re-attached at the end.
	errorBits := kmer & KmerErrorMask

	mask := uint64(1)<<(k*2) - 1
	// Complement all bases at once (XOR with 11 per base); the mask also
	// clears the error bits and anything above the 2k payload bits.
	rc := (^kmer) & mask

	// Reverse the order of the 32 2-bit pairs with a logarithmic
	// swap ladder: adjacent pairs, then nibbles, bytes, halves, words.
	rc = ((rc & 0x3333333333333333) << 2) | ((rc & 0xCCCCCCCCCCCCCCCC) >> 2)
	rc = ((rc & 0x0F0F0F0F0F0F0F0F) << 4) | ((rc & 0xF0F0F0F0F0F0F0F0) >> 4)
	rc = ((rc & 0x00FF00FF00FF00FF) << 8) | ((rc & 0xFF00FF00FF00FF00) >> 8)
	rc = ((rc & 0x0000FFFF0000FFFF) << 16) | ((rc & 0xFFFF0000FFFF0000) >> 16)
	rc = (rc << 32) | (rc >> 32)

	// The reversed payload now occupies the 2k high bits; shift it back down.
	rc >>= (64 - k*2)

	// Restore the error marker bits on the reverse complement.
	rc |= errorBits

	return rc
}
|
||||
|
||||
// CanonicalKmer returns the lexicographically smaller of a k-mer and its
|
||||
// reverse complement. This canonical form ensures that a k-mer and its
|
||||
// reverse complement map to the same value.
|
||||
//
|
||||
// This implements REVERSE COMPLEMENT canonicalization (biological canonical form).
|
||||
//
|
||||
// Parameters:
|
||||
// - kmer: the encoded k-mer
|
||||
// - k: the k-mer size (number of nucleotides)
|
||||
//
|
||||
// Returns:
|
||||
// - the canonical k-mer
|
||||
func CanonicalKmer(kmer uint64, k int) uint64 {
|
||||
rc := ReverseComplement(kmer, k)
|
||||
if rc < kmer {
|
||||
return rc
|
||||
}
|
||||
return kmer
|
||||
}
|
||||
|
||||
// NormalizeCircular returns the lexicographically smallest circular
// rotation of a k-mer. It is used for entropy calculations in
// low-complexity masking.
//
// This implements CIRCULAR PERMUTATION normalization (rotation-based
// canonicalization), e.g. ACGT → min(ACGT, CGTA, GTAC, TACG).
// It is DIFFERENT from reverse-complement canonicalization.
//
// Parameters:
//   - kmer: the encoded k-mer
//   - k: the k-mer size (number of nucleotides); out-of-range k returns
//     the input unchanged
//
// Returns:
//   - the lexicographically smallest circular rotation
//
// Time complexity: O(k) - checks all k rotations
func NormalizeCircular(kmer uint64, k int) uint64 {
	if k < 1 || k > 31 {
		return kmer
	}

	width := uint(k * 2)
	mask := uint64(1)<<width - 1

	best := kmer
	rotated := kmer
	// Produce each of the k rotations in turn and keep the smallest.
	// After k rotations the value returns to the starting k-mer.
	for r := 0; r < k; r++ {
		top := (rotated >> (width - 2)) & 3
		rotated = ((rotated << 2) | top) & mask
		if rotated < best {
			best = rotated
		}
	}

	return best
}
|
||||
|
||||
// EncodeCircularCanonicalKmer encodes a k-mer and returns its lexicographically
|
||||
// smallest circular rotation. This is optimized for single k-mer encoding with
|
||||
// circular canonicalization.
|
||||
//
|
||||
// This implements CIRCULAR PERMUTATION canonicalization, used for entropy-based
|
||||
// low-complexity masking. This is DIFFERENT from EncodeCanonicalKmer which
|
||||
// uses reverse complement canonicalization.
|
||||
//
|
||||
// Parameters:
|
||||
// - seq: DNA sequence as a byte slice (case insensitive, supports A, C, G, T, U)
|
||||
// - k: k-mer size (must be between 1 and 31)
|
||||
//
|
||||
// Returns:
|
||||
// - canonical k-mer as uint64 (smallest circular rotation)
|
||||
// - panics if len(seq) != k or k is invalid
|
||||
//
|
||||
// Example:
|
||||
//
|
||||
// canonical := EncodeCircularCanonicalKmer([]byte("ACGT"), 4)
|
||||
func EncodeCircularCanonicalKmer(seq []byte, k int) uint64 {
|
||||
kmer := EncodeKmer(seq, k)
|
||||
return NormalizeCircular(kmer, k)
|
||||
}
|
||||
|
||||
// CanonicalCircularKmerCount returns the number of unique canonical k-mers
|
||||
// under circular permutation normalization for DNA sequences (4-letter alphabet).
|
||||
//
|
||||
// This counts equivalence classes where k-mers are considered the same if one
|
||||
// is a circular rotation of another (e.g., "ACGT", "CGTA", "GTAC", "TACG" are
|
||||
// all equivalent).
|
||||
//
|
||||
// Uses Moreau's necklace-counting formula for exact counts:
|
||||
//
|
||||
// N(n, a) = (1/n) * Σ φ(d) * a^(n/d)
|
||||
//
|
||||
// where the sum is over all divisors d of n, and φ is Euler's totient function.
|
||||
//
|
||||
// Parameters:
|
||||
// - k: k-mer size
|
||||
//
|
||||
// Returns:
|
||||
// - number of unique canonical k-mers under circular rotation
|
||||
//
|
||||
// Example:
|
||||
//
|
||||
// count := CanonicalCircularKmerCount(4) // Returns 70 (not 256)
|
||||
func CanonicalCircularKmerCount(k int) int {
|
||||
// Hardcoded exact counts for k=1 to 6 (optimization)
|
||||
switch k {
|
||||
case 1:
|
||||
return 4
|
||||
case 2:
|
||||
return 10
|
||||
case 3:
|
||||
return 24
|
||||
case 4:
|
||||
return 70
|
||||
case 5:
|
||||
return 208
|
||||
case 6:
|
||||
return 700
|
||||
default:
|
||||
// For k>6, use Moreau's necklace-counting formula
|
||||
return necklaceCount(k, 4)
|
||||
}
|
||||
}
|
||||
|
||||
// eulerTotient computes Euler's totient function φ(n): the number of
// integers in [1, n] that are coprime with n. Returns 0 for n <= 0.
//
// Uses trial division over prime factors and the identity
// φ(n) = n * Π (1 - 1/p) over the distinct primes p dividing n.
func eulerTotient(n int) int {
	if n <= 0 {
		return 0
	}

	phi := n
	remaining := n

	// Strip each prime factor once and apply its (p-1)/p contribution.
	for f := 2; f*f <= remaining; f++ {
		if remaining%f != 0 {
			continue
		}
		phi -= phi / f
		for remaining%f == 0 {
			remaining /= f
		}
	}

	// Whatever is left above 1 is a single remaining prime factor.
	if remaining > 1 {
		phi -= phi / remaining
	}

	return phi
}
|
||||
|
||||
// divisors returns all divisors of n in ascending order.
// Returns an empty slice for n <= 0.
//
// Divisors are collected in one O(√n) scan: for each i with i*i <= n and
// n%i == 0, i is a "small" divisor (found in ascending order) and n/i is
// a "large" one (found in descending order). Appending the large list in
// reverse therefore yields a fully sorted result without the O(d²)
// bubble sort the previous implementation used.
func divisors(n int) []int {
	if n <= 0 {
		return []int{}
	}

	small := []int{}
	large := []int{}
	for i := 1; i*i <= n; i++ {
		if n%i == 0 {
			small = append(small, i)
			// Skip the cofactor when i is the exact square root of n,
			// otherwise it would be listed twice.
			if i != n/i {
				large = append(large, n/i)
			}
		}
	}

	// Large divisors were produced in descending order; reverse-append
	// them so the final slice is ascending.
	for j := len(large) - 1; j >= 0; j-- {
		small = append(small, large[j])
	}

	return small
}
|
||||
|
||||
// necklaceCount computes the number of distinct necklaces (equivalence classes
|
||||
// under rotation) for sequences of length n over an alphabet of size a.
|
||||
// Uses Moreau's necklace-counting formula:
|
||||
//
|
||||
// N(n, a) = (1/n) * Σ φ(d) * a^(n/d)
|
||||
//
|
||||
// where the sum is over all divisors d of n, and φ is Euler's totient function.
|
||||
func necklaceCount(n, alphabetSize int) int {
|
||||
if n <= 0 {
|
||||
return 0
|
||||
}
|
||||
|
||||
divs := divisors(n)
|
||||
sum := 0
|
||||
|
||||
for _, d := range divs {
|
||||
// Compute a^(n/d)
|
||||
power := 1
|
||||
exp := n / d
|
||||
for i := 0; i < exp; i++ {
|
||||
power *= alphabetSize
|
||||
}
|
||||
|
||||
sum += eulerTotient(d) * power
|
||||
}
|
||||
|
||||
return sum / n
|
||||
}
|
||||
|
||||
// EncodeCanonicalKmersWithErrors converts a DNA sequence to a slice of canonical k-mers
|
||||
// with error markers for ambiguous bases (N, R, Y, W, S, K, M, B, D, H, V).
|
||||
//
|
||||
// Ambiguous bases are encoded as 0xFF by __single_base_code__ and detected during
|
||||
// k-mer construction. The error code in bits 62-63 indicates the number of ambiguous
|
||||
// bases in each k-mer:
|
||||
// - errorCode 0: no ambiguous bases (clean k-mer)
|
||||
// - errorCode 1: 1 ambiguous base
|
||||
// - errorCode 2: 2 ambiguous bases
|
||||
// - errorCode 3: 3 or more ambiguous bases
|
||||
//
|
||||
// Only valid for odd k ≤ 31 where 2 bits remain unused for error markers.
|
||||
//
|
||||
// Parameters:
|
||||
// - seq: DNA sequence as a byte slice (case insensitive, supports A, C, G, T, U, and ambiguous bases)
|
||||
// - k: k-mer size (must be odd, between 1 and 31)
|
||||
// - buffer: optional pre-allocated buffer for results. If nil, a new slice is created.
|
||||
//
|
||||
// Returns:
|
||||
// - slice of uint64 canonical k-mers with error markers
|
||||
// - nil if sequence is shorter than k, k is invalid, or k is even
|
||||
func EncodeCanonicalKmersWithErrors(seq []byte, k int, buffer *[]uint64) []uint64 {
|
||||
if k < 1 || k > 31 || k%2 == 0 || len(seq) < k {
|
||||
return nil
|
||||
}
|
||||
|
||||
var result []uint64
|
||||
if buffer == nil {
|
||||
result = make([]uint64, 0, len(seq)-k+1)
|
||||
} else {
|
||||
result = (*buffer)[:0]
|
||||
}
|
||||
|
||||
for kmer := range IterCanonicalKmersWithErrors(seq, k) {
|
||||
result = append(result, kmer)
|
||||
}
|
||||
|
||||
return result
|
||||
}
|
||||
|
||||
// EncodeCanonicalKmers converts a DNA sequence to a slice of canonical k-mers.
|
||||
// Each k-mer is replaced by the lexicographically smaller of itself and its
|
||||
// reverse complement. This ensures that forward and reverse complement sequences
|
||||
// produce the same k-mer set.
|
||||
//
|
||||
// The maximum k-mer size is 31 (using 62 bits), leaving the top 2 bits
|
||||
// available for error markers (see SetKmerError).
|
||||
//
|
||||
// Parameters:
|
||||
// - seq: DNA sequence as a byte slice (case insensitive, supports A, C, G, T, U)
|
||||
// - k: k-mer size (must be between 1 and 31)
|
||||
// - buffer: optional pre-allocated buffer for results. If nil, a new slice is created.
|
||||
//
|
||||
// Returns:
|
||||
// - slice of uint64 canonical k-mers
|
||||
// - nil if sequence is shorter than k or k is invalid
|
||||
func EncodeCanonicalKmers(seq []byte, k int, buffer *[]uint64) []uint64 {
|
||||
if k < 1 || k > 31 || len(seq) < k {
|
||||
return nil
|
||||
}
|
||||
|
||||
var result []uint64
|
||||
if buffer == nil {
|
||||
result = make([]uint64, 0, len(seq)-k+1)
|
||||
} else {
|
||||
result = (*buffer)[:0]
|
||||
}
|
||||
|
||||
for kmer := range IterCanonicalKmers(seq, k) {
|
||||
result = append(result, kmer)
|
||||
}
|
||||
|
||||
return result
|
||||
}
|
||||
1220
pkg/obikmer/encodekmer_test.go
Normal file
1220
pkg/obikmer/encodekmer_test.go
Normal file
File diff suppressed because it is too large
Load Diff
310
pkg/obikmer/frequency_filter.go
Normal file
310
pkg/obikmer/frequency_filter.go
Normal file
@@ -0,0 +1,310 @@
|
||||
package obikmer
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
|
||||
"git.metabarcoding.org/obitools/obitools4/obitools4/pkg/obiseq"
|
||||
)
|
||||
|
||||
// FrequencyFilter filters k-mers by minimum frequency.
// It is a specialization of KmerSetGroup where level i contains the
// k-mers seen at least i+1 times; the last level therefore holds exactly
// the k-mers that reached the required frequency.
type FrequencyFilter struct {
	*KmerSetGroup     // Group of KmerSet (one per frequency level; level i = seen ≥ i+1 times)
	MinFreq       int // v - minimum number of occurrences required for a k-mer to pass
}
|
||||
|
||||
// NewFrequencyFilter creates a new frequency filter.
//
// Parameters:
//   - k: k-mer size
//   - minFreq: minimum number of occurrences required (v); one KmerSet
//     level is created per occurrence count from 1 to minFreq
func NewFrequencyFilter(k, minFreq int) *FrequencyFilter {
	ff := &FrequencyFilter{
		KmerSetGroup: NewKmerSetGroup(k, minFreq),
		MinFreq:      minFreq,
	}

	// Initialize group metadata (used by LoadFrequencyFilter to validate)
	ff.SetAttribute("type", "FrequencyFilter")
	ff.SetAttribute("min_freq", minFreq)

	// Initialize metadata for each level
	for i := 0; i < minFreq; i++ {
		level := ff.Get(i)
		level.SetAttribute("level", i)
		level.SetAttribute("min_occurrences", i+1)
		level.SetId(fmt.Sprintf("level_%d", i))
	}

	return ff
}
|
||||
|
||||
// AddSequence adds all canonical k-mers from a sequence to the filter.
// Uses an iterator to avoid allocating an intermediate k-mer vector.
func (ff *FrequencyFilter) AddSequence(seq *obiseq.BioSequence) {
	rawSeq := seq.Sequence()
	for canonical := range IterCanonicalKmers(rawSeq, ff.K()) {
		ff.AddKmerCode(canonical)
	}
}
|
||||
|
||||
// AddKmerCode adds an encoded k-mer to the filter (main algorithm).
// Each call promotes the k-mer one level: it is added to the first level
// that does not yet contain it. After n insertions the k-mer is present
// in levels 0..n-1, so level i holds k-mers seen at least i+1 times.
// Counting saturates at MinFreq.
func (ff *FrequencyFilter) AddKmerCode(kmer uint64) {
	// Find the current level of the k-mer
	c := 0
	for c < ff.MinFreq && ff.Get(c).Contains(kmer) {
		c++
	}

	// Add to next level (if not yet at maximum)
	if c < ff.MinFreq {
		ff.Get(c).AddKmerCode(kmer)
	}
}
|
||||
|
||||
// AddCanonicalKmerCode adds an encoded k-mer to the filter after
// normalizing it to its canonical (reverse-complement-aware) form.
func (ff *FrequencyFilter) AddCanonicalKmerCode(kmer uint64) {
	canonical := CanonicalKmer(kmer, ff.K())
	ff.AddKmerCode(canonical)
}
|
||||
|
||||
// AddKmer adds a k-mer to the filter by encoding the sequence.
// The sequence must have exactly k nucleotides.
// Zero-allocation: encodes directly without creating an intermediate slice.
func (ff *FrequencyFilter) AddKmer(seq []byte) {
	kmer := EncodeKmer(seq, ff.K())
	ff.AddKmerCode(kmer)
}
|
||||
|
||||
// AddCanonicalKmer adds a canonical k-mer to the filter by encoding the
// sequence. The sequence must have exactly k nucleotides.
// Zero-allocation: encodes directly in canonical form without creating an
// intermediate slice.
func (ff *FrequencyFilter) AddCanonicalKmer(seq []byte) {
	canonical := EncodeCanonicalKmer(seq, ff.K())
	ff.AddKmerCode(canonical)
}
|
||||
|
||||
// GetFilteredSet returns a copy of the KmerSet of k-mers with
// frequency ≥ minFreq.
// NOTE(review): assumes MinFreq ≥ 1 — with MinFreq == 0, Get(-1) would be
// called; confirm the constructor enforces a positive minFreq.
func (ff *FrequencyFilter) GetFilteredSet() *KmerSet {
	// Filtered k-mers are in the last level
	return ff.Get(ff.MinFreq - 1).Copy()
}
|
||||
|
||||
// GetKmersAtLevel returns a copy of the KmerSet of k-mers seen at least
// (level+1) times. level must be in [0, minFreq-1]; when Get returns nil
// for an out-of-range level, an empty KmerSet is returned instead.
func (ff *FrequencyFilter) GetKmersAtLevel(level int) *KmerSet {
	ks := ff.Get(level)
	if ks == nil {
		return NewKmerSet(ff.K())
	}
	return ks.Copy()
}
|
||||
|
||||
// Stats returns statistics on the frequency levels: per-level cardinality
// and memory usage, the total memory, and the number of filtered k-mers.
// NOTE(review): assumes MinFreq ≥ 1; with MinFreq == 0 the final
// Levels[MinFreq-1] access would panic — confirm the constructor enforces it.
func (ff *FrequencyFilter) Stats() FrequencyFilterStats {
	stats := FrequencyFilterStats{
		MinFreq: ff.MinFreq,
		Levels:  make([]LevelStats, ff.MinFreq),
	}

	for i := 0; i < ff.MinFreq; i++ {
		ks := ff.Get(i)
		card := ks.Len()
		sizeBytes := ks.MemoryUsage()

		stats.Levels[i] = LevelStats{
			Level:       i + 1, // Level 1 = freq ≥ 1
			Cardinality: card,
			SizeBytes:   sizeBytes,
		}

		stats.TotalBytes += sizeBytes
	}

	// The last level contains the result
	stats.FilteredKmers = stats.Levels[ff.MinFreq-1].Cardinality

	return stats
}
|
||||
|
||||
// FrequencyFilterStats contains the filter statistics gathered by Stats().
type FrequencyFilterStats struct {
	MinFreq       int          // Minimum frequency the filter was built with
	FilteredKmers uint64       // K-mers with freq ≥ minFreq (last level's cardinality)
	TotalBytes    uint64       // Total memory used by all levels
	Levels        []LevelStats // Per-level breakdown (index i = freq ≥ i+1)
}
|
||||
|
||||
// LevelStats contains the statistics of one frequency level.
type LevelStats struct {
	Level       int    // K-mers in this level have freq ≥ Level
	Cardinality uint64 // Number of k-mers stored at this level
	SizeBytes   uint64 // Size of this level's bitmap in bytes
}
|
||||
|
||||
// String formats the filter statistics as a human-readable, multi-line report.
func (ffs FrequencyFilterStats) String() string {
	result := fmt.Sprintf(`Frequency Filter Statistics (minFreq=%d):
Filtered k-mers (freq≥%d): %d
Total memory: %.2f MB

Level breakdown:
`, ffs.MinFreq, ffs.MinFreq, ffs.FilteredKmers, float64(ffs.TotalBytes)/1024/1024)

	// Append one line per frequency level.
	for _, level := range ffs.Levels {
		result += fmt.Sprintf("  freq≥%d: %d k-mers (%.2f MB)\n",
			level.Level,
			level.Cardinality,
			float64(level.SizeBytes)/1024/1024)
	}

	return result
}
|
||||
|
||||
// Clear releases the memory held by all frequency levels.
// (inherited from KmerSetGroup but redefined here for clarity)
func (ff *FrequencyFilter) Clear() {
	ff.KmerSetGroup.Clear()
}
|
||||
|
||||
// ==================================
|
||||
// BATCH PROCESSING
|
||||
// ==================================
|
||||
|
||||
// AddSequences adds the k-mers of multiple sequences to the filter in batch.
func (ff *FrequencyFilter) AddSequences(sequences *obiseq.BioSequenceSlice) {
	for _, seq := range *sequences {
		ff.AddSequence(seq)
	}
}
|
||||
|
||||
// ==================================
|
||||
// PERSISTENCE
|
||||
// ==================================
|
||||
|
||||
// Save writes the FrequencyFilter to a directory, using the serialization
// format of the underlying KmerSetGroup. The saved metadata include the
// type "FrequencyFilter" and min_freq.
//
// Layout:
//   - directory/metadata.{toml,yaml,json} - filter metadata
//   - directory/set_0.roaring - k-mers seen ≥1 times
//   - directory/set_1.roaring - k-mers seen ≥2 times
//   - ...
//   - directory/set_{minFreq-1}.roaring - k-mers seen ≥minFreq times
//
// Parameters:
//   - directory: destination directory
//   - format: metadata format (FormatTOML, FormatYAML, FormatJSON)
//
// Example:
//
//	err := ff.Save("./my_filter", obikmer.FormatTOML)
func (ff *FrequencyFilter) Save(directory string, format MetadataFormat) error {
	// Delegate to KmerSetGroup, which already handles everything
	return ff.KmerSetGroup.Save(directory, format)
}
|
||||
|
||||
// LoadFrequencyFilter loads a FrequencyFilter from a directory and checks
// that the stored metadata actually describe a FrequencyFilter.
//
// Parameters:
//   - directory: source directory
//
// Returns:
//   - *FrequencyFilter: the loaded filter
//   - error: if loading fails or the data is not a FrequencyFilter
//
// Example:
//
//	ff, err := obikmer.LoadFrequencyFilter("./my_filter")
func LoadFrequencyFilter(directory string) (*FrequencyFilter, error) {
	// Load the underlying KmerSetGroup
	ksg, err := LoadKmerSetGroup(directory)
	if err != nil {
		return nil, err
	}

	// Check that the loaded data really is a FrequencyFilter
	if typeAttr, ok := ksg.GetAttribute("type"); !ok || typeAttr != "FrequencyFilter" {
		return nil, fmt.Errorf("loaded data is not a FrequencyFilter (type=%v)", typeAttr)
	}

	// Retrieve min_freq from the metadata
	minFreqAttr, ok := ksg.GetIntAttribute("min_freq")
	if !ok {
		return nil, fmt.Errorf("FrequencyFilter missing min_freq attribute")
	}

	// Rebuild the FrequencyFilter around the loaded group
	ff := &FrequencyFilter{
		KmerSetGroup: ksg,
		MinFreq:      minFreqAttr,
	}

	return ff, nil
}
|
||||
|
||||
// ==================================
|
||||
// UTILITIES
|
||||
// ==================================
|
||||
|
||||
// Contains reports whether a k-mer has reached the minimum frequency.
// The k-mer is normalized to its canonical form before the lookup.
func (ff *FrequencyFilter) Contains(kmer uint64) bool {
	canonical := CanonicalKmer(kmer, ff.K())
	return ff.Get(ff.MinFreq - 1).Contains(canonical)
}
|
||||
|
||||
// GetFrequency returns the approximate frequency of a k-mer: the highest
// level reached, i.e. a lower bound (freq ≥ returned value). Because
// counting saturates, the result never exceeds MinFreq.
func (ff *FrequencyFilter) GetFrequency(kmer uint64) int {
	canonical := CanonicalKmer(kmer, ff.K())

	freq := 0
	for i := 0; i < ff.MinFreq; i++ {
		if ff.Get(i).Contains(canonical) {
			freq = i + 1
		} else {
			break
		}
	}

	return freq
}
|
||||
|
||||
// Len returns the number of filtered k-mers, or the count at a specific level.
// Without argument: number of k-mers with freq ≥ minFreq (last level).
// With a level argument: number of k-mers with freq ≥ (level+1).
// Example: Len() for the filtered k-mers, Len(2) for freq ≥ 3.
// (inherited from KmerSetGroup but redefined for documentation purposes)
func (ff *FrequencyFilter) Len(level ...int) uint64 {
	return ff.KmerSetGroup.Len(level...)
}
|
||||
|
||||
// MemoryUsage returns the total memory used by all levels, in bytes.
// (inherited from KmerSetGroup but redefined here for clarity)
func (ff *FrequencyFilter) MemoryUsage() uint64 {
	return ff.KmerSetGroup.MemoryUsage()
}
|
||||
|
||||
// ==================================
|
||||
// COMPARISON WITH OTHER APPROACHES
|
||||
// ==================================
|
||||
|
||||
// CompareWithSimpleMap compares the filter's memory footprint with the
// estimated footprint of a plain map[uint64]uint32 counter over the same
// k-mers, and returns the comparison as a formatted report.
func (ff *FrequencyFilter) CompareWithSimpleMap() string {
	// All distinct k-mers ever seen live in level 0.
	totalKmers := ff.Get(0).Len()

	simpleMapBytes := totalKmers * 24 // ~24 bytes per map entry (estimate)
	roaringBytes := ff.MemoryUsage()

	reduction := float64(simpleMapBytes) / float64(roaringBytes)

	return fmt.Sprintf(`Memory Comparison for %d k-mers:
Simple map[uint64]uint32: %.2f MB
Roaring filter (v=%d): %.2f MB
Reduction: %.1fx
`,
		totalKmers,
		float64(simpleMapBytes)/1024/1024,
		ff.MinFreq,
		float64(roaringBytes)/1024/1024,
		reduction,
	)
}
|
||||
217
pkg/obikmer/kmer_set.go
Normal file
217
pkg/obikmer/kmer_set.go
Normal file
@@ -0,0 +1,217 @@
|
||||
package obikmer
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
|
||||
"git.metabarcoding.org/obitools/obitools4/obitools4/pkg/obiseq"
|
||||
"github.com/RoaringBitmap/roaring/roaring64"
|
||||
)
|
||||
|
||||
// KmerSet wraps a set of k-mers stored in a Roaring Bitmap and provides
// utility methods for manipulating k-mer sets.
type KmerSet struct {
	id       string                 // Unique identifier of the KmerSet
	k        int                    // Size of k-mers (immutable after construction)
	bitmap   *roaring64.Bitmap      // Bitmap containing the encoded k-mers
	Metadata map[string]interface{} // User metadata (key = atomic value)
}
|
||||
|
||||
// NewKmerSet creates a new empty KmerSet for k-mers of size k.
func NewKmerSet(k int) *KmerSet {
	return &KmerSet{
		k:        k,
		bitmap:   roaring64.New(),
		Metadata: make(map[string]interface{}),
	}
}
|
||||
|
||||
// NewKmerSetFromBitmap creates a KmerSet from an existing bitmap
|
||||
func NewKmerSetFromBitmap(k int, bitmap *roaring64.Bitmap) *KmerSet {
|
||||
return &KmerSet{
|
||||
k: k,
|
||||
bitmap: bitmap,
|
||||
Metadata: make(map[string]interface{}),
|
||||
}
|
||||
}
|
||||
|
||||
// K returns the size of k-mers (immutable)
|
||||
func (ks *KmerSet) K() int {
|
||||
return ks.k
|
||||
}
|
||||
|
||||
// AddKmerCode adds an encoded k-mer to the set
|
||||
func (ks *KmerSet) AddKmerCode(kmer uint64) {
|
||||
ks.bitmap.Add(kmer)
|
||||
}
|
||||
|
||||
// AddCanonicalKmerCode adds an encoded canonical k-mer to the set
|
||||
func (ks *KmerSet) AddCanonicalKmerCode(kmer uint64) {
|
||||
canonical := CanonicalKmer(kmer, ks.k)
|
||||
ks.bitmap.Add(canonical)
|
||||
}
|
||||
|
||||
// AddKmer adds a k-mer to the set by encoding the sequence
|
||||
// The sequence must have exactly k nucleotides
|
||||
// Zero-allocation: encodes directly without creating an intermediate slice
|
||||
func (ks *KmerSet) AddKmer(seq []byte) {
|
||||
kmer := EncodeKmer(seq, ks.k)
|
||||
ks.bitmap.Add(kmer)
|
||||
}
|
||||
|
||||
// AddCanonicalKmer adds a canonical k-mer to the set by encoding the sequence
|
||||
// The sequence must have exactly k nucleotides
|
||||
// Zero-allocation: encodes directly in canonical form without creating an intermediate slice
|
||||
func (ks *KmerSet) AddCanonicalKmer(seq []byte) {
|
||||
canonical := EncodeCanonicalKmer(seq, ks.k)
|
||||
ks.bitmap.Add(canonical)
|
||||
}
|
||||
|
||||
// AddSequence adds all k-mers from a sequence to the set
|
||||
// Uses an iterator to avoid allocating an intermediate vector
|
||||
func (ks *KmerSet) AddSequence(seq *obiseq.BioSequence) {
|
||||
rawSeq := seq.Sequence()
|
||||
for canonical := range IterCanonicalKmers(rawSeq, ks.k) {
|
||||
ks.bitmap.Add(canonical)
|
||||
}
|
||||
}
|
||||
|
||||
// AddSequences adds all k-mers from multiple sequences in batch
|
||||
func (ks *KmerSet) AddSequences(sequences *obiseq.BioSequenceSlice) {
|
||||
for _, seq := range *sequences {
|
||||
ks.AddSequence(seq)
|
||||
}
|
||||
}
|
||||
|
||||
// Contains checks if a k-mer is in the set
|
||||
func (ks *KmerSet) Contains(kmer uint64) bool {
|
||||
return ks.bitmap.Contains(kmer)
|
||||
}
|
||||
|
||||
// Len returns the number of k-mers in the set
|
||||
func (ks *KmerSet) Len() uint64 {
|
||||
return ks.bitmap.GetCardinality()
|
||||
}
|
||||
|
||||
// MemoryUsage returns memory usage in bytes
|
||||
func (ks *KmerSet) MemoryUsage() uint64 {
|
||||
return ks.bitmap.GetSizeInBytes()
|
||||
}
|
||||
|
||||
// Clear empties the set
|
||||
func (ks *KmerSet) Clear() {
|
||||
ks.bitmap.Clear()
|
||||
}
|
||||
|
||||
// Copy creates a copy of the set (consistent with BioSequence.Copy)
|
||||
func (ks *KmerSet) Copy() *KmerSet {
|
||||
// Copy metadata
|
||||
metadata := make(map[string]interface{}, len(ks.Metadata))
|
||||
for k, v := range ks.Metadata {
|
||||
metadata[k] = v
|
||||
}
|
||||
|
||||
return &KmerSet{
|
||||
id: ks.id,
|
||||
k: ks.k,
|
||||
bitmap: ks.bitmap.Clone(),
|
||||
Metadata: metadata,
|
||||
}
|
||||
}
|
||||
|
||||
// Id returns the identifier of the KmerSet (consistent with BioSequence.Id)
|
||||
func (ks *KmerSet) Id() string {
|
||||
return ks.id
|
||||
}
|
||||
|
||||
// SetId sets the identifier of the KmerSet (consistent with BioSequence.SetId)
|
||||
func (ks *KmerSet) SetId(id string) {
|
||||
ks.id = id
|
||||
}
|
||||
|
||||
// Union returns the union of this set with another
|
||||
func (ks *KmerSet) Union(other *KmerSet) *KmerSet {
|
||||
if ks.k != other.k {
|
||||
panic(fmt.Sprintf("Cannot union KmerSets with different k values: %d vs %d", ks.k, other.k))
|
||||
}
|
||||
result := ks.bitmap.Clone()
|
||||
result.Or(other.bitmap)
|
||||
return NewKmerSetFromBitmap(ks.k, result)
|
||||
}
|
||||
|
||||
// Intersect returns the intersection of this set with another
|
||||
func (ks *KmerSet) Intersect(other *KmerSet) *KmerSet {
|
||||
if ks.k != other.k {
|
||||
panic(fmt.Sprintf("Cannot intersect KmerSets with different k values: %d vs %d", ks.k, other.k))
|
||||
}
|
||||
result := ks.bitmap.Clone()
|
||||
result.And(other.bitmap)
|
||||
return NewKmerSetFromBitmap(ks.k, result)
|
||||
}
|
||||
|
||||
// Difference returns the difference of this set with another (this - other)
|
||||
func (ks *KmerSet) Difference(other *KmerSet) *KmerSet {
|
||||
if ks.k != other.k {
|
||||
panic(fmt.Sprintf("Cannot subtract KmerSets with different k values: %d vs %d", ks.k, other.k))
|
||||
}
|
||||
result := ks.bitmap.Clone()
|
||||
result.AndNot(other.bitmap)
|
||||
return NewKmerSetFromBitmap(ks.k, result)
|
||||
}
|
||||
|
||||
// JaccardDistance computes the Jaccard distance between two KmerSets.
|
||||
// The Jaccard distance is defined as: 1 - (|A ∩ B| / |A ∪ B|)
|
||||
// where A and B are the two sets.
|
||||
//
|
||||
// Returns:
|
||||
// - 0.0 when sets are identical (distance = 0, similarity = 1)
|
||||
// - 1.0 when sets are completely disjoint (distance = 1, similarity = 0)
|
||||
// - 1.0 when both sets are empty (by convention)
|
||||
//
|
||||
// Time complexity: O(|A| + |B|) for Roaring Bitmap operations
|
||||
// Space complexity: O(1) as operations are done in-place on temporary bitmaps
|
||||
func (ks *KmerSet) JaccardDistance(other *KmerSet) float64 {
|
||||
if ks.k != other.k {
|
||||
panic(fmt.Sprintf("Cannot compute Jaccard distance between KmerSets with different k values: %d vs %d", ks.k, other.k))
|
||||
}
|
||||
|
||||
// Compute intersection cardinality
|
||||
intersectionCard := ks.bitmap.AndCardinality(other.bitmap)
|
||||
|
||||
// Compute union cardinality
|
||||
unionCard := ks.bitmap.OrCardinality(other.bitmap)
|
||||
|
||||
// If union is empty, both sets are empty - return 1.0 by convention
|
||||
if unionCard == 0 {
|
||||
return 1.0
|
||||
}
|
||||
|
||||
// Jaccard similarity = |A ∩ B| / |A ∪ B|
|
||||
similarity := float64(intersectionCard) / float64(unionCard)
|
||||
|
||||
// Jaccard distance = 1 - similarity
|
||||
return 1.0 - similarity
|
||||
}
|
||||
|
||||
// JaccardSimilarity computes the Jaccard similarity coefficient between two KmerSets.
|
||||
// The Jaccard similarity is defined as: |A ∩ B| / |A ∪ B|
|
||||
//
|
||||
// Returns:
|
||||
// - 1.0 when sets are identical (maximum similarity)
|
||||
// - 0.0 when sets are completely disjoint (no similarity)
|
||||
// - 0.0 when both sets are empty (by convention)
|
||||
//
|
||||
// Time complexity: O(|A| + |B|) for Roaring Bitmap operations
|
||||
// Space complexity: O(1) as operations are done in-place on temporary bitmaps
|
||||
func (ks *KmerSet) JaccardSimilarity(other *KmerSet) float64 {
|
||||
return 1.0 - ks.JaccardDistance(other)
|
||||
}
|
||||
|
||||
// Iterator returns an iterator over all k-mers in the set
|
||||
func (ks *KmerSet) Iterator() roaring64.IntIterable64 {
|
||||
return ks.bitmap.Iterator()
|
||||
}
|
||||
|
||||
// Bitmap returns the underlying bitmap (for compatibility)
|
||||
func (ks *KmerSet) Bitmap() *roaring64.Bitmap {
|
||||
return ks.bitmap
|
||||
}
|
||||
362
pkg/obikmer/kmer_set_attributes.go
Normal file
362
pkg/obikmer/kmer_set_attributes.go
Normal file
@@ -0,0 +1,362 @@
|
||||
package obikmer
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"strconv"
|
||||
|
||||
"git.metabarcoding.org/obitools/obitools4/obitools4/pkg/obiutils"
|
||||
)
|
||||
|
||||
// ==================================
|
||||
// KMER SET ATTRIBUTE API
|
||||
// Mimic BioSequence attribute API from obiseq/attributes.go
|
||||
// ==================================
|
||||
|
||||
// HasAttribute vérifie si une clé d'attribut existe
|
||||
func (ks *KmerSet) HasAttribute(key string) bool {
|
||||
_, ok := ks.Metadata[key]
|
||||
return ok
|
||||
}
|
||||
|
||||
// GetAttribute récupère la valeur d'un attribut
|
||||
// Cas particuliers: "id" utilise Id(), "k" utilise K()
|
||||
func (ks *KmerSet) GetAttribute(key string) (interface{}, bool) {
|
||||
switch key {
|
||||
case "id":
|
||||
return ks.Id(), true
|
||||
case "k":
|
||||
return ks.K(), true
|
||||
default:
|
||||
value, ok := ks.Metadata[key]
|
||||
return value, ok
|
||||
}
|
||||
}
|
||||
|
||||
// SetAttribute sets the value of an attribute
|
||||
// Cas particuliers: "id" utilise SetId(), "k" est immutable (panique)
|
||||
func (ks *KmerSet) SetAttribute(key string, value interface{}) {
|
||||
switch key {
|
||||
case "id":
|
||||
if id, ok := value.(string); ok {
|
||||
ks.SetId(id)
|
||||
} else {
|
||||
panic(fmt.Sprintf("id must be a string, got %T", value))
|
||||
}
|
||||
case "k":
|
||||
panic("k is immutable and cannot be modified via SetAttribute")
|
||||
default:
|
||||
ks.Metadata[key] = value
|
||||
}
|
||||
}
|
||||
|
||||
// DeleteAttribute supprime un attribut
|
||||
func (ks *KmerSet) DeleteAttribute(key string) {
|
||||
delete(ks.Metadata, key)
|
||||
}
|
||||
|
||||
// RemoveAttribute supprime un attribut (alias de DeleteAttribute)
|
||||
func (ks *KmerSet) RemoveAttribute(key string) {
|
||||
ks.DeleteAttribute(key)
|
||||
}
|
||||
|
||||
// RenameAttribute renomme un attribut
|
||||
func (ks *KmerSet) RenameAttribute(newName, oldName string) {
|
||||
if value, ok := ks.Metadata[oldName]; ok {
|
||||
ks.Metadata[newName] = value
|
||||
delete(ks.Metadata, oldName)
|
||||
}
|
||||
}
|
||||
|
||||
// GetIntAttribute récupère un attribut en tant qu'entier
|
||||
func (ks *KmerSet) GetIntAttribute(key string) (int, bool) {
|
||||
value, ok := ks.Metadata[key]
|
||||
if !ok {
|
||||
return 0, false
|
||||
}
|
||||
|
||||
switch v := value.(type) {
|
||||
case int:
|
||||
return v, true
|
||||
case int64:
|
||||
return int(v), true
|
||||
case float64:
|
||||
return int(v), true
|
||||
case string:
|
||||
if i, err := strconv.Atoi(v); err == nil {
|
||||
return i, true
|
||||
}
|
||||
}
|
||||
return 0, false
|
||||
}
|
||||
|
||||
// GetFloatAttribute récupère un attribut en tant que float64
|
||||
func (ks *KmerSet) GetFloatAttribute(key string) (float64, bool) {
|
||||
value, ok := ks.Metadata[key]
|
||||
if !ok {
|
||||
return 0, false
|
||||
}
|
||||
|
||||
switch v := value.(type) {
|
||||
case float64:
|
||||
return v, true
|
||||
case float32:
|
||||
return float64(v), true
|
||||
case int:
|
||||
return float64(v), true
|
||||
case int64:
|
||||
return float64(v), true
|
||||
case string:
|
||||
if f, err := strconv.ParseFloat(v, 64); err == nil {
|
||||
return f, true
|
||||
}
|
||||
}
|
||||
return 0, false
|
||||
}
|
||||
|
||||
// GetNumericAttribute récupère un attribut numérique (alias de GetFloatAttribute)
|
||||
func (ks *KmerSet) GetNumericAttribute(key string) (float64, bool) {
|
||||
return ks.GetFloatAttribute(key)
|
||||
}
|
||||
|
||||
// GetStringAttribute récupère un attribut en tant que chaîne
|
||||
func (ks *KmerSet) GetStringAttribute(key string) (string, bool) {
|
||||
value, ok := ks.Metadata[key]
|
||||
if !ok {
|
||||
return "", false
|
||||
}
|
||||
|
||||
switch v := value.(type) {
|
||||
case string:
|
||||
return v, true
|
||||
default:
|
||||
return fmt.Sprintf("%v", v), true
|
||||
}
|
||||
}
|
||||
|
||||
// GetBoolAttribute récupère un attribut en tant que booléen
|
||||
func (ks *KmerSet) GetBoolAttribute(key string) (bool, bool) {
|
||||
value, ok := ks.Metadata[key]
|
||||
if !ok {
|
||||
return false, false
|
||||
}
|
||||
|
||||
switch v := value.(type) {
|
||||
case bool:
|
||||
return v, true
|
||||
case int:
|
||||
return v != 0, true
|
||||
case string:
|
||||
if b, err := strconv.ParseBool(v); err == nil {
|
||||
return b, true
|
||||
}
|
||||
}
|
||||
return false, false
|
||||
}
|
||||
|
||||
// AttributeKeys returns the set of attribute keys
|
||||
func (ks *KmerSet) AttributeKeys() obiutils.Set[string] {
|
||||
keys := obiutils.MakeSet[string]()
|
||||
for key := range ks.Metadata {
|
||||
keys.Add(key)
|
||||
}
|
||||
return keys
|
||||
}
|
||||
|
||||
// Keys returns the set of attribute keys (alias of AttributeKeys)
|
||||
func (ks *KmerSet) Keys() obiutils.Set[string] {
|
||||
return ks.AttributeKeys()
|
||||
}
|
||||
|
||||
// ==================================
|
||||
// KMER SET GROUP ATTRIBUTE API
|
||||
// Métadonnées du groupe + accès via Get() pour les sets individuels
|
||||
// ==================================
|
||||
|
||||
// HasAttribute vérifie si une clé d'attribut existe pour le groupe
|
||||
func (ksg *KmerSetGroup) HasAttribute(key string) bool {
|
||||
_, ok := ksg.Metadata[key]
|
||||
return ok
|
||||
}
|
||||
|
||||
// GetAttribute récupère la valeur d'un attribut du groupe
|
||||
// Cas particuliers: "id" utilise Id(), "k" utilise K()
|
||||
func (ksg *KmerSetGroup) GetAttribute(key string) (interface{}, bool) {
|
||||
switch key {
|
||||
case "id":
|
||||
return ksg.Id(), true
|
||||
case "k":
|
||||
return ksg.K(), true
|
||||
default:
|
||||
value, ok := ksg.Metadata[key]
|
||||
return value, ok
|
||||
}
|
||||
}
|
||||
|
||||
// SetAttribute sets the value of an attribute du groupe
|
||||
// Cas particuliers: "id" utilise SetId(), "k" est immutable (panique)
|
||||
func (ksg *KmerSetGroup) SetAttribute(key string, value interface{}) {
|
||||
switch key {
|
||||
case "id":
|
||||
if id, ok := value.(string); ok {
|
||||
ksg.SetId(id)
|
||||
} else {
|
||||
panic(fmt.Sprintf("id must be a string, got %T", value))
|
||||
}
|
||||
case "k":
|
||||
panic("k is immutable and cannot be modified via SetAttribute")
|
||||
default:
|
||||
ksg.Metadata[key] = value
|
||||
}
|
||||
}
|
||||
|
||||
// DeleteAttribute supprime un attribut du groupe
|
||||
func (ksg *KmerSetGroup) DeleteAttribute(key string) {
|
||||
delete(ksg.Metadata, key)
|
||||
}
|
||||
|
||||
// RemoveAttribute supprime un attribut du groupe (alias)
|
||||
func (ksg *KmerSetGroup) RemoveAttribute(key string) {
|
||||
ksg.DeleteAttribute(key)
|
||||
}
|
||||
|
||||
// RenameAttribute renomme un attribut du groupe
|
||||
func (ksg *KmerSetGroup) RenameAttribute(newName, oldName string) {
|
||||
if value, ok := ksg.Metadata[oldName]; ok {
|
||||
ksg.Metadata[newName] = value
|
||||
delete(ksg.Metadata, oldName)
|
||||
}
|
||||
}
|
||||
|
||||
// GetIntAttribute récupère un attribut entier du groupe
|
||||
func (ksg *KmerSetGroup) GetIntAttribute(key string) (int, bool) {
|
||||
value, ok := ksg.GetAttribute(key)
|
||||
if !ok {
|
||||
return 0, false
|
||||
}
|
||||
|
||||
switch v := value.(type) {
|
||||
case int:
|
||||
return v, true
|
||||
case int64:
|
||||
return int(v), true
|
||||
case float64:
|
||||
return int(v), true
|
||||
case string:
|
||||
if i, err := strconv.Atoi(v); err == nil {
|
||||
return i, true
|
||||
}
|
||||
}
|
||||
return 0, false
|
||||
}
|
||||
|
||||
// GetFloatAttribute récupère un attribut float64 du groupe
|
||||
func (ksg *KmerSetGroup) GetFloatAttribute(key string) (float64, bool) {
|
||||
value, ok := ksg.GetAttribute(key)
|
||||
if !ok {
|
||||
return 0, false
|
||||
}
|
||||
|
||||
switch v := value.(type) {
|
||||
case float64:
|
||||
return v, true
|
||||
case float32:
|
||||
return float64(v), true
|
||||
case int:
|
||||
return float64(v), true
|
||||
case int64:
|
||||
return float64(v), true
|
||||
case string:
|
||||
if f, err := strconv.ParseFloat(v, 64); err == nil {
|
||||
return f, true
|
||||
}
|
||||
}
|
||||
return 0, false
|
||||
}
|
||||
|
||||
// GetNumericAttribute récupère un attribut numérique du groupe
|
||||
func (ksg *KmerSetGroup) GetNumericAttribute(key string) (float64, bool) {
|
||||
return ksg.GetFloatAttribute(key)
|
||||
}
|
||||
|
||||
// GetStringAttribute récupère un attribut chaîne du groupe
|
||||
func (ksg *KmerSetGroup) GetStringAttribute(key string) (string, bool) {
|
||||
value, ok := ksg.GetAttribute(key)
|
||||
if !ok {
|
||||
return "", false
|
||||
}
|
||||
|
||||
switch v := value.(type) {
|
||||
case string:
|
||||
return v, true
|
||||
default:
|
||||
return fmt.Sprintf("%v", v), true
|
||||
}
|
||||
}
|
||||
|
||||
// GetBoolAttribute récupère un attribut booléen du groupe
|
||||
func (ksg *KmerSetGroup) GetBoolAttribute(key string) (bool, bool) {
|
||||
value, ok := ksg.GetAttribute(key)
|
||||
if !ok {
|
||||
return false, false
|
||||
}
|
||||
|
||||
switch v := value.(type) {
|
||||
case bool:
|
||||
return v, true
|
||||
case int:
|
||||
return v != 0, true
|
||||
case string:
|
||||
if b, err := strconv.ParseBool(v); err == nil {
|
||||
return b, true
|
||||
}
|
||||
}
|
||||
return false, false
|
||||
}
|
||||
|
||||
// AttributeKeys returns the set of attribute keys du groupe
|
||||
func (ksg *KmerSetGroup) AttributeKeys() obiutils.Set[string] {
|
||||
keys := obiutils.MakeSet[string]()
|
||||
for key := range ksg.Metadata {
|
||||
keys.Add(key)
|
||||
}
|
||||
return keys
|
||||
}
|
||||
|
||||
// Keys returns the set of group attribute keys (alias)
|
||||
func (ksg *KmerSetGroup) Keys() obiutils.Set[string] {
|
||||
return ksg.AttributeKeys()
|
||||
}
|
||||
|
||||
// ==================================
|
||||
// MÉTHODES POUR ACCÉDER AUX ATTRIBUTS DES SETS INDIVIDUELS VIA Get()
|
||||
// Architecture zero-copy: ksg.Get(i).SetAttribute(...)
|
||||
// ==================================
|
||||
|
||||
// Exemple d'utilisation:
|
||||
// Pour accéder aux métadonnées d'un KmerSet individuel dans un groupe:
|
||||
// ks := ksg.Get(0)
|
||||
// ks.SetAttribute("level", 1)
|
||||
// hasLevel := ks.HasAttribute("level")
|
||||
//
|
||||
// Pour les métadonnées du groupe:
|
||||
// ksg.SetAttribute("name", "FrequencyFilter")
|
||||
// name, ok := ksg.GetStringAttribute("name")
|
||||
|
||||
// AllAttributeKeys returns all unique attribute keys of the group AND all its sets
|
||||
func (ksg *KmerSetGroup) AllAttributeKeys() obiutils.Set[string] {
|
||||
keys := obiutils.MakeSet[string]()
|
||||
|
||||
// Ajouter les clés du groupe
|
||||
for key := range ksg.Metadata {
|
||||
keys.Add(key)
|
||||
}
|
||||
|
||||
// Ajouter les clés de chaque set
|
||||
for _, ks := range ksg.sets {
|
||||
for key := range ks.Metadata {
|
||||
keys.Add(key)
|
||||
}
|
||||
}
|
||||
|
||||
return keys
|
||||
}
|
||||
339
pkg/obikmer/kmer_set_group.go
Normal file
339
pkg/obikmer/kmer_set_group.go
Normal file
@@ -0,0 +1,339 @@
|
||||
package obikmer
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
|
||||
"git.metabarcoding.org/obitools/obitools4/obitools4/pkg/obidist"
|
||||
"git.metabarcoding.org/obitools/obitools4/obitools4/pkg/obiseq"
|
||||
)
|
||||
|
||||
// KmerSetGroup represents a vector of KmerSet
|
||||
// Used to manage multiple k-mer sets (for example, by frequency level)
|
||||
type KmerSetGroup struct {
|
||||
id string // Unique identifier of the KmerSetGroup
|
||||
k int // Size of k-mers (immutable)
|
||||
sets []*KmerSet // Vector of KmerSet
|
||||
Metadata map[string]interface{} // Group metadata (not individual sets)
|
||||
}
|
||||
|
||||
// NewKmerSetGroup creates a new group of n KmerSets
|
||||
func NewKmerSetGroup(k int, n int) *KmerSetGroup {
|
||||
if n < 1 {
|
||||
panic("KmerSetGroup size must be >= 1")
|
||||
}
|
||||
|
||||
sets := make([]*KmerSet, n)
|
||||
for i := range sets {
|
||||
sets[i] = NewKmerSet(k)
|
||||
}
|
||||
|
||||
return &KmerSetGroup{
|
||||
k: k,
|
||||
sets: sets,
|
||||
Metadata: make(map[string]interface{}),
|
||||
}
|
||||
}
|
||||
|
||||
// K returns the size of k-mers (immutable)
|
||||
func (ksg *KmerSetGroup) K() int {
|
||||
return ksg.k
|
||||
}
|
||||
|
||||
// Size returns the number of KmerSet in the group
|
||||
func (ksg *KmerSetGroup) Size() int {
|
||||
return len(ksg.sets)
|
||||
}
|
||||
|
||||
// Get returns the KmerSet at the given index
|
||||
// Returns nil if the index is invalid
|
||||
func (ksg *KmerSetGroup) Get(index int) *KmerSet {
|
||||
if index < 0 || index >= len(ksg.sets) {
|
||||
return nil
|
||||
}
|
||||
return ksg.sets[index]
|
||||
}
|
||||
|
||||
// Set replaces the KmerSet at the given index
|
||||
// Panics if the index is invalid or if k does not match
|
||||
func (ksg *KmerSetGroup) Set(index int, ks *KmerSet) {
|
||||
if index < 0 || index >= len(ksg.sets) {
|
||||
panic(fmt.Sprintf("Index out of bounds: %d (size: %d)", index, len(ksg.sets)))
|
||||
}
|
||||
if ks.k != ksg.k {
|
||||
panic(fmt.Sprintf("KmerSet k mismatch: expected %d, got %d", ksg.k, ks.k))
|
||||
}
|
||||
ksg.sets[index] = ks
|
||||
}
|
||||
|
||||
// Len returns the number of k-mers in a specific KmerSet
|
||||
// Without argument: returns the number of k-mers in the last KmerSet
|
||||
// With argument index: returns the number of k-mers in the KmerSet at this index
|
||||
func (ksg *KmerSetGroup) Len(index ...int) uint64 {
|
||||
if len(index) == 0 {
|
||||
// Without argument: last KmerSet
|
||||
return ksg.sets[len(ksg.sets)-1].Len()
|
||||
}
|
||||
|
||||
// With argument: specific KmerSet
|
||||
idx := index[0]
|
||||
if idx < 0 || idx >= len(ksg.sets) {
|
||||
return 0
|
||||
}
|
||||
return ksg.sets[idx].Len()
|
||||
}
|
||||
|
||||
// MemoryUsage returns the total memory usage in bytes
|
||||
func (ksg *KmerSetGroup) MemoryUsage() uint64 {
|
||||
total := uint64(0)
|
||||
for _, ks := range ksg.sets {
|
||||
total += ks.MemoryUsage()
|
||||
}
|
||||
return total
|
||||
}
|
||||
|
||||
// Clear empties all KmerSet in the group
|
||||
func (ksg *KmerSetGroup) Clear() {
|
||||
for _, ks := range ksg.sets {
|
||||
ks.Clear()
|
||||
}
|
||||
}
|
||||
|
||||
// Copy creates a complete copy of the group (consistent with BioSequence.Copy)
|
||||
func (ksg *KmerSetGroup) Copy() *KmerSetGroup {
|
||||
copiedSets := make([]*KmerSet, len(ksg.sets))
|
||||
for i, ks := range ksg.sets {
|
||||
copiedSets[i] = ks.Copy() // Copy each KmerSet with its metadata
|
||||
}
|
||||
|
||||
// Copy group metadata
|
||||
groupMetadata := make(map[string]interface{}, len(ksg.Metadata))
|
||||
for k, v := range ksg.Metadata {
|
||||
groupMetadata[k] = v
|
||||
}
|
||||
|
||||
return &KmerSetGroup{
|
||||
id: ksg.id,
|
||||
k: ksg.k,
|
||||
sets: copiedSets,
|
||||
Metadata: groupMetadata,
|
||||
}
|
||||
}
|
||||
|
||||
// Id returns the identifier of the KmerSetGroup (consistent with BioSequence.Id)
|
||||
func (ksg *KmerSetGroup) Id() string {
|
||||
return ksg.id
|
||||
}
|
||||
|
||||
// SetId sets the identifier of the KmerSetGroup (consistent with BioSequence.SetId)
|
||||
func (ksg *KmerSetGroup) SetId(id string) {
|
||||
ksg.id = id
|
||||
}
|
||||
|
||||
// AddSequence adds all k-mers from a sequence to a specific KmerSet
|
||||
func (ksg *KmerSetGroup) AddSequence(seq *obiseq.BioSequence, index int) {
|
||||
if index < 0 || index >= len(ksg.sets) {
|
||||
panic(fmt.Sprintf("Index out of bounds: %d (size: %d)", index, len(ksg.sets)))
|
||||
}
|
||||
ksg.sets[index].AddSequence(seq)
|
||||
}
|
||||
|
||||
// AddSequences adds all k-mers from multiple sequences to a specific KmerSet
|
||||
func (ksg *KmerSetGroup) AddSequences(sequences *obiseq.BioSequenceSlice, index int) {
|
||||
if index < 0 || index >= len(ksg.sets) {
|
||||
panic(fmt.Sprintf("Index out of bounds: %d (size: %d)", index, len(ksg.sets)))
|
||||
}
|
||||
ksg.sets[index].AddSequences(sequences)
|
||||
}
|
||||
|
||||
// Union returns the union of all KmerSet in the group
|
||||
// Optimization: starts from the largest set to minimize operations
|
||||
func (ksg *KmerSetGroup) Union() *KmerSet {
|
||||
if len(ksg.sets) == 0 {
|
||||
return NewKmerSet(ksg.k)
|
||||
}
|
||||
|
||||
if len(ksg.sets) == 1 {
|
||||
return ksg.sets[0].Copy()
|
||||
}
|
||||
|
||||
// Find the index of the largest set (the one with the most k-mers)
|
||||
maxIdx := 0
|
||||
maxCard := ksg.sets[0].Len()
|
||||
for i := 1; i < len(ksg.sets); i++ {
|
||||
card := ksg.sets[i].Len()
|
||||
if card > maxCard {
|
||||
maxCard = card
|
||||
maxIdx = i
|
||||
}
|
||||
}
|
||||
|
||||
// Copy the largest set and perform unions in-place
|
||||
result := ksg.sets[maxIdx].bitmap.Clone()
|
||||
for i := 0; i < len(ksg.sets); i++ {
|
||||
if i != maxIdx {
|
||||
result.Or(ksg.sets[i].bitmap)
|
||||
}
|
||||
}
|
||||
|
||||
return NewKmerSetFromBitmap(ksg.k, result)
|
||||
}
|
||||
|
||||
// Intersect returns the intersection of all KmerSet in the group
|
||||
// Optimization: starts from the smallest set to minimize operations
|
||||
func (ksg *KmerSetGroup) Intersect() *KmerSet {
|
||||
if len(ksg.sets) == 0 {
|
||||
return NewKmerSet(ksg.k)
|
||||
}
|
||||
|
||||
if len(ksg.sets) == 1 {
|
||||
return ksg.sets[0].Copy()
|
||||
}
|
||||
|
||||
// Find the index of the smallest set (the one with the fewest k-mers)
|
||||
minIdx := 0
|
||||
minCard := ksg.sets[0].Len()
|
||||
for i := 1; i < len(ksg.sets); i++ {
|
||||
card := ksg.sets[i].Len()
|
||||
if card < minCard {
|
||||
minCard = card
|
||||
minIdx = i
|
||||
}
|
||||
}
|
||||
|
||||
// Copy the smallest set and perform intersections in-place
|
||||
result := ksg.sets[minIdx].bitmap.Clone()
|
||||
for i := 0; i < len(ksg.sets); i++ {
|
||||
if i != minIdx {
|
||||
result.And(ksg.sets[i].bitmap)
|
||||
}
|
||||
}
|
||||
|
||||
return NewKmerSetFromBitmap(ksg.k, result)
|
||||
}
|
||||
|
||||
// Stats returns statistics for each KmerSet in the group
|
||||
type KmerSetGroupStats struct {
|
||||
K int
|
||||
Size int // Number of KmerSet
|
||||
TotalBytes uint64 // Total memory used
|
||||
Sets []KmerSetStats // Stats of each KmerSet
|
||||
}
|
||||
|
||||
type KmerSetStats struct {
|
||||
Index int // Index of the KmerSet in the group
|
||||
Len uint64 // Number of k-mers
|
||||
SizeBytes uint64 // Size in bytes
|
||||
}
|
||||
|
||||
func (ksg *KmerSetGroup) Stats() KmerSetGroupStats {
|
||||
stats := KmerSetGroupStats{
|
||||
K: ksg.k,
|
||||
Size: len(ksg.sets),
|
||||
Sets: make([]KmerSetStats, len(ksg.sets)),
|
||||
}
|
||||
|
||||
for i, ks := range ksg.sets {
|
||||
sizeBytes := ks.MemoryUsage()
|
||||
stats.Sets[i] = KmerSetStats{
|
||||
Index: i,
|
||||
Len: ks.Len(),
|
||||
SizeBytes: sizeBytes,
|
||||
}
|
||||
stats.TotalBytes += sizeBytes
|
||||
}
|
||||
|
||||
return stats
|
||||
}
|
||||
|
||||
func (ksgs KmerSetGroupStats) String() string {
|
||||
result := fmt.Sprintf(`KmerSetGroup Statistics (k=%d, size=%d):
|
||||
Total memory: %.2f MB
|
||||
|
||||
Set breakdown:
|
||||
`, ksgs.K, ksgs.Size, float64(ksgs.TotalBytes)/1024/1024)
|
||||
|
||||
for _, set := range ksgs.Sets {
|
||||
result += fmt.Sprintf(" Set[%d]: %d k-mers (%.2f MB)\n",
|
||||
set.Index,
|
||||
set.Len,
|
||||
float64(set.SizeBytes)/1024/1024)
|
||||
}
|
||||
|
||||
return result
|
||||
}
|
||||
|
||||
// JaccardDistanceMatrix computes a pairwise Jaccard distance matrix for all KmerSets in the group.
|
||||
// Returns a triangular distance matrix where element (i, j) represents the Jaccard distance
|
||||
// between set i and set j.
|
||||
//
|
||||
// The Jaccard distance is: 1 - (|A ∩ B| / |A ∪ B|)
|
||||
//
|
||||
// The matrix labels are set to the IDs of the individual KmerSets if available,
|
||||
// otherwise they are set to "set_0", "set_1", etc.
|
||||
//
|
||||
// Time complexity: O(n² × (|A| + |B|)) where n is the number of sets
|
||||
// Space complexity: O(n²) for the distance matrix
|
||||
func (ksg *KmerSetGroup) JaccardDistanceMatrix() *obidist.DistMatrix {
|
||||
n := len(ksg.sets)
|
||||
|
||||
// Create labels from set IDs
|
||||
labels := make([]string, n)
|
||||
for i, ks := range ksg.sets {
|
||||
if ks.Id() != "" {
|
||||
labels[i] = ks.Id()
|
||||
} else {
|
||||
labels[i] = fmt.Sprintf("set_%d", i)
|
||||
}
|
||||
}
|
||||
|
||||
dm := obidist.NewDistMatrixWithLabels(labels)
|
||||
|
||||
// Compute pairwise distances
|
||||
for i := 0; i < n-1; i++ {
|
||||
for j := i + 1; j < n; j++ {
|
||||
distance := ksg.sets[i].JaccardDistance(ksg.sets[j])
|
||||
dm.Set(i, j, distance)
|
||||
}
|
||||
}
|
||||
|
||||
return dm
|
||||
}
|
||||
|
||||
// JaccardSimilarityMatrix computes a pairwise Jaccard similarity matrix for all KmerSets in the group.
|
||||
// Returns a similarity matrix where element (i, j) represents the Jaccard similarity
|
||||
// between set i and set j.
|
||||
//
|
||||
// The Jaccard similarity is: |A ∩ B| / |A ∪ B|
|
||||
//
|
||||
// The diagonal is 1.0 (similarity of a set to itself).
|
||||
//
|
||||
// The matrix labels are set to the IDs of the individual KmerSets if available,
|
||||
// otherwise they are set to "set_0", "set_1", etc.
|
||||
//
|
||||
// Time complexity: O(n² × (|A| + |B|)) where n is the number of sets
|
||||
// Space complexity: O(n²) for the similarity matrix
|
||||
func (ksg *KmerSetGroup) JaccardSimilarityMatrix() *obidist.DistMatrix {
|
||||
n := len(ksg.sets)
|
||||
|
||||
// Create labels from set IDs
|
||||
labels := make([]string, n)
|
||||
for i, ks := range ksg.sets {
|
||||
if ks.Id() != "" {
|
||||
labels[i] = ks.Id()
|
||||
} else {
|
||||
labels[i] = fmt.Sprintf("set_%d", i)
|
||||
}
|
||||
}
|
||||
|
||||
sm := obidist.NewSimilarityMatrixWithLabels(labels)
|
||||
|
||||
// Compute pairwise similarities
|
||||
for i := 0; i < n-1; i++ {
|
||||
for j := i + 1; j < n; j++ {
|
||||
similarity := ksg.sets[i].JaccardSimilarity(ksg.sets[j])
|
||||
sm.Set(i, j, similarity)
|
||||
}
|
||||
}
|
||||
|
||||
return sm
|
||||
}
|
||||
231
pkg/obikmer/kmer_set_group_jaccard_test.go
Normal file
231
pkg/obikmer/kmer_set_group_jaccard_test.go
Normal file
@@ -0,0 +1,231 @@
|
||||
package obikmer
|
||||
|
||||
import (
|
||||
"math"
|
||||
"testing"
|
||||
)
|
||||
|
||||
func TestKmerSetGroupJaccardDistanceMatrix(t *testing.T) {
|
||||
ksg := NewKmerSetGroup(5, 3)
|
||||
|
||||
// Set 0: {1, 2, 3}
|
||||
ksg.Get(0).AddKmerCode(1)
|
||||
ksg.Get(0).AddKmerCode(2)
|
||||
ksg.Get(0).AddKmerCode(3)
|
||||
ksg.Get(0).SetId("set_A")
|
||||
|
||||
// Set 1: {2, 3, 4}
|
||||
ksg.Get(1).AddKmerCode(2)
|
||||
ksg.Get(1).AddKmerCode(3)
|
||||
ksg.Get(1).AddKmerCode(4)
|
||||
ksg.Get(1).SetId("set_B")
|
||||
|
||||
// Set 2: {5, 6, 7}
|
||||
ksg.Get(2).AddKmerCode(5)
|
||||
ksg.Get(2).AddKmerCode(6)
|
||||
ksg.Get(2).AddKmerCode(7)
|
||||
ksg.Get(2).SetId("set_C")
|
||||
|
||||
dm := ksg.JaccardDistanceMatrix()
|
||||
|
||||
// Check labels
|
||||
if dm.GetLabel(0) != "set_A" {
|
||||
t.Errorf("Expected label 'set_A' at index 0, got '%s'", dm.GetLabel(0))
|
||||
}
|
||||
if dm.GetLabel(1) != "set_B" {
|
||||
t.Errorf("Expected label 'set_B' at index 1, got '%s'", dm.GetLabel(1))
|
||||
}
|
||||
if dm.GetLabel(2) != "set_C" {
|
||||
t.Errorf("Expected label 'set_C' at index 2, got '%s'", dm.GetLabel(2))
|
||||
}
|
||||
|
||||
// Check distances
|
||||
// Distance(0, 1):
|
||||
// Intersection: {2, 3} -> 2 elements
|
||||
// Union: {1, 2, 3, 4} -> 4 elements
|
||||
// Similarity: 2/4 = 0.5
|
||||
// Distance: 1 - 0.5 = 0.5
|
||||
expectedDist01 := 0.5
|
||||
actualDist01 := dm.Get(0, 1)
|
||||
if math.Abs(actualDist01-expectedDist01) > 1e-10 {
|
||||
t.Errorf("Distance(0, 1): expected %f, got %f", expectedDist01, actualDist01)
|
||||
}
|
||||
|
||||
// Distance(0, 2):
|
||||
// Intersection: {} -> 0 elements
|
||||
// Union: {1, 2, 3, 5, 6, 7} -> 6 elements
|
||||
// Similarity: 0/6 = 0
|
||||
// Distance: 1 - 0 = 1.0
|
||||
expectedDist02 := 1.0
|
||||
actualDist02 := dm.Get(0, 2)
|
||||
if math.Abs(actualDist02-expectedDist02) > 1e-10 {
|
||||
t.Errorf("Distance(0, 2): expected %f, got %f", expectedDist02, actualDist02)
|
||||
}
|
||||
|
||||
// Distance(1, 2):
|
||||
// Intersection: {} -> 0 elements
|
||||
// Union: {2, 3, 4, 5, 6, 7} -> 6 elements
|
||||
// Similarity: 0/6 = 0
|
||||
// Distance: 1 - 0 = 1.0
|
||||
expectedDist12 := 1.0
|
||||
actualDist12 := dm.Get(1, 2)
|
||||
if math.Abs(actualDist12-expectedDist12) > 1e-10 {
|
||||
t.Errorf("Distance(1, 2): expected %f, got %f", expectedDist12, actualDist12)
|
||||
}
|
||||
|
||||
// Check symmetry
|
||||
if dm.Get(0, 1) != dm.Get(1, 0) {
|
||||
t.Errorf("Matrix not symmetric: Get(0, 1) = %f, Get(1, 0) = %f",
|
||||
dm.Get(0, 1), dm.Get(1, 0))
|
||||
}
|
||||
|
||||
// Check diagonal
|
||||
if dm.Get(0, 0) != 0.0 {
|
||||
t.Errorf("Diagonal should be 0, got %f", dm.Get(0, 0))
|
||||
}
|
||||
if dm.Get(1, 1) != 0.0 {
|
||||
t.Errorf("Diagonal should be 0, got %f", dm.Get(1, 1))
|
||||
}
|
||||
if dm.Get(2, 2) != 0.0 {
|
||||
t.Errorf("Diagonal should be 0, got %f", dm.Get(2, 2))
|
||||
}
|
||||
}
|
||||
|
||||
// TestKmerSetGroupJaccardSimilarityMatrix checks the pairwise Jaccard
// similarity matrix for three 5-mer sets where sets 0 and 2 are identical:
// the two distinct off-diagonal similarities and a diagonal of 1.0
// (self-similarity).
func TestKmerSetGroupJaccardSimilarityMatrix(t *testing.T) {
	ksg := NewKmerSetGroup(5, 3)

	// Set 0: {1, 2, 3}
	ksg.Get(0).AddKmerCode(1)
	ksg.Get(0).AddKmerCode(2)
	ksg.Get(0).AddKmerCode(3)

	// Set 1: {2, 3, 4}
	ksg.Get(1).AddKmerCode(2)
	ksg.Get(1).AddKmerCode(3)
	ksg.Get(1).AddKmerCode(4)

	// Set 2: {1, 2, 3} (same as set 0)
	ksg.Get(2).AddKmerCode(1)
	ksg.Get(2).AddKmerCode(2)
	ksg.Get(2).AddKmerCode(3)

	sm := ksg.JaccardSimilarityMatrix()

	// Check similarities
	// Similarity(0, 1): intersection {2,3} / union {1,2,3,4} = 2/4 = 0.5
	expectedSim01 := 0.5
	actualSim01 := sm.Get(0, 1)
	if math.Abs(actualSim01-expectedSim01) > 1e-10 {
		t.Errorf("Similarity(0, 1): expected %f, got %f", expectedSim01, actualSim01)
	}

	// Similarity(0, 2): 1.0 (identical sets)
	expectedSim02 := 1.0
	actualSim02 := sm.Get(0, 2)
	if math.Abs(actualSim02-expectedSim02) > 1e-10 {
		t.Errorf("Similarity(0, 2): expected %f, got %f", expectedSim02, actualSim02)
	}

	// Similarity(1, 2): 0.5
	// Intersection: {2, 3} -> 2
	// Union: {1, 2, 3, 4} -> 4
	// Similarity: 2/4 = 0.5
	expectedSim12 := 0.5
	actualSim12 := sm.Get(1, 2)
	if math.Abs(actualSim12-expectedSim12) > 1e-10 {
		t.Errorf("Similarity(1, 2): expected %f, got %f", expectedSim12, actualSim12)
	}

	// Check diagonal (similarity to self = 1.0)
	if sm.Get(0, 0) != 1.0 {
		t.Errorf("Diagonal should be 1.0, got %f", sm.Get(0, 0))
	}
	if sm.Get(1, 1) != 1.0 {
		t.Errorf("Diagonal should be 1.0, got %f", sm.Get(1, 1))
	}
	if sm.Get(2, 2) != 1.0 {
		t.Errorf("Diagonal should be 1.0, got %f", sm.Get(2, 2))
	}
}
|
||||
|
||||
// TestKmerSetGroupJaccardMatricesRelation verifies that, for every cell
// (i, j) of the two matrices (diagonal included), Jaccard distance and
// Jaccard similarity are complementary: distance + similarity == 1.0.
func TestKmerSetGroupJaccardMatricesRelation(t *testing.T) {
	ksg := NewKmerSetGroup(5, 4)

	// Create different sets (partially overlapping, plus one disjoint set 3)
	ksg.Get(0).AddKmerCode(1)
	ksg.Get(0).AddKmerCode(2)

	ksg.Get(1).AddKmerCode(2)
	ksg.Get(1).AddKmerCode(3)

	ksg.Get(2).AddKmerCode(1)
	ksg.Get(2).AddKmerCode(2)
	ksg.Get(2).AddKmerCode(3)

	ksg.Get(3).AddKmerCode(10)
	ksg.Get(3).AddKmerCode(20)

	dm := ksg.JaccardDistanceMatrix()
	sm := ksg.JaccardSimilarityMatrix()

	// For all pairs (including diagonal), distance + similarity should equal 1.0
	for i := 0; i < 4; i++ {
		for j := 0; j < 4; j++ {
			distance := dm.Get(i, j)
			similarity := sm.Get(i, j)
			sum := distance + similarity

			if math.Abs(sum-1.0) > 1e-10 {
				t.Errorf("At (%d, %d): distance %f + similarity %f = %f, expected 1.0",
					i, j, distance, similarity, sum)
			}
		}
	}
}
|
||||
|
||||
// TestKmerSetGroupJaccardMatrixLabels checks that when the individual sets
// carry no explicit ID, the distance matrix falls back to the default
// "set_<index>" labels.
func TestKmerSetGroupJaccardMatrixLabels(t *testing.T) {
	ksg := NewKmerSetGroup(5, 3)

	// Don't set IDs - should use default labels
	ksg.Get(0).AddKmerCode(1)
	ksg.Get(1).AddKmerCode(2)
	ksg.Get(2).AddKmerCode(3)

	dm := ksg.JaccardDistanceMatrix()

	// Check default labels
	if dm.GetLabel(0) != "set_0" {
		t.Errorf("Expected default label 'set_0', got '%s'", dm.GetLabel(0))
	}
	if dm.GetLabel(1) != "set_1" {
		t.Errorf("Expected default label 'set_1', got '%s'", dm.GetLabel(1))
	}
	if dm.GetLabel(2) != "set_2" {
		t.Errorf("Expected default label 'set_2', got '%s'", dm.GetLabel(2))
	}
}
|
||||
|
||||
// TestKmerSetGroupJaccardMatrixSize builds a group of 5 pairwise-disjoint
// sets and checks both the reported matrix size and that every off-diagonal
// distance is 1.0 (disjoint sets share no k-mers).
func TestKmerSetGroupJaccardMatrixSize(t *testing.T) {
	ksg := NewKmerSetGroup(5, 5)

	// Each set gets a single, unique k-mer code.
	for i := 0; i < 5; i++ {
		ksg.Get(i).AddKmerCode(uint64(i))
	}

	dm := ksg.JaccardDistanceMatrix()

	if dm.Size() != 5 {
		t.Errorf("Expected matrix size 5, got %d", dm.Size())
	}

	// All sets are disjoint, so all distances should be 1.0
	for i := 0; i < 5; i++ {
		for j := i + 1; j < 5; j++ {
			dist := dm.Get(i, j)
			if math.Abs(dist-1.0) > 1e-10 {
				t.Errorf("Expected distance 1.0 for disjoint sets (%d, %d), got %f",
					i, j, dist)
			}
		}
	}
}
|
||||
235
pkg/obikmer/kmer_set_group_quorum.go
Normal file
235
pkg/obikmer/kmer_set_group_quorum.go
Normal file
@@ -0,0 +1,235 @@
|
||||
package obikmer
|
||||
|
||||
import (
|
||||
"container/heap"
|
||||
|
||||
"github.com/RoaringBitmap/roaring/roaring64"
|
||||
)
|
||||
|
||||
// heapItem represents an element in the min-heap for k-way merge
|
||||
type heapItem struct {
|
||||
value uint64
|
||||
idx int
|
||||
}
|
||||
|
||||
// kmerMinHeap implements heap.Interface for k-way merge algorithm
|
||||
type kmerMinHeap []heapItem
|
||||
|
||||
func (h kmerMinHeap) Len() int { return len(h) }
|
||||
func (h kmerMinHeap) Less(i, j int) bool { return h[i].value < h[j].value }
|
||||
func (h kmerMinHeap) Swap(i, j int) { h[i], h[j] = h[j], h[i] }
|
||||
|
||||
func (h *kmerMinHeap) Push(x interface{}) {
|
||||
*h = append(*h, x.(heapItem))
|
||||
}
|
||||
|
||||
func (h *kmerMinHeap) Pop() interface{} {
|
||||
old := *h
|
||||
n := len(old)
|
||||
x := old[n-1]
|
||||
*h = old[0 : n-1]
|
||||
return x
|
||||
}
|
||||
|
||||
// QuorumAtLeast returns k-mers present in at least q sets
|
||||
//
|
||||
// Algorithm: K-way merge with min-heap counting
|
||||
//
|
||||
// The algorithm processes all k-mers in sorted order using a min-heap:
|
||||
//
|
||||
// 1. Initialize one iterator per non-empty set
|
||||
// 2. Build a min-heap of (value, set_index) pairs, one per iterator
|
||||
// 3. While heap is not empty:
|
||||
// a. Extract the minimum value v from heap
|
||||
// b. Pop ALL heap items with value == v (counting occurrences)
|
||||
// c. If count >= q, add v to result
|
||||
// d. Advance each popped iterator and re-insert into heap if valid
|
||||
//
|
||||
// This ensures each unique k-mer is counted exactly once across all sets.
|
||||
//
|
||||
// Time complexity: O(M log N)
|
||||
// - M = sum of all set cardinalities (total k-mer occurrences)
|
||||
// - N = number of sets
|
||||
// - Each k-mer occurrence is inserted/extracted from heap once: O(M) operations
|
||||
// - Each heap operation costs O(log N)
|
||||
//
|
||||
// Space complexity: O(N)
|
||||
// - Heap contains at most N elements (one per set iterator)
|
||||
// - Output bitmap size depends on quorum result
|
||||
//
|
||||
// Special cases (optimized):
|
||||
// - q <= 0: returns empty set
|
||||
// - q == 1: delegates to Union() (native OR operations)
|
||||
// - q == n: delegates to Intersect() (native AND operations)
|
||||
// - q > n: returns empty set (impossible to satisfy)
|
||||
func (ksg *KmerSetGroup) QuorumAtLeast(q int) *KmerSet {
|
||||
n := len(ksg.sets)
|
||||
|
||||
// Edge cases
|
||||
if q <= 0 || n == 0 {
|
||||
return NewKmerSet(ksg.k)
|
||||
}
|
||||
if q > n {
|
||||
return NewKmerSet(ksg.k)
|
||||
}
|
||||
if q == 1 {
|
||||
return ksg.Union()
|
||||
}
|
||||
if q == n {
|
||||
return ksg.Intersect()
|
||||
}
|
||||
|
||||
// Initialize iterators for all non-empty sets
|
||||
iterators := make([]roaring64.IntIterable64, 0, n)
|
||||
iterIndices := make([]int, 0, n)
|
||||
|
||||
for i, set := range ksg.sets {
|
||||
if set.Len() > 0 {
|
||||
iter := set.bitmap.Iterator()
|
||||
if iter.HasNext() {
|
||||
iterators = append(iterators, iter)
|
||||
iterIndices = append(iterIndices, i)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if len(iterators) == 0 {
|
||||
return NewKmerSet(ksg.k)
|
||||
}
|
||||
|
||||
// Initialize heap with first value from each iterator
|
||||
h := make(kmerMinHeap, len(iterators))
|
||||
for i, iter := range iterators {
|
||||
h[i] = heapItem{value: iter.Next(), idx: i}
|
||||
}
|
||||
heap.Init(&h)
|
||||
|
||||
// Result bitmap
|
||||
result := roaring64.New()
|
||||
|
||||
// K-way merge with counting
|
||||
for len(h) > 0 {
|
||||
minVal := h[0].value
|
||||
count := 0
|
||||
activeIndices := make([]int, 0, len(h))
|
||||
|
||||
// Pop all elements with same value (count occurrences)
|
||||
for len(h) > 0 && h[0].value == minVal {
|
||||
item := heap.Pop(&h).(heapItem)
|
||||
count++
|
||||
activeIndices = append(activeIndices, item.idx)
|
||||
}
|
||||
|
||||
// Add to result if quorum reached
|
||||
if count >= q {
|
||||
result.Add(minVal)
|
||||
}
|
||||
|
||||
// Advance iterators and re-insert into heap
|
||||
for _, iterIdx := range activeIndices {
|
||||
if iterators[iterIdx].HasNext() {
|
||||
heap.Push(&h, heapItem{
|
||||
value: iterators[iterIdx].Next(),
|
||||
idx: iterIdx,
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return NewKmerSetFromBitmap(ksg.k, result)
|
||||
}
|
||||
|
||||
// QuorumAtMost returns k-mers present in at most q sets
|
||||
//
|
||||
// Algorithm: Uses the mathematical identity
|
||||
// AtMost(q) = Union() - AtLeast(q+1)
|
||||
//
|
||||
// Proof:
|
||||
// - Union() contains all k-mers present in at least 1 set
|
||||
// - AtLeast(q+1) contains all k-mers present in q+1 or more sets
|
||||
// - Their difference contains only k-mers present in at most q sets
|
||||
//
|
||||
// Implementation:
|
||||
// 1. Compute U = Union()
|
||||
// 2. Compute A = QuorumAtLeast(q+1)
|
||||
// 3. Return U - A using bitmap AndNot operation
|
||||
//
|
||||
// Time complexity: O(M log N)
|
||||
// - Union(): O(M) with native OR operations
|
||||
// - QuorumAtLeast(q+1): O(M log N)
|
||||
// - AndNot: O(|U|) where |U| <= M
|
||||
// - Total: O(M log N)
|
||||
//
|
||||
// Space complexity: O(N)
|
||||
// - Inherited from QuorumAtLeast heap
|
||||
//
|
||||
// Special cases:
|
||||
// - q <= 0: returns empty set
|
||||
// - q >= n: returns Union() (all k-mers are in at most n sets)
|
||||
func (ksg *KmerSetGroup) QuorumAtMost(q int) *KmerSet {
|
||||
n := len(ksg.sets)
|
||||
|
||||
// Edge cases
|
||||
if q <= 0 {
|
||||
return NewKmerSet(ksg.k)
|
||||
}
|
||||
if q >= n {
|
||||
return ksg.Union()
|
||||
}
|
||||
|
||||
// Compute Union() - AtLeast(q+1)
|
||||
union := ksg.Union()
|
||||
atLeastQ1 := ksg.QuorumAtLeast(q + 1)
|
||||
|
||||
// Difference: elements in union but not in atLeastQ1
|
||||
result := union.bitmap.Clone()
|
||||
result.AndNot(atLeastQ1.bitmap)
|
||||
|
||||
return NewKmerSetFromBitmap(ksg.k, result)
|
||||
}
|
||||
|
||||
// QuorumExactly returns k-mers present in exactly q sets
|
||||
//
|
||||
// Algorithm: Uses the mathematical identity
|
||||
// Exactly(q) = AtLeast(q) - AtLeast(q+1)
|
||||
//
|
||||
// Proof:
|
||||
// - AtLeast(q) contains all k-mers present in q or more sets
|
||||
// - AtLeast(q+1) contains all k-mers present in q+1 or more sets
|
||||
// - Their difference contains only k-mers present in exactly q sets
|
||||
//
|
||||
// Implementation:
|
||||
// 1. Compute A = QuorumAtLeast(q)
|
||||
// 2. Compute B = QuorumAtLeast(q+1)
|
||||
// 3. Return A - B using bitmap AndNot operation
|
||||
//
|
||||
// Time complexity: O(M log N)
|
||||
// - Two calls to QuorumAtLeast: 2 * O(M log N)
|
||||
// - One AndNot operation: O(|A|) where |A| <= M
|
||||
// - Total: O(M log N) since AndNot is dominated by merge operations
|
||||
//
|
||||
// Space complexity: O(N)
|
||||
// - Inherited from QuorumAtLeast heap
|
||||
// - Two temporary bitmaps for intermediate results
|
||||
//
|
||||
// Special cases:
|
||||
// - q <= 0: returns empty set
|
||||
// - q > n: returns empty set (impossible to have k-mer in more than n sets)
|
||||
func (ksg *KmerSetGroup) QuorumExactly(q int) *KmerSet {
|
||||
n := len(ksg.sets)
|
||||
|
||||
// Edge cases
|
||||
if q <= 0 || q > n {
|
||||
return NewKmerSet(ksg.k)
|
||||
}
|
||||
|
||||
// Compute AtLeast(q) - AtLeast(q+1)
|
||||
aq := ksg.QuorumAtLeast(q)
|
||||
aq1 := ksg.QuorumAtLeast(q + 1)
|
||||
|
||||
// Difference: elements in aq but not in aq1
|
||||
result := aq.bitmap.Clone()
|
||||
result.AndNot(aq1.bitmap)
|
||||
|
||||
return NewKmerSetFromBitmap(ksg.k, result)
|
||||
}
|
||||
395
pkg/obikmer/kmer_set_group_quorum_test.go
Normal file
395
pkg/obikmer/kmer_set_group_quorum_test.go
Normal file
@@ -0,0 +1,395 @@
|
||||
package obikmer
|
||||
|
||||
import (
|
||||
"testing"
|
||||
)
|
||||
|
||||
// TestQuorumAtLeastEdgeCases tests edge cases for QuorumAtLeast: an
// all-empty group, non-positive quorums, and a quorum larger than the
// number of sets. Each must yield an empty result.
func TestQuorumAtLeastEdgeCases(t *testing.T) {
	k := 5

	// Test group with all empty sets
	emptyGroup := NewKmerSetGroup(k, 3)
	result := emptyGroup.QuorumAtLeast(1)
	if result.Len() != 0 {
		t.Errorf("Empty sets: expected 0 k-mers, got %d", result.Len())
	}

	// Test q <= 0
	group := NewKmerSetGroup(k, 3)
	result = group.QuorumAtLeast(0)
	if result.Len() != 0 {
		t.Errorf("q=0: expected 0 k-mers, got %d", result.Len())
	}

	result = group.QuorumAtLeast(-1)
	if result.Len() != 0 {
		t.Errorf("q=-1: expected 0 k-mers, got %d", result.Len())
	}

	// Test q > n (one k-mer present, but quorum 10 > 3 sets)
	group.Get(0).AddKmerCode(1)
	result = group.QuorumAtLeast(10)
	if result.Len() != 0 {
		t.Errorf("q>n: expected 0 k-mers, got %d", result.Len())
	}
}
|
||||
|
||||
// TestQuorumAtLeastQ1 tests q=1 (should equal Union): both cardinality and
// per-k-mer membership must match across the two results.
func TestQuorumAtLeastQ1(t *testing.T) {
	k := 5
	group := NewKmerSetGroup(k, 3)

	// Add different k-mers to each set (overlapping chain 1-2, 2-3, 3-4)
	group.Get(0).AddKmerCode(1)
	group.Get(0).AddKmerCode(2)
	group.Get(1).AddKmerCode(2)
	group.Get(1).AddKmerCode(3)
	group.Get(2).AddKmerCode(3)
	group.Get(2).AddKmerCode(4)

	quorum := group.QuorumAtLeast(1)
	union := group.Union()

	if quorum.Len() != union.Len() {
		t.Errorf("QuorumAtLeast(1) length %d != Union length %d", quorum.Len(), union.Len())
	}

	// Check all elements match
	for kmer := uint64(1); kmer <= 4; kmer++ {
		if quorum.Contains(kmer) != union.Contains(kmer) {
			t.Errorf("Mismatch for k-mer %d", kmer)
		}
	}
}
|
||||
|
||||
// TestQuorumAtLeastQN tests q=n (should equal Intersect): only the k-mers
// shared by all three sets (10 and 20) may appear in the result.
func TestQuorumAtLeastQN(t *testing.T) {
	k := 5
	group := NewKmerSetGroup(k, 3)

	// Add some common k-mers and some unique
	for i := 0; i < 3; i++ {
		group.Get(i).AddKmerCode(10) // common to all
		group.Get(i).AddKmerCode(20) // common to all
	}
	group.Get(0).AddKmerCode(1) // unique to set 0
	group.Get(1).AddKmerCode(2) // unique to set 1

	quorum := group.QuorumAtLeast(3)
	intersect := group.Intersect()

	if quorum.Len() != intersect.Len() {
		t.Errorf("QuorumAtLeast(n) length %d != Intersect length %d", quorum.Len(), intersect.Len())
	}

	if quorum.Len() != 2 {
		t.Errorf("Expected 2 common k-mers, got %d", quorum.Len())
	}

	if !quorum.Contains(10) || !quorum.Contains(20) {
		t.Error("Missing common k-mers")
	}

	if quorum.Contains(1) || quorum.Contains(2) {
		t.Error("Unique k-mers should not be in result")
	}
}
|
||||
|
||||
// TestQuorumAtLeastGeneral tests general quorum values against a staircase
// fixture in which k-mer i occurs in exactly i sets, so QuorumAtLeast(q)
// must return exactly the k-mers {q, q+1, ..., 5}.
func TestQuorumAtLeastGeneral(t *testing.T) {
	k := 5
	group := NewKmerSetGroup(k, 5)

	// Setup: k-mer i appears in i sets (for i=1..5)
	// k-mer 1: in set 0
	// k-mer 2: in sets 0,1
	// k-mer 3: in sets 0,1,2
	// k-mer 4: in sets 0,1,2,3
	// k-mer 5: in sets 0,1,2,3,4 (all)

	for kmer := uint64(1); kmer <= 5; kmer++ {
		for setIdx := 0; setIdx < int(kmer); setIdx++ {
			group.Get(setIdx).AddKmerCode(kmer)
		}
	}

	// Expected membership per quorum value q.
	tests := []struct {
		q        int
		expected map[uint64]bool
	}{
		{1, map[uint64]bool{1: true, 2: true, 3: true, 4: true, 5: true}},
		{2, map[uint64]bool{2: true, 3: true, 4: true, 5: true}},
		{3, map[uint64]bool{3: true, 4: true, 5: true}},
		{4, map[uint64]bool{4: true, 5: true}},
		{5, map[uint64]bool{5: true}},
	}

	for _, tt := range tests {
		result := group.QuorumAtLeast(tt.q)

		if result.Len() != uint64(len(tt.expected)) {
			t.Errorf("q=%d: expected %d k-mers, got %d", tt.q, len(tt.expected), result.Len())
		}

		// Check both inclusion and exclusion for every candidate k-mer.
		for kmer := uint64(1); kmer <= 5; kmer++ {
			shouldContain := tt.expected[kmer]
			doesContain := result.Contains(kmer)
			if shouldContain != doesContain {
				t.Errorf("q=%d, k-mer=%d: expected contains=%v, got %v", tt.q, kmer, shouldContain, doesContain)
			}
		}
	}
}
|
||||
|
||||
// TestQuorumExactlyBasic tests QuorumExactly basic functionality using the
// staircase fixture (k-mer i occurs in exactly i sets): QuorumExactly(q)
// must return the singleton {q}.
func TestQuorumExactlyBasic(t *testing.T) {
	k := 5
	group := NewKmerSetGroup(k, 5)

	// Setup: k-mer i appears in exactly i sets
	for kmer := uint64(1); kmer <= 5; kmer++ {
		for setIdx := 0; setIdx < int(kmer); setIdx++ {
			group.Get(setIdx).AddKmerCode(kmer)
		}
	}

	tests := []struct {
		q        int
		expected []uint64
	}{
		{1, []uint64{1}},
		{2, []uint64{2}},
		{3, []uint64{3}},
		{4, []uint64{4}},
		{5, []uint64{5}},
	}

	for _, tt := range tests {
		result := group.QuorumExactly(tt.q)

		// Length check rules out spurious extra k-mers.
		if result.Len() != uint64(len(tt.expected)) {
			t.Errorf("q=%d: expected %d k-mers, got %d", tt.q, len(tt.expected), result.Len())
		}

		for _, kmer := range tt.expected {
			if !result.Contains(kmer) {
				t.Errorf("q=%d: missing k-mer %d", tt.q, kmer)
			}
		}
	}
}
|
||||
|
||||
// TestQuorumIdentity tests the mathematical identity: Exactly(q) = AtLeast(q) - AtLeast(q+1)
//
// NOTE(review): only one direction is checked here — every element of
// Exactly(q) is verified to lie in AtLeast(q) and outside AtLeast(q+1);
// elements possibly missing from Exactly(q) are not detected by this test.
func TestQuorumIdentity(t *testing.T) {
	k := 5
	group := NewKmerSetGroup(k, 4)

	// Add random distribution: k-mer i (1..4) occurs in i sets.
	group.Get(0).AddKmerCode(1)
	group.Get(0).AddKmerCode(2)
	group.Get(0).AddKmerCode(3)

	group.Get(1).AddKmerCode(2)
	group.Get(1).AddKmerCode(3)
	group.Get(1).AddKmerCode(4)

	group.Get(2).AddKmerCode(3)
	group.Get(2).AddKmerCode(4)

	group.Get(3).AddKmerCode(4)

	for q := 1; q <= 4; q++ {
		exactly := group.QuorumExactly(q)
		atLeast := group.QuorumAtLeast(q)
		atLeastPlus1 := group.QuorumAtLeast(q + 1)

		// Verify: every element in exactly(q) is in atLeast(q)
		// and NOT in atLeast(q+1)
		iter := exactly.Iterator()
		for iter.HasNext() {
			kmer := iter.Next()
			if !atLeast.Contains(kmer) {
				t.Errorf("q=%d: k-mer %d in Exactly but not in AtLeast", q, kmer)
			}
			if atLeastPlus1.Contains(kmer) {
				t.Errorf("q=%d: k-mer %d in Exactly but also in AtLeast(q+1)", q, kmer)
			}
		}
	}
}
|
||||
|
||||
// TestQuorumDisjointSets tests quorum on completely disjoint sets: with no
// shared k-mer, q=1 returns everything and q=2 returns nothing.
func TestQuorumDisjointSets(t *testing.T) {
	k := 5
	group := NewKmerSetGroup(k, 3)

	// Each set has unique k-mers
	group.Get(0).AddKmerCode(1)
	group.Get(1).AddKmerCode(2)
	group.Get(2).AddKmerCode(3)

	// q=1 should give all
	result := group.QuorumAtLeast(1)
	if result.Len() != 3 {
		t.Errorf("Disjoint sets q=1: expected 3, got %d", result.Len())
	}

	// q=2 should give none
	result = group.QuorumAtLeast(2)
	if result.Len() != 0 {
		t.Errorf("Disjoint sets q=2: expected 0, got %d", result.Len())
	}
}
|
||||
|
||||
// TestQuorumIdenticalSets tests quorum on identical sets: since every
// k-mer is in all 3 sets, any quorum 1..3 must return all 3 k-mers.
func TestQuorumIdenticalSets(t *testing.T) {
	k := 5
	group := NewKmerSetGroup(k, 3)

	// All sets have same k-mers
	for i := 0; i < 3; i++ {
		group.Get(i).AddKmerCode(10)
		group.Get(i).AddKmerCode(20)
		group.Get(i).AddKmerCode(30)
	}

	// Any q <= n should give all k-mers
	for q := 1; q <= 3; q++ {
		result := group.QuorumAtLeast(q)
		if result.Len() != 3 {
			t.Errorf("Identical sets q=%d: expected 3, got %d", q, result.Len())
		}
	}
}
|
||||
|
||||
// TestQuorumLargeNumbers tests with large k-mer values near the top of the
// uint64 range. largeKmers[i] is added to sets 0..i, so it occurs in i+1
// sets; QuorumAtLeast(2) must keep largeKmers[1] and largeKmers[2] only.
func TestQuorumLargeNumbers(t *testing.T) {
	k := 21
	group := NewKmerSetGroup(k, 3)

	// Use large uint64 values (actual k-mer encodings)
	largeKmers := []uint64{
		0x1234567890ABCDEF,
		0xFEDCBA0987654321,
		0xAAAAAAAAAAAAAAAA,
	}

	// Add to multiple sets: largeKmers[i] goes into sets 0..i.
	for i := 0; i < 3; i++ {
		for j := 0; j <= i; j++ {
			group.Get(j).AddKmerCode(largeKmers[i])
		}
	}

	result := group.QuorumAtLeast(2)
	if result.Len() != 2 {
		t.Errorf("Large numbers q=2: expected 2, got %d", result.Len())
	}

	if !result.Contains(largeKmers[1]) || !result.Contains(largeKmers[2]) {
		t.Error("Large numbers: wrong k-mers in result")
	}
}
|
||||
|
||||
// TestQuorumAtMostBasic tests QuorumAtMost basic functionality with the
// staircase fixture (k-mer i occurs in exactly i sets): QuorumAtMost(q)
// must return {1, ..., q}, capped at the full universe for q >= n.
func TestQuorumAtMostBasic(t *testing.T) {
	k := 5
	group := NewKmerSetGroup(k, 5)

	// Setup: k-mer i appears in exactly i sets
	for kmer := uint64(1); kmer <= 5; kmer++ {
		for setIdx := 0; setIdx < int(kmer); setIdx++ {
			group.Get(setIdx).AddKmerCode(kmer)
		}
	}

	tests := []struct {
		q        int
		expected []uint64
	}{
		{0, []uint64{}},               // at most 0: none
		{1, []uint64{1}},              // at most 1: only k-mer 1
		{2, []uint64{1, 2}},           // at most 2: k-mers 1,2
		{3, []uint64{1, 2, 3}},        // at most 3: k-mers 1,2,3
		{4, []uint64{1, 2, 3, 4}},     // at most 4: k-mers 1,2,3,4
		{5, []uint64{1, 2, 3, 4, 5}},  // at most 5: all k-mers
		{10, []uint64{1, 2, 3, 4, 5}}, // at most 10: all k-mers
	}

	for _, tt := range tests {
		result := group.QuorumAtMost(tt.q)

		if result.Len() != uint64(len(tt.expected)) {
			t.Errorf("q=%d: expected %d k-mers, got %d", tt.q, len(tt.expected), result.Len())
		}

		for _, kmer := range tt.expected {
			if !result.Contains(kmer) {
				t.Errorf("q=%d: missing k-mer %d", tt.q, kmer)
			}
		}
	}
}
|
||||
|
||||
// TestQuorumComplementIdentity tests that AtMost and AtLeast are
// complementary: for every q, AtMost(q) and AtLeast(q+1) must partition
// the union (cover it completely and never overlap).
func TestQuorumComplementIdentity(t *testing.T) {
	k := 5
	group := NewKmerSetGroup(k, 4)

	// Add random distribution: k-mer i (1..4) occurs in i sets.
	group.Get(0).AddKmerCode(1)
	group.Get(0).AddKmerCode(2)
	group.Get(0).AddKmerCode(3)

	group.Get(1).AddKmerCode(2)
	group.Get(1).AddKmerCode(3)
	group.Get(1).AddKmerCode(4)

	group.Get(2).AddKmerCode(3)
	group.Get(2).AddKmerCode(4)

	group.Get(3).AddKmerCode(4)

	union := group.Union()

	for q := 1; q < 4; q++ {
		atMost := group.QuorumAtMost(q)
		atLeast := group.QuorumAtLeast(q + 1)

		// Verify: AtMost(q) ∪ AtLeast(q+1) = Union()
		combined := atMost.Union(atLeast)

		if combined.Len() != union.Len() {
			t.Errorf("q=%d: AtMost(q) ∪ AtLeast(q+1) has %d k-mers, Union has %d",
				q, combined.Len(), union.Len())
		}

		// Verify: AtMost(q) ∩ AtLeast(q+1) = ∅
		overlap := atMost.Intersect(atLeast)
		if overlap.Len() != 0 {
			t.Errorf("q=%d: AtMost(q) and AtLeast(q+1) overlap with %d k-mers",
				q, overlap.Len())
		}
	}
}
|
||||
|
||||
// BenchmarkQuorumAtLeast benchmarks quorum operations on 10 sets of up to
// 10000 k-mers each. Set i receives the k-mers whose residue modulo n is
// <= i, so higher-indexed sets are denser and membership counts vary.
func BenchmarkQuorumAtLeast(b *testing.B) {
	k := 21
	n := 10
	group := NewKmerSetGroup(k, n)

	// Populate with realistic data
	for i := 0; i < n; i++ {
		for j := uint64(0); j < 10000; j++ {
			if (j % uint64(n)) <= uint64(i) {
				group.Get(i).AddKmerCode(j)
			}
		}
	}

	// Exclude the setup cost from the measured time.
	b.ResetTimer()
	for i := 0; i < b.N; i++ {
		_ = group.QuorumAtLeast(5)
	}
}
|
||||
376
pkg/obikmer/kmer_set_persistence.go
Normal file
376
pkg/obikmer/kmer_set_persistence.go
Normal file
@@ -0,0 +1,376 @@
|
||||
package obikmer
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
|
||||
"github.com/pelletier/go-toml/v2"
|
||||
"gopkg.in/yaml.v3"
|
||||
)
|
||||
|
||||
// MetadataFormat identifies the serialization format used for the metadata
// file that accompanies a persisted KmerSet or KmerSetGroup.
type MetadataFormat int

const (
	FormatTOML MetadataFormat = iota
	FormatYAML
	FormatJSON
)

// String returns the file extension associated with the format.
// Unknown values fall back to "toml".
func (f MetadataFormat) String() string {
	extensions := map[MetadataFormat]string{
		FormatTOML: "toml",
		FormatYAML: "yaml",
		FormatJSON: "json",
	}
	if ext, known := extensions[f]; known {
		return ext
	}
	return "toml"
}
|
||||
|
||||
// KmerSetMetadata holds the metadata of a KmerSet or KmerSetGroup as
// written to the on-disk metadata file (TOML, YAML or JSON).
type KmerSetMetadata struct {
	ID           string                   `toml:"id,omitempty" yaml:"id,omitempty" json:"id,omitempty"`                                  // Unique identifier
	K            int                      `toml:"k" yaml:"k" json:"k"`                                                                   // k-mer size
	Type         string                   `toml:"type" yaml:"type" json:"type"`                                                          // "KmerSet" or "KmerSetGroup"
	Size         int                      `toml:"size" yaml:"size" json:"size"`                                                          // 1 for a KmerSet, n for a KmerSetGroup
	Files        []string                 `toml:"files" yaml:"files" json:"files"`                                                       // List of .roaring bitmap files
	SetsIDs      []string                 `toml:"sets_ids,omitempty" yaml:"sets_ids,omitempty" json:"sets_ids,omitempty"`                // IDs of the individual KmerSets
	UserMetadata map[string]interface{}   `toml:"user_metadata,omitempty" yaml:"user_metadata,omitempty" json:"user_metadata,omitempty"` // User metadata of the KmerSet or KmerSetGroup
	SetsMetadata []map[string]interface{} `toml:"sets_metadata,omitempty" yaml:"sets_metadata,omitempty" json:"sets_metadata,omitempty"` // Metadata of each individual KmerSet within a KmerSetGroup
}
|
||||
|
||||
// Save persists a KmerSet into a directory.
// Layout: directory/metadata.{toml,yaml,json} + directory/set_0.roaring
//
// The directory is created if necessary. The metadata file records the
// set ID, k-mer size and user metadata; the bitmap itself is written as a
// single roaring64 serialization in set_0.roaring.
func (ks *KmerSet) Save(directory string, format MetadataFormat) error {
	// Create the directory if necessary
	if err := os.MkdirAll(directory, 0755); err != nil {
		return fmt.Errorf("failed to create directory %s: %w", directory, err)
	}

	// Metadata describing this single-set layout
	metadata := KmerSetMetadata{
		ID:           ks.id,
		K:            ks.k,
		Type:         "KmerSet",
		Size:         1,
		Files:        []string{"set_0.roaring"},
		UserMetadata: ks.Metadata, // persist the user metadata
	}

	// Write the metadata file, named after the chosen format
	if err := saveMetadata(filepath.Join(directory, "metadata."+format.String()), metadata, format); err != nil {
		return err
	}

	// Write the bitmap
	bitmapPath := filepath.Join(directory, "set_0.roaring")
	file, err := os.Create(bitmapPath)
	if err != nil {
		return fmt.Errorf("failed to create bitmap file %s: %w", bitmapPath, err)
	}
	// NOTE(review): the deferred Close's error is ignored, so a failure
	// while flushing buffered data would go unreported — consider checking it.
	defer file.Close()

	if _, err := ks.bitmap.WriteTo(file); err != nil {
		return fmt.Errorf("failed to write bitmap: %w", err)
	}

	return nil
}
|
||||
|
||||
// LoadKmerSet loads a KmerSet from a directory written by (*KmerSet).Save.
// The metadata file is located regardless of its format, validated
// (type must be "KmerSet", exactly one bitmap file), then the set ID,
// user metadata and the roaring bitmap are restored.
func LoadKmerSet(directory string) (*KmerSet, error) {
	// Read the metadata (trying every supported format)
	metadata, err := loadMetadata(directory)
	if err != nil {
		return nil, err
	}

	// Check the declared type
	if metadata.Type != "KmerSet" {
		return nil, fmt.Errorf("invalid type: expected KmerSet, got %s", metadata.Type)
	}

	// A KmerSet must reference exactly one bitmap file
	if metadata.Size != 1 || len(metadata.Files) != 1 {
		return nil, fmt.Errorf("KmerSet must have exactly 1 bitmap file, got %d", len(metadata.Files))
	}

	// Open the bitmap file
	bitmapPath := filepath.Join(directory, metadata.Files[0])
	file, err := os.Open(bitmapPath)
	if err != nil {
		return nil, fmt.Errorf("failed to open bitmap file %s: %w", bitmapPath, err)
	}
	defer file.Close()

	ks := NewKmerSet(metadata.K)

	// Restore the ID
	ks.id = metadata.ID

	// Restore the user metadata
	if metadata.UserMetadata != nil {
		ks.Metadata = metadata.UserMetadata
	}

	// Deserialize the roaring bitmap into the fresh set
	if _, err := ks.bitmap.ReadFrom(file); err != nil {
		return nil, fmt.Errorf("failed to read bitmap: %w", err)
	}

	return ks, nil
}
|
||||
|
||||
// Save persists a KmerSetGroup into a directory.
// Layout: directory/metadata.{toml,yaml,json} + directory/set_0.roaring, set_1.roaring, ...
//
// The metadata file records the group ID, k-mer size, per-set IDs and both
// group-level and per-set user metadata; each member set is serialized to
// its own set_<i>.roaring file.
func (ksg *KmerSetGroup) Save(directory string, format MetadataFormat) error {
	// Create the directory if necessary
	if err := os.MkdirAll(directory, 0755); err != nil {
		return fmt.Errorf("failed to create directory %s: %w", directory, err)
	}

	// One bitmap file name per member set, indexed by position
	files := make([]string, len(ksg.sets))
	for i := range ksg.sets {
		files[i] = fmt.Sprintf("set_%d.roaring", i)
	}

	// Collect the ID and metadata of each individual KmerSet
	setsIDs := make([]string, len(ksg.sets))
	setsMetadata := make([]map[string]interface{}, len(ksg.sets))
	for i, ks := range ksg.sets {
		setsIDs[i] = ks.id
		setsMetadata[i] = ks.Metadata
	}

	metadata := KmerSetMetadata{
		ID:           ksg.id,
		K:            ksg.k,
		Type:         "KmerSetGroup",
		Size:         len(ksg.sets),
		Files:        files,
		SetsIDs:      setsIDs,      // ID of each member set
		UserMetadata: ksg.Metadata, // metadata of the group itself
		SetsMetadata: setsMetadata, // metadata of each member set
	}

	// Write the metadata file, named after the chosen format
	if err := saveMetadata(filepath.Join(directory, "metadata."+format.String()), metadata, format); err != nil {
		return err
	}

	// Write each bitmap; files are closed explicitly (not deferred) so
	// descriptors are released as the loop progresses.
	for i, ks := range ksg.sets {
		bitmapPath := filepath.Join(directory, files[i])
		file, err := os.Create(bitmapPath)
		if err != nil {
			return fmt.Errorf("failed to create bitmap file %s: %w", bitmapPath, err)
		}

		if _, err := ks.bitmap.WriteTo(file); err != nil {
			file.Close()
			return fmt.Errorf("failed to write bitmap %d: %w", i, err)
		}
		file.Close()
	}

	return nil
}
|
||||
|
||||
// LoadKmerSetGroup loads a KmerSetGroup from a directory written by
// (*KmerSetGroup).Save. The metadata file is located regardless of its
// format and validated (type must be "KmerSetGroup", file count must match
// the declared size); then the group ID, group and per-set metadata,
// per-set IDs and every member bitmap are restored.
func LoadKmerSetGroup(directory string) (*KmerSetGroup, error) {
	// Read the metadata (trying every supported format)
	metadata, err := loadMetadata(directory)
	if err != nil {
		return nil, err
	}

	// Check the declared type
	if metadata.Type != "KmerSetGroup" {
		return nil, fmt.Errorf("invalid type: expected KmerSetGroup, got %s", metadata.Type)
	}

	// Consistency check: declared size versus number of bitmap files
	if metadata.Size != len(metadata.Files) {
		return nil, fmt.Errorf("size mismatch: size=%d but %d files listed", metadata.Size, len(metadata.Files))
	}

	// Create the group with the persisted k and member count
	ksg := NewKmerSetGroup(metadata.K, metadata.Size)

	// Restore the group ID
	ksg.id = metadata.ID

	// Restore the group-level metadata
	if metadata.UserMetadata != nil {
		ksg.Metadata = metadata.UserMetadata
	}

	// Restore the ID of each member set (silently skipped if the list is
	// absent or of the wrong length)
	if metadata.SetsIDs != nil && len(metadata.SetsIDs) == metadata.Size {
		for i := range ksg.sets {
			ksg.sets[i].id = metadata.SetsIDs[i]
		}
	}

	// Restore the metadata of each member set; unlike SetsIDs, a length
	// mismatch here is treated as an error
	if metadata.SetsMetadata != nil {
		if len(metadata.SetsMetadata) != metadata.Size {
			return nil, fmt.Errorf("sets metadata size mismatch: expected %d, got %d", metadata.Size, len(metadata.SetsMetadata))
		}
		for i := range ksg.sets {
			ksg.sets[i].Metadata = metadata.SetsMetadata[i]
		}
	}

	// Deserialize each bitmap; files are closed explicitly inside the loop
	for i, filename := range metadata.Files {
		bitmapPath := filepath.Join(directory, filename)
		file, err := os.Open(bitmapPath)
		if err != nil {
			return nil, fmt.Errorf("failed to open bitmap file %s: %w", bitmapPath, err)
		}

		if _, err := ksg.sets[i].bitmap.ReadFrom(file); err != nil {
			file.Close()
			return nil, fmt.Errorf("failed to read bitmap %d: %w", i, err)
		}
		file.Close()
	}

	return ksg, nil
}
|
||||
|
||||
// saveMetadata sauvegarde les métadonnées dans le format spécifié
|
||||
func saveMetadata(path string, metadata KmerSetMetadata, format MetadataFormat) error {
|
||||
file, err := os.Create(path)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to create metadata file %s: %w", path, err)
|
||||
}
|
||||
defer file.Close()
|
||||
|
||||
var encoder interface{ Encode(interface{}) error }
|
||||
|
||||
switch format {
|
||||
case FormatTOML:
|
||||
encoder = toml.NewEncoder(file)
|
||||
case FormatYAML:
|
||||
encoder = yaml.NewEncoder(file)
|
||||
case FormatJSON:
|
||||
jsonEncoder := json.NewEncoder(file)
|
||||
jsonEncoder.SetIndent("", " ")
|
||||
encoder = jsonEncoder
|
||||
default:
|
||||
return fmt.Errorf("unsupported format: %v", format)
|
||||
}
|
||||
|
||||
if err := encoder.Encode(metadata); err != nil {
|
||||
return fmt.Errorf("failed to encode metadata: %w", err)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// loadMetadata charge les métadonnées depuis un répertoire
|
||||
// Essaie tous les formats (TOML, YAML, JSON) dans l'ordre
|
||||
func loadMetadata(directory string) (*KmerSetMetadata, error) {
|
||||
formats := []MetadataFormat{FormatTOML, FormatYAML, FormatJSON}
|
||||
|
||||
var lastErr error
|
||||
for _, format := range formats {
|
||||
path := filepath.Join(directory, "metadata."+format.String())
|
||||
|
||||
// Vérifier si le fichier existe
|
||||
if _, err := os.Stat(path); os.IsNotExist(err) {
|
||||
continue
|
||||
}
|
||||
|
||||
metadata, err := loadMetadataFromFile(path, format)
|
||||
if err != nil {
|
||||
lastErr = err
|
||||
continue
|
||||
}
|
||||
return metadata, nil
|
||||
}
|
||||
|
||||
if lastErr != nil {
|
||||
return nil, fmt.Errorf("failed to load metadata: %w", lastErr)
|
||||
}
|
||||
return nil, fmt.Errorf("no metadata file found in %s (tried .toml, .yaml, .json)", directory)
|
||||
}
|
||||
|
||||
// loadMetadataFromFile charge les métadonnées depuis un fichier spécifique
|
||||
func loadMetadataFromFile(path string, format MetadataFormat) (*KmerSetMetadata, error) {
|
||||
file, err := os.Open(path)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to open metadata file %s: %w", path, err)
|
||||
}
|
||||
defer file.Close()
|
||||
|
||||
var metadata KmerSetMetadata
|
||||
var decoder interface{ Decode(interface{}) error }
|
||||
|
||||
switch format {
|
||||
case FormatTOML:
|
||||
decoder = toml.NewDecoder(file)
|
||||
case FormatYAML:
|
||||
decoder = yaml.NewDecoder(file)
|
||||
case FormatJSON:
|
||||
decoder = json.NewDecoder(file)
|
||||
default:
|
||||
return nil, fmt.Errorf("unsupported format: %v", format)
|
||||
}
|
||||
|
||||
if err := decoder.Decode(&metadata); err != nil {
|
||||
return nil, fmt.Errorf("failed to decode metadata: %w", err)
|
||||
}
|
||||
|
||||
return &metadata, nil
|
||||
}
|
||||
|
||||
// DetectFormat détecte le format des métadonnées dans un répertoire
|
||||
func DetectFormat(directory string) (MetadataFormat, error) {
|
||||
formats := []MetadataFormat{FormatTOML, FormatYAML, FormatJSON}
|
||||
|
||||
for _, format := range formats {
|
||||
path := filepath.Join(directory, "metadata."+format.String())
|
||||
if _, err := os.Stat(path); err == nil {
|
||||
return format, nil
|
||||
}
|
||||
}
|
||||
|
||||
return FormatTOML, fmt.Errorf("no metadata file found in %s", directory)
|
||||
}
|
||||
|
||||
// IsKmerSetDirectory vérifie si un répertoire contient un KmerSet ou KmerSetGroup
|
||||
func IsKmerSetDirectory(directory string) (bool, string, error) {
|
||||
metadata, err := loadMetadata(directory)
|
||||
if err != nil {
|
||||
return false, "", err
|
||||
}
|
||||
|
||||
return true, metadata.Type, nil
|
||||
}
|
||||
|
||||
// ListBitmapFiles liste tous les fichiers .roaring dans un répertoire
|
||||
func ListBitmapFiles(directory string) ([]string, error) {
|
||||
entries, err := os.ReadDir(directory)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to read directory %s: %w", directory, err)
|
||||
}
|
||||
|
||||
var files []string
|
||||
for _, entry := range entries {
|
||||
if !entry.IsDir() && strings.HasSuffix(entry.Name(), ".roaring") {
|
||||
files = append(files, entry.Name())
|
||||
}
|
||||
}
|
||||
|
||||
return files, nil
|
||||
}
|
||||
272
pkg/obikmer/kmer_set_test.go
Normal file
272
pkg/obikmer/kmer_set_test.go
Normal file
@@ -0,0 +1,272 @@
|
||||
package obikmer
|
||||
|
||||
import (
|
||||
"math"
|
||||
"testing"
|
||||
)
|
||||
|
||||
func TestJaccardDistanceIdentical(t *testing.T) {
|
||||
ks1 := NewKmerSet(5)
|
||||
ks1.AddKmerCode(100)
|
||||
ks1.AddKmerCode(200)
|
||||
ks1.AddKmerCode(300)
|
||||
|
||||
ks2 := NewKmerSet(5)
|
||||
ks2.AddKmerCode(100)
|
||||
ks2.AddKmerCode(200)
|
||||
ks2.AddKmerCode(300)
|
||||
|
||||
distance := ks1.JaccardDistance(ks2)
|
||||
similarity := ks1.JaccardSimilarity(ks2)
|
||||
|
||||
if distance != 0.0 {
|
||||
t.Errorf("Expected distance 0.0 for identical sets, got %f", distance)
|
||||
}
|
||||
|
||||
if similarity != 1.0 {
|
||||
t.Errorf("Expected similarity 1.0 for identical sets, got %f", similarity)
|
||||
}
|
||||
}
|
||||
|
||||
func TestJaccardDistanceDisjoint(t *testing.T) {
|
||||
ks1 := NewKmerSet(5)
|
||||
ks1.AddKmerCode(100)
|
||||
ks1.AddKmerCode(200)
|
||||
ks1.AddKmerCode(300)
|
||||
|
||||
ks2 := NewKmerSet(5)
|
||||
ks2.AddKmerCode(400)
|
||||
ks2.AddKmerCode(500)
|
||||
ks2.AddKmerCode(600)
|
||||
|
||||
distance := ks1.JaccardDistance(ks2)
|
||||
similarity := ks1.JaccardSimilarity(ks2)
|
||||
|
||||
if distance != 1.0 {
|
||||
t.Errorf("Expected distance 1.0 for disjoint sets, got %f", distance)
|
||||
}
|
||||
|
||||
if similarity != 0.0 {
|
||||
t.Errorf("Expected similarity 0.0 for disjoint sets, got %f", similarity)
|
||||
}
|
||||
}
|
||||
|
||||
func TestJaccardDistancePartialOverlap(t *testing.T) {
|
||||
// Set 1: {1, 2, 3}
|
||||
ks1 := NewKmerSet(5)
|
||||
ks1.AddKmerCode(1)
|
||||
ks1.AddKmerCode(2)
|
||||
ks1.AddKmerCode(3)
|
||||
|
||||
// Set 2: {2, 3, 4}
|
||||
ks2 := NewKmerSet(5)
|
||||
ks2.AddKmerCode(2)
|
||||
ks2.AddKmerCode(3)
|
||||
ks2.AddKmerCode(4)
|
||||
|
||||
// Intersection: {2, 3} -> cardinality = 2
|
||||
// Union: {1, 2, 3, 4} -> cardinality = 4
|
||||
// Similarity = 2/4 = 0.5
|
||||
// Distance = 1 - 0.5 = 0.5
|
||||
|
||||
distance := ks1.JaccardDistance(ks2)
|
||||
similarity := ks1.JaccardSimilarity(ks2)
|
||||
|
||||
expectedDistance := 0.5
|
||||
expectedSimilarity := 0.5
|
||||
|
||||
if math.Abs(distance-expectedDistance) > 1e-10 {
|
||||
t.Errorf("Expected distance %f, got %f", expectedDistance, distance)
|
||||
}
|
||||
|
||||
if math.Abs(similarity-expectedSimilarity) > 1e-10 {
|
||||
t.Errorf("Expected similarity %f, got %f", expectedSimilarity, similarity)
|
||||
}
|
||||
}
|
||||
|
||||
func TestJaccardDistanceOneSubsetOfOther(t *testing.T) {
|
||||
// Set 1: {1, 2}
|
||||
ks1 := NewKmerSet(5)
|
||||
ks1.AddKmerCode(1)
|
||||
ks1.AddKmerCode(2)
|
||||
|
||||
// Set 2: {1, 2, 3, 4}
|
||||
ks2 := NewKmerSet(5)
|
||||
ks2.AddKmerCode(1)
|
||||
ks2.AddKmerCode(2)
|
||||
ks2.AddKmerCode(3)
|
||||
ks2.AddKmerCode(4)
|
||||
|
||||
// Intersection: {1, 2} -> cardinality = 2
|
||||
// Union: {1, 2, 3, 4} -> cardinality = 4
|
||||
// Similarity = 2/4 = 0.5
|
||||
// Distance = 1 - 0.5 = 0.5
|
||||
|
||||
distance := ks1.JaccardDistance(ks2)
|
||||
similarity := ks1.JaccardSimilarity(ks2)
|
||||
|
||||
expectedDistance := 0.5
|
||||
expectedSimilarity := 0.5
|
||||
|
||||
if math.Abs(distance-expectedDistance) > 1e-10 {
|
||||
t.Errorf("Expected distance %f, got %f", expectedDistance, distance)
|
||||
}
|
||||
|
||||
if math.Abs(similarity-expectedSimilarity) > 1e-10 {
|
||||
t.Errorf("Expected similarity %f, got %f", expectedSimilarity, similarity)
|
||||
}
|
||||
}
|
||||
|
||||
func TestJaccardDistanceEmptySets(t *testing.T) {
|
||||
ks1 := NewKmerSet(5)
|
||||
ks2 := NewKmerSet(5)
|
||||
|
||||
distance := ks1.JaccardDistance(ks2)
|
||||
similarity := ks1.JaccardSimilarity(ks2)
|
||||
|
||||
// By convention, distance = 1.0 for empty sets
|
||||
if distance != 1.0 {
|
||||
t.Errorf("Expected distance 1.0 for empty sets, got %f", distance)
|
||||
}
|
||||
|
||||
if similarity != 0.0 {
|
||||
t.Errorf("Expected similarity 0.0 for empty sets, got %f", similarity)
|
||||
}
|
||||
}
|
||||
|
||||
func TestJaccardDistanceOneEmpty(t *testing.T) {
|
||||
ks1 := NewKmerSet(5)
|
||||
ks1.AddKmerCode(1)
|
||||
ks1.AddKmerCode(2)
|
||||
ks1.AddKmerCode(3)
|
||||
|
||||
ks2 := NewKmerSet(5)
|
||||
|
||||
distance := ks1.JaccardDistance(ks2)
|
||||
similarity := ks1.JaccardSimilarity(ks2)
|
||||
|
||||
// Intersection: {} -> cardinality = 0
|
||||
// Union: {1, 2, 3} -> cardinality = 3
|
||||
// Similarity = 0/3 = 0.0
|
||||
// Distance = 1.0
|
||||
|
||||
if distance != 1.0 {
|
||||
t.Errorf("Expected distance 1.0 when one set is empty, got %f", distance)
|
||||
}
|
||||
|
||||
if similarity != 0.0 {
|
||||
t.Errorf("Expected similarity 0.0 when one set is empty, got %f", similarity)
|
||||
}
|
||||
}
|
||||
|
||||
func TestJaccardDistanceDifferentK(t *testing.T) {
|
||||
ks1 := NewKmerSet(5)
|
||||
ks1.AddKmerCode(1)
|
||||
|
||||
ks2 := NewKmerSet(7)
|
||||
ks2.AddKmerCode(1)
|
||||
|
||||
defer func() {
|
||||
if r := recover(); r == nil {
|
||||
t.Errorf("Expected panic when computing Jaccard distance with different k values")
|
||||
}
|
||||
}()
|
||||
|
||||
_ = ks1.JaccardDistance(ks2)
|
||||
}
|
||||
|
||||
func TestJaccardDistanceSimilarityRelation(t *testing.T) {
|
||||
// Test that distance + similarity = 1.0 for all cases
|
||||
testCases := []struct {
|
||||
name string
|
||||
ks1 *KmerSet
|
||||
ks2 *KmerSet
|
||||
}{
|
||||
{
|
||||
name: "partial overlap",
|
||||
ks1: func() *KmerSet {
|
||||
ks := NewKmerSet(5)
|
||||
ks.AddKmerCode(1)
|
||||
ks.AddKmerCode(2)
|
||||
ks.AddKmerCode(3)
|
||||
return ks
|
||||
}(),
|
||||
ks2: func() *KmerSet {
|
||||
ks := NewKmerSet(5)
|
||||
ks.AddKmerCode(2)
|
||||
ks.AddKmerCode(3)
|
||||
ks.AddKmerCode(4)
|
||||
ks.AddKmerCode(5)
|
||||
return ks
|
||||
}(),
|
||||
},
|
||||
{
|
||||
name: "identical",
|
||||
ks1: func() *KmerSet {
|
||||
ks := NewKmerSet(5)
|
||||
ks.AddKmerCode(10)
|
||||
ks.AddKmerCode(20)
|
||||
return ks
|
||||
}(),
|
||||
ks2: func() *KmerSet {
|
||||
ks := NewKmerSet(5)
|
||||
ks.AddKmerCode(10)
|
||||
ks.AddKmerCode(20)
|
||||
return ks
|
||||
}(),
|
||||
},
|
||||
{
|
||||
name: "disjoint",
|
||||
ks1: func() *KmerSet {
|
||||
ks := NewKmerSet(5)
|
||||
ks.AddKmerCode(1)
|
||||
return ks
|
||||
}(),
|
||||
ks2: func() *KmerSet {
|
||||
ks := NewKmerSet(5)
|
||||
ks.AddKmerCode(100)
|
||||
return ks
|
||||
}(),
|
||||
},
|
||||
}
|
||||
|
||||
for _, tc := range testCases {
|
||||
t.Run(tc.name, func(t *testing.T) {
|
||||
distance := tc.ks1.JaccardDistance(tc.ks2)
|
||||
similarity := tc.ks1.JaccardSimilarity(tc.ks2)
|
||||
|
||||
sum := distance + similarity
|
||||
|
||||
if math.Abs(sum-1.0) > 1e-10 {
|
||||
t.Errorf("Expected distance + similarity = 1.0, got %f + %f = %f",
|
||||
distance, similarity, sum)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestJaccardDistanceSymmetry(t *testing.T) {
|
||||
ks1 := NewKmerSet(5)
|
||||
ks1.AddKmerCode(1)
|
||||
ks1.AddKmerCode(2)
|
||||
ks1.AddKmerCode(3)
|
||||
|
||||
ks2 := NewKmerSet(5)
|
||||
ks2.AddKmerCode(2)
|
||||
ks2.AddKmerCode(3)
|
||||
ks2.AddKmerCode(4)
|
||||
|
||||
distance1 := ks1.JaccardDistance(ks2)
|
||||
distance2 := ks2.JaccardDistance(ks1)
|
||||
|
||||
similarity1 := ks1.JaccardSimilarity(ks2)
|
||||
similarity2 := ks2.JaccardSimilarity(ks1)
|
||||
|
||||
if math.Abs(distance1-distance2) > 1e-10 {
|
||||
t.Errorf("Jaccard distance not symmetric: %f vs %f", distance1, distance2)
|
||||
}
|
||||
|
||||
if math.Abs(similarity1-similarity2) > 1e-10 {
|
||||
t.Errorf("Jaccard similarity not symmetric: %f vs %f", similarity1, similarity2)
|
||||
}
|
||||
}
|
||||
File diff suppressed because it is too large
Load Diff
@@ -1,77 +0,0 @@
|
||||
package obikmer
|
||||
|
||||
import "testing"
|
||||
|
||||
func TestNormalize(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
kmer string
|
||||
expected string
|
||||
}{
|
||||
// Test avec k=1
|
||||
{"k=1 a", "a", "a"},
|
||||
{"k=1 c", "c", "c"},
|
||||
|
||||
// Test avec k=2
|
||||
{"k=2 ca", "ca", "ac"},
|
||||
{"k=2 ac", "ac", "ac"},
|
||||
|
||||
// Test avec k=4
|
||||
{"k=4 acgt", "acgt", "acgt"},
|
||||
{"k=4 cgta", "cgta", "acgt"},
|
||||
{"k=4 gtac", "gtac", "acgt"},
|
||||
{"k=4 tacg", "tacg", "acgt"},
|
||||
{"k=4 tgca", "tgca", "atgc"},
|
||||
|
||||
// Test avec k=6
|
||||
{"k=6 aaaaaa", "aaaaaa", "aaaaaa"},
|
||||
{"k=6 tttttt", "tttttt", "tttttt"},
|
||||
|
||||
// Test avec k>6 (calcul à la volée)
|
||||
{"k=7 aaaaaaa", "aaaaaaa", "aaaaaaa"},
|
||||
{"k=7 tgcatgc", "tgcatgc", "atgctgc"},
|
||||
{"k=7 gcatgct", "gcatgct", "atgctgc"},
|
||||
{"k=8 acgtacgt", "acgtacgt", "acgtacgt"},
|
||||
{"k=8 gtacgtac", "gtacgtac", "acgtacgt"},
|
||||
{"k=10 acgtacgtac", "acgtacgtac", "acacgtacgt"},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
result := Normalize(tt.kmer)
|
||||
if result != tt.expected {
|
||||
t.Errorf("Normalize(%q) = %q, want %q", tt.kmer, result, tt.expected)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestNormalizeTableConsistency(t *testing.T) {
|
||||
// Vérifier que tous les kmers de la table donnent le bon résultat
|
||||
// en comparant avec le calcul à la volée
|
||||
for kmer, expected := range LexicographicNormalization {
|
||||
calculated := getCanonicalCircular(kmer)
|
||||
if calculated != expected {
|
||||
t.Errorf("Table inconsistency for %q: table=%q, calculated=%q",
|
||||
kmer, expected, calculated)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func BenchmarkNormalizeSmall(b *testing.B) {
|
||||
// Benchmark pour k<=6 (utilise la table)
|
||||
kmer := "acgtac"
|
||||
b.ResetTimer()
|
||||
for i := 0; i < b.N; i++ {
|
||||
_ = Normalize(kmer)
|
||||
}
|
||||
}
|
||||
|
||||
func BenchmarkNormalizeLarge(b *testing.B) {
|
||||
// Benchmark pour k>6 (calcul à la volée)
|
||||
kmer := "acgtacgtac"
|
||||
b.ResetTimer()
|
||||
for i := 0; i < b.N; i++ {
|
||||
_ = Normalize(kmer)
|
||||
}
|
||||
}
|
||||
File diff suppressed because it is too large
Load Diff
@@ -1,357 +0,0 @@
|
||||
package obikmer
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"testing"
|
||||
)
|
||||
|
||||
func TestEncodeDecodeKmer(t *testing.T) {
|
||||
tests := []struct {
|
||||
kmer string
|
||||
code int
|
||||
}{
|
||||
{"a", 0},
|
||||
{"c", 1},
|
||||
{"g", 2},
|
||||
{"t", 3},
|
||||
{"aa", 0},
|
||||
{"ac", 1},
|
||||
{"ca", 4},
|
||||
{"acgt", 27}, // 0b00011011
|
||||
{"cgta", 108}, // 0b01101100
|
||||
{"tttt", 255}, // 0b11111111
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.kmer, func(t *testing.T) {
|
||||
// Test encoding
|
||||
encoded := EncodeKmer(tt.kmer)
|
||||
if encoded != tt.code {
|
||||
t.Errorf("EncodeKmer(%q) = %d, want %d", tt.kmer, encoded, tt.code)
|
||||
}
|
||||
|
||||
// Test decoding
|
||||
decoded := DecodeKmer(tt.code, len(tt.kmer))
|
||||
if decoded != tt.kmer {
|
||||
t.Errorf("DecodeKmer(%d, %d) = %q, want %q", tt.code, len(tt.kmer), decoded, tt.kmer)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestNormalizeInt(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
kmer string
|
||||
expected string
|
||||
}{
|
||||
// Test avec k=1
|
||||
{"k=1 a", "a", "a"},
|
||||
{"k=1 c", "c", "c"},
|
||||
|
||||
// Test avec k=2
|
||||
{"k=2 ca", "ca", "ac"},
|
||||
{"k=2 ac", "ac", "ac"},
|
||||
{"k=2 ta", "ta", "at"},
|
||||
|
||||
// Test avec k=4 - toutes les rotations de "acgt"
|
||||
{"k=4 acgt", "acgt", "acgt"},
|
||||
{"k=4 cgta", "cgta", "acgt"},
|
||||
{"k=4 gtac", "gtac", "acgt"},
|
||||
{"k=4 tacg", "tacg", "acgt"},
|
||||
|
||||
// Test avec k=4 - rotations de "tgca"
|
||||
{"k=4 tgca", "tgca", "atgc"},
|
||||
{"k=4 gcat", "gcat", "atgc"},
|
||||
{"k=4 catg", "catg", "atgc"},
|
||||
{"k=4 atgc", "atgc", "atgc"},
|
||||
|
||||
// Test avec k=3 - rotations de "atg"
|
||||
{"k=3 atg", "atg", "atg"},
|
||||
{"k=3 tga", "tga", "atg"},
|
||||
{"k=3 gat", "gat", "atg"},
|
||||
|
||||
// Test avec k=6
|
||||
{"k=6 aaaaaa", "aaaaaa", "aaaaaa"},
|
||||
{"k=6 tttttt", "tttttt", "tttttt"},
|
||||
|
||||
// Test avec k>6 (calcul à la volée)
|
||||
{"k=7 aaaaaaa", "aaaaaaa", "aaaaaaa"},
|
||||
{"k=7 tgcatgc", "tgcatgc", "atgctgc"},
|
||||
{"k=7 gcatgct", "gcatgct", "atgctgc"},
|
||||
{"k=8 acgtacgt", "acgtacgt", "acgtacgt"},
|
||||
{"k=8 gtacgtac", "gtacgtac", "acgtacgt"},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
kmerCode := EncodeKmer(tt.kmer)
|
||||
expectedCode := EncodeKmer(tt.expected)
|
||||
|
||||
result := NormalizeInt(kmerCode, len(tt.kmer))
|
||||
|
||||
if result != expectedCode {
|
||||
resultKmer := DecodeKmer(result, len(tt.kmer))
|
||||
t.Errorf("NormalizeInt(%q) = %q (code %d), want %q (code %d)",
|
||||
tt.kmer, resultKmer, result, tt.expected, expectedCode)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestNormalizeIntConsistencyWithString(t *testing.T) {
|
||||
// Vérifier que NormalizeInt donne le même résultat que Normalize
|
||||
// pour tous les k-mers de taille 1 à 4 (pour ne pas trop ralentir les tests)
|
||||
bases := []byte{'a', 'c', 'g', 't'}
|
||||
|
||||
var testKmers func(current string, maxSize int)
|
||||
testKmers = func(current string, maxSize int) {
|
||||
if len(current) > 0 {
|
||||
// Test normalization
|
||||
normalizedStr := Normalize(current)
|
||||
normalizedStrCode := EncodeKmer(normalizedStr)
|
||||
|
||||
kmerCode := EncodeKmer(current)
|
||||
normalizedInt := NormalizeInt(kmerCode, len(current))
|
||||
|
||||
if normalizedInt != normalizedStrCode {
|
||||
normalizedIntStr := DecodeKmer(normalizedInt, len(current))
|
||||
t.Errorf("Inconsistency for %q: Normalize=%q (code %d), NormalizeInt=%q (code %d)",
|
||||
current, normalizedStr, normalizedStrCode, normalizedIntStr, normalizedInt)
|
||||
}
|
||||
}
|
||||
|
||||
if len(current) < maxSize {
|
||||
for _, base := range bases {
|
||||
testKmers(current+string(base), maxSize)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
testKmers("", 4) // Test jusqu'à k=4 pour rester raisonnable
|
||||
}
|
||||
|
||||
func TestCircularRotations(t *testing.T) {
|
||||
// Test que toutes les rotations circulaires donnent le même canonical
|
||||
tests := []struct {
|
||||
kmers []string
|
||||
canonical string
|
||||
}{
|
||||
{[]string{"atg", "tga", "gat"}, "atg"},
|
||||
{[]string{"acgt", "cgta", "gtac", "tacg"}, "acgt"},
|
||||
{[]string{"tgca", "gcat", "catg", "atgc"}, "atgc"},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
canonicalCode := EncodeKmer(tt.canonical)
|
||||
|
||||
for _, kmer := range tt.kmers {
|
||||
kmerCode := EncodeKmer(kmer)
|
||||
result := NormalizeInt(kmerCode, len(kmer))
|
||||
|
||||
if result != canonicalCode {
|
||||
resultKmer := DecodeKmer(result, len(kmer))
|
||||
t.Errorf("NormalizeInt(%q) = %q, want %q", kmer, resultKmer, tt.canonical)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func BenchmarkNormalizeIntSmall(b *testing.B) {
|
||||
// Benchmark pour k<=6 (utilise la table)
|
||||
kmer := "acgtac"
|
||||
kmerCode := EncodeKmer(kmer)
|
||||
kmerSize := len(kmer)
|
||||
|
||||
b.ResetTimer()
|
||||
for i := 0; i < b.N; i++ {
|
||||
_ = NormalizeInt(kmerCode, kmerSize)
|
||||
}
|
||||
}
|
||||
|
||||
func BenchmarkNormalizeIntLarge(b *testing.B) {
|
||||
// Benchmark pour k>6 (calcul à la volée)
|
||||
kmer := "acgtacgtac"
|
||||
kmerCode := EncodeKmer(kmer)
|
||||
kmerSize := len(kmer)
|
||||
|
||||
b.ResetTimer()
|
||||
for i := 0; i < b.N; i++ {
|
||||
_ = NormalizeInt(kmerCode, kmerSize)
|
||||
}
|
||||
}
|
||||
|
||||
func BenchmarkEncodeKmer(b *testing.B) {
|
||||
kmer := "acgtacgt"
|
||||
|
||||
b.ResetTimer()
|
||||
for i := 0; i < b.N; i++ {
|
||||
_ = EncodeKmer(kmer)
|
||||
}
|
||||
}
|
||||
|
||||
func TestCanonicalKmerCount(t *testing.T) {
|
||||
// Test exact counts for k=1 to 6
|
||||
tests := []struct {
|
||||
k int
|
||||
expected int
|
||||
}{
|
||||
{1, 4},
|
||||
{2, 10},
|
||||
{3, 24},
|
||||
{4, 70},
|
||||
{5, 208},
|
||||
{6, 700},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(fmt.Sprintf("k=%d", tt.k), func(t *testing.T) {
|
||||
result := CanonicalKmerCount(tt.k)
|
||||
if result != tt.expected {
|
||||
t.Errorf("CanonicalKmerCount(%d) = %d, want %d", tt.k, result, tt.expected)
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
// Verify counts match table sizes
|
||||
for k := 1; k <= 6; k++ {
|
||||
// Count unique canonical codes in the table
|
||||
uniqueCodes := make(map[int]bool)
|
||||
for _, canonicalCode := range LexicographicNormalizationInt[k] {
|
||||
uniqueCodes[canonicalCode] = true
|
||||
}
|
||||
|
||||
expected := len(uniqueCodes)
|
||||
result := CanonicalKmerCount(k)
|
||||
|
||||
if result != expected {
|
||||
t.Errorf("CanonicalKmerCount(%d) = %d, but table has %d unique canonical codes",
|
||||
k, result, expected)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestNecklaceCountFormula(t *testing.T) {
|
||||
// Verify Moreau's formula gives the same results as hardcoded values for k=1 to 6
|
||||
// and compute exact values for k=7+
|
||||
tests := []struct {
|
||||
k int
|
||||
expected int
|
||||
}{
|
||||
{1, 4},
|
||||
{2, 10},
|
||||
{3, 24},
|
||||
{4, 70},
|
||||
{5, 208},
|
||||
{6, 700},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(fmt.Sprintf("k=%d", tt.k), func(t *testing.T) {
|
||||
result := necklaceCount(tt.k, 4)
|
||||
if result != tt.expected {
|
||||
t.Errorf("necklaceCount(%d, 4) = %d, want %d", tt.k, result, tt.expected)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestNecklaceCountByBruteForce(t *testing.T) {
|
||||
// Verify necklace count for k=7 and k=8 by brute force
|
||||
// Generate all 4^k k-mers and count unique normalized ones
|
||||
bases := []byte{'a', 'c', 'g', 't'}
|
||||
|
||||
for _, k := range []int{7, 8} {
|
||||
t.Run(fmt.Sprintf("k=%d", k), func(t *testing.T) {
|
||||
unique := make(map[int]bool)
|
||||
|
||||
// Generate all possible k-mers
|
||||
var generate func(current int, depth int)
|
||||
generate = func(current int, depth int) {
|
||||
if depth == k {
|
||||
// Normalize and add to set
|
||||
normalized := NormalizeInt(current, k)
|
||||
unique[normalized] = true
|
||||
return
|
||||
}
|
||||
|
||||
for _, base := range bases {
|
||||
newCode := (current << 2) | int(EncodeNucleotide(base))
|
||||
generate(newCode, depth+1)
|
||||
}
|
||||
}
|
||||
|
||||
generate(0, 0)
|
||||
|
||||
bruteForceCount := len(unique)
|
||||
formulaCount := necklaceCount(k, 4)
|
||||
|
||||
if bruteForceCount != formulaCount {
|
||||
t.Errorf("For k=%d: brute force count = %d, formula count = %d",
|
||||
k, bruteForceCount, formulaCount)
|
||||
}
|
||||
|
||||
t.Logf("k=%d: unique canonical k-mers = %d (formula matches brute force)", k, bruteForceCount)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestEulerTotient(t *testing.T) {
|
||||
tests := []struct {
|
||||
n int
|
||||
expected int
|
||||
}{
|
||||
{1, 1},
|
||||
{2, 1},
|
||||
{3, 2},
|
||||
{4, 2},
|
||||
{5, 4},
|
||||
{6, 2},
|
||||
{7, 6},
|
||||
{8, 4},
|
||||
{9, 6},
|
||||
{10, 4},
|
||||
{12, 4},
|
||||
{15, 8},
|
||||
{20, 8},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(fmt.Sprintf("φ(%d)", tt.n), func(t *testing.T) {
|
||||
result := eulerTotient(tt.n)
|
||||
if result != tt.expected {
|
||||
t.Errorf("eulerTotient(%d) = %d, want %d", tt.n, result, tt.expected)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestDivisors(t *testing.T) {
|
||||
tests := []struct {
|
||||
n int
|
||||
expected []int
|
||||
}{
|
||||
{1, []int{1}},
|
||||
{2, []int{1, 2}},
|
||||
{6, []int{1, 2, 3, 6}},
|
||||
{12, []int{1, 2, 3, 4, 6, 12}},
|
||||
{15, []int{1, 3, 5, 15}},
|
||||
{20, []int{1, 2, 4, 5, 10, 20}},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(fmt.Sprintf("divisors(%d)", tt.n), func(t *testing.T) {
|
||||
result := divisors(tt.n)
|
||||
if len(result) != len(tt.expected) {
|
||||
t.Errorf("divisors(%d) = %v, want %v", tt.n, result, tt.expected)
|
||||
return
|
||||
}
|
||||
for i := range result {
|
||||
if result[i] != tt.expected[i] {
|
||||
t.Errorf("divisors(%d) = %v, want %v", tt.n, result, tt.expected)
|
||||
return
|
||||
}
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
@@ -1,20 +1,14 @@
|
||||
package obioptions
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
)
|
||||
// Version is automatically updated by the Makefile from version.txt
|
||||
// The patch number (third digit) is incremented on each push to the repository
|
||||
|
||||
// TODO: The version number is extracted from git. This induces that the version
|
||||
// corresponds to the last commit, and not the one when the file will be
|
||||
// commited
|
||||
|
||||
var _Commit = "52244cd"
|
||||
var _Version = "Release 4.4.0"
|
||||
var _Version = "Release 4.4.4"
|
||||
|
||||
// Version returns the version of the obitools package.
|
||||
//
|
||||
// No parameters.
|
||||
// Returns a string representing the version of the obitools package.
|
||||
func VersionString() string {
|
||||
return fmt.Sprintf("%s (%s)", _Version, _Commit)
|
||||
return _Version
|
||||
}
|
||||
|
||||
@@ -48,12 +48,12 @@ func LowMaskWorker(kmer_size int, level_max int, threshold float64, mode Masking
|
||||
// - We calculate the entropy of a distribution where all words appear
|
||||
// cov or cov+1 times (most uniform distribution possible)
|
||||
//
|
||||
// IMPORTANT: Uses CanonicalKmerCount to get the actual number of canonical words
|
||||
// IMPORTANT: Uses CanonicalCircularKmerCount to get the actual number of canonical words
|
||||
// after circular normalization (e.g., "atg", "tga", "gat" → all "atg").
|
||||
// This is much smaller than 4^word_size (e.g., 10 instead of 16 for word_size=2).
|
||||
emax := func(lseq, word_size int) float64 {
|
||||
nw := lseq - word_size + 1 // Number of words in a k-mer of length lseq
|
||||
na := obikmer.CanonicalKmerCount(word_size) // Number of canonical words after normalization
|
||||
nw := lseq - word_size + 1 // Number of words in a k-mer of length lseq
|
||||
na := obikmer.CanonicalCircularKmerCount(word_size) // Number of canonical words after normalization
|
||||
|
||||
// Case 1: Fewer positions than possible words
|
||||
// Maximum entropy is simply log(nw) since we can have at most nw different words
|
||||
@@ -215,7 +215,8 @@ func LowMaskWorker(kmer_size int, level_max int, threshold float64, mode Masking
|
||||
// *** CIRCULAR NORMALIZATION ***
|
||||
// Convert word to its canonical form (smallest by circular rotation)
|
||||
// This is where "atg", "tga", "gat" all become "atg"
|
||||
words[i] = obikmer.NormalizeInt(word_index, wordSize)
|
||||
// Now using uint64-based NormalizeCircular for better performance
|
||||
words[i] = int(obikmer.NormalizeCircular(uint64(word_index), wordSize))
|
||||
}
|
||||
|
||||
// ========================================================================
|
||||
|
||||
1
version.txt
Normal file
1
version.txt
Normal file
@@ -0,0 +1 @@
|
||||
4.4.4
|
||||
Reference in New Issue
Block a user