Improve code search + tests
Co-Authored-By: Qiang Zhou <zhouqiang.loaded@bytedance.com> Co-authored-by: theodoruszq <theodoruszq@gmail.com> Signed-off-by: Thomas Miceli <tho.miceli@gmail.com>
This commit is contained in:
14
.github/workflows/go.yml
vendored
14
.github/workflows/go.yml
vendored
@@ -83,6 +83,18 @@ jobs:
|
|||||||
--health-interval 10s
|
--health-interval 10s
|
||||||
--health-timeout 5s
|
--health-timeout 5s
|
||||||
--health-retries 5
|
--health-retries 5
|
||||||
|
meilisearch:
|
||||||
|
image: getmeili/meilisearch:latest
|
||||||
|
ports:
|
||||||
|
- 47700:7700
|
||||||
|
env:
|
||||||
|
MEILI_NO_ANALYTICS: true
|
||||||
|
MEILI_ENV: development
|
||||||
|
options: >-
|
||||||
|
--health-cmd "curl -sf http://localhost:7700/health"
|
||||||
|
--health-interval 10s
|
||||||
|
--health-timeout 5s
|
||||||
|
--health-retries 5
|
||||||
steps:
|
steps:
|
||||||
- name: Checkout
|
- name: Checkout
|
||||||
uses: actions/checkout@v6
|
uses: actions/checkout@v6
|
||||||
@@ -94,6 +106,8 @@ jobs:
|
|||||||
|
|
||||||
- name: Run tests
|
- name: Run tests
|
||||||
run: make test TEST_DB_TYPE=${{ matrix.database }}
|
run: make test TEST_DB_TYPE=${{ matrix.database }}
|
||||||
|
env:
|
||||||
|
OG_TEST_MEILI_HOST: http://localhost:47700
|
||||||
|
|
||||||
test:
|
test:
|
||||||
name: Test
|
name: Test
|
||||||
|
|||||||
@@ -10,6 +10,7 @@ import (
|
|||||||
"github.com/blevesearch/bleve/v2"
|
"github.com/blevesearch/bleve/v2"
|
||||||
"github.com/blevesearch/bleve/v2/analysis/analyzer/custom"
|
"github.com/blevesearch/bleve/v2/analysis/analyzer/custom"
|
||||||
"github.com/blevesearch/bleve/v2/analysis/token/camelcase"
|
"github.com/blevesearch/bleve/v2/analysis/token/camelcase"
|
||||||
|
"github.com/blevesearch/bleve/v2/analysis/token/length"
|
||||||
"github.com/blevesearch/bleve/v2/analysis/token/lowercase"
|
"github.com/blevesearch/bleve/v2/analysis/token/lowercase"
|
||||||
"github.com/blevesearch/bleve/v2/analysis/token/unicodenorm"
|
"github.com/blevesearch/bleve/v2/analysis/token/unicodenorm"
|
||||||
"github.com/blevesearch/bleve/v2/analysis/tokenizer/unicode"
|
"github.com/blevesearch/bleve/v2/analysis/tokenizer/unicode"
|
||||||
@@ -56,14 +57,9 @@ func (i *BleveIndexer) open() (bleve.Index, error) {
|
|||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
|
|
||||||
docMapping := bleve.NewDocumentMapping()
|
|
||||||
docMapping.AddFieldMappingsAt("GistID", bleve.NewNumericFieldMapping())
|
|
||||||
docMapping.AddFieldMappingsAt("UserID", bleve.NewNumericFieldMapping())
|
|
||||||
docMapping.AddFieldMappingsAt("Visibility", bleve.NewNumericFieldMapping())
|
|
||||||
docMapping.AddFieldMappingsAt("Content", bleve.NewTextFieldMapping())
|
|
||||||
|
|
||||||
mapping := bleve.NewIndexMapping()
|
mapping := bleve.NewIndexMapping()
|
||||||
|
|
||||||
|
// Token filters
|
||||||
if err = mapping.AddCustomTokenFilter("unicodeNormalize", map[string]any{
|
if err = mapping.AddCustomTokenFilter("unicodeNormalize", map[string]any{
|
||||||
"type": unicodenorm.Name,
|
"type": unicodenorm.Name,
|
||||||
"form": unicodenorm.NFC,
|
"form": unicodenorm.NFC,
|
||||||
@@ -71,16 +67,74 @@ func (i *BleveIndexer) open() (bleve.Index, error) {
|
|||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
|
|
||||||
if err = mapping.AddCustomAnalyzer("gistAnalyser", map[string]interface{}{
|
if err = mapping.AddCustomTokenFilter("lengthMin2", map[string]interface{}{
|
||||||
"type": custom.Name,
|
"type": length.Name,
|
||||||
"char_filters": []string{},
|
"min": 2.0,
|
||||||
"tokenizer": unicode.Name,
|
|
||||||
"token_filters": []string{"unicodeNormalize", camelcase.Name, lowercase.Name},
|
|
||||||
}); err != nil {
|
}); err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
|
|
||||||
docMapping.DefaultAnalyzer = "gistAnalyser"
|
// Analyzer: split mode (camelCase splitting for partial search)
|
||||||
|
// "CPUCard" -> ["cpu", "card"]
|
||||||
|
if err = mapping.AddCustomAnalyzer("codeSplit", map[string]interface{}{
|
||||||
|
"type": custom.Name,
|
||||||
|
"char_filters": []string{},
|
||||||
|
"tokenizer": unicode.Name,
|
||||||
|
"token_filters": []string{"unicodeNormalize", camelcase.Name, lowercase.Name, "lengthMin2"},
|
||||||
|
}); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
// Analyzer: exact mode (no camelCase splitting for full-word search)
|
||||||
|
// "CPUCard" -> ["cpucard"]
|
||||||
|
if err = mapping.AddCustomAnalyzer("codeExact", map[string]interface{}{
|
||||||
|
"type": custom.Name,
|
||||||
|
"char_filters": []string{},
|
||||||
|
"tokenizer": unicode.Name,
|
||||||
|
"token_filters": []string{"unicodeNormalize", lowercase.Name},
|
||||||
|
}); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
// Analyzer: keyword with lowercase (for Languages - single token, no splitting)
|
||||||
|
if err = mapping.AddCustomAnalyzer("lowercaseKeyword", map[string]interface{}{
|
||||||
|
"type": custom.Name,
|
||||||
|
"char_filters": []string{},
|
||||||
|
"tokenizer": "single",
|
||||||
|
"token_filters": []string{lowercase.Name},
|
||||||
|
}); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
// Document mapping
|
||||||
|
docMapping := bleve.NewDocumentMapping()
|
||||||
|
docMapping.AddFieldMappingsAt("GistID", bleve.NewNumericFieldMapping())
|
||||||
|
docMapping.AddFieldMappingsAt("UserID", bleve.NewNumericFieldMapping())
|
||||||
|
docMapping.AddFieldMappingsAt("Visibility", bleve.NewNumericFieldMapping())
|
||||||
|
|
||||||
|
// Content: dual indexing (exact + split)
|
||||||
|
// "Content" uses the property name so Bleve resolves its analyzer correctly
|
||||||
|
contentExact := bleve.NewTextFieldMapping()
|
||||||
|
contentExact.Name = "Content"
|
||||||
|
contentExact.Analyzer = "codeExact"
|
||||||
|
contentExact.Store = false
|
||||||
|
contentExact.IncludeTermVectors = true
|
||||||
|
|
||||||
|
contentSplit := bleve.NewTextFieldMapping()
|
||||||
|
contentSplit.Name = "ContentSplit"
|
||||||
|
contentSplit.Analyzer = "codeSplit"
|
||||||
|
contentSplit.Store = false
|
||||||
|
contentSplit.IncludeTermVectors = true
|
||||||
|
|
||||||
|
docMapping.AddFieldMappingsAt("Content", contentExact, contentSplit)
|
||||||
|
|
||||||
|
// Languages: keyword analyzer (preserves as single token)
|
||||||
|
languageFieldMapping := bleve.NewTextFieldMapping()
|
||||||
|
languageFieldMapping.Analyzer = "lowercaseKeyword"
|
||||||
|
docMapping.AddFieldMappingsAt("Languages", languageFieldMapping)
|
||||||
|
|
||||||
|
// All other text fields use codeSplit as default
|
||||||
|
docMapping.DefaultAnalyzer = "codeSplit"
|
||||||
mapping.DefaultMapping = docMapping
|
mapping.DefaultMapping = docMapping
|
||||||
|
|
||||||
return bleve.New(i.path, mapping)
|
return bleve.New(i.path, mapping)
|
||||||
@@ -154,23 +208,74 @@ func (i *BleveIndexer) Search(metadata SearchGistMetadata, userId uint, page int
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// Exact+fuzzy query factory: exact match is boosted so it ranks above fuzzy-only matches
|
// Query factory for text fields: exact match boosted + match query + prefix
|
||||||
factoryFuzzyQuery := func(field, value string) query.Query {
|
factoryTextQuery := func(field, value string) query.Query {
|
||||||
exact := bleve.NewMatchPhraseQuery(value)
|
exact := bleve.NewMatchPhraseQuery(value)
|
||||||
exact.SetField(field)
|
exact.SetField(field)
|
||||||
exact.SetBoost(2.0)
|
exact.SetBoost(2.0)
|
||||||
|
|
||||||
fuzzy := bleve.NewMatchQuery(value)
|
fuzzy := bleve.NewMatchQuery(value)
|
||||||
fuzzy.SetField(field)
|
fuzzy.SetField(field)
|
||||||
fuzzy.SetFuzziness(2)
|
fuzzy.SetFuzziness(1)
|
||||||
|
fuzzy.SetOperator(query.MatchQueryOperatorAnd)
|
||||||
|
|
||||||
return bleve.NewDisjunctionQuery(exact, fuzzy)
|
queries := []query.Query{exact, fuzzy}
|
||||||
|
|
||||||
|
if len([]rune(value)) >= 2 {
|
||||||
|
prefix := bleve.NewPrefixQuery(strings.ToLower(value))
|
||||||
|
prefix.SetField(field)
|
||||||
|
prefix.SetBoost(1.5)
|
||||||
|
queries = append(queries, prefix)
|
||||||
}
|
}
|
||||||
|
|
||||||
// Exact+fuzzy search
|
if len([]rune(value)) >= 4 {
|
||||||
addFuzzy := func(field, value string) {
|
wildcard := bleve.NewWildcardQuery("*" + strings.ToLower(value) + "*")
|
||||||
|
wildcard.SetField(field)
|
||||||
|
wildcard.SetBoost(0.5)
|
||||||
|
queries = append(queries, wildcard)
|
||||||
|
}
|
||||||
|
|
||||||
|
return bleve.NewDisjunctionQuery(queries...)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Query factory for Content: searches both exact (Content) and split (ContentSplit) fields
|
||||||
|
factoryContentQuery := func(value string) query.Query {
|
||||||
|
// Exact field (no camelCase split): matches "cpucard"
|
||||||
|
exactMatch := bleve.NewMatchQuery(value)
|
||||||
|
exactMatch.SetField("Content")
|
||||||
|
exactMatch.SetOperator(query.MatchQueryOperatorAnd)
|
||||||
|
exactMatch.SetBoost(2.0)
|
||||||
|
|
||||||
|
// Split field (camelCase split): matches "cpu", "card"
|
||||||
|
splitMatch := bleve.NewMatchQuery(value)
|
||||||
|
splitMatch.SetField("ContentSplit")
|
||||||
|
splitMatch.SetFuzziness(1)
|
||||||
|
splitMatch.SetOperator(query.MatchQueryOperatorAnd)
|
||||||
|
splitMatch.SetBoost(1.0)
|
||||||
|
|
||||||
|
queries := []query.Query{exactMatch, splitMatch}
|
||||||
|
|
||||||
|
if len([]rune(value)) >= 2 {
|
||||||
|
prefix := bleve.NewPrefixQuery(strings.ToLower(value))
|
||||||
|
prefix.SetField("Content")
|
||||||
|
prefix.SetBoost(1.5)
|
||||||
|
queries = append(queries, prefix)
|
||||||
|
}
|
||||||
|
|
||||||
|
if len([]rune(value)) >= 4 {
|
||||||
|
wildcard := bleve.NewWildcardQuery("*" + strings.ToLower(value) + "*")
|
||||||
|
wildcard.SetField("Content")
|
||||||
|
wildcard.SetBoost(0.5)
|
||||||
|
queries = append(queries, wildcard)
|
||||||
|
}
|
||||||
|
|
||||||
|
return bleve.NewDisjunctionQuery(queries...)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Text field search
|
||||||
|
addTextQuery := func(field, value string) {
|
||||||
if value != "" && value != "." {
|
if value != "" && value != "." {
|
||||||
indexerQuery = bleve.NewConjunctionQuery(indexerQuery, factoryFuzzyQuery(field, value))
|
indexerQuery = bleve.NewConjunctionQuery(indexerQuery, factoryTextQuery(field, value))
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -189,8 +294,10 @@ func (i *BleveIndexer) Search(metadata SearchGistMetadata, userId uint, page int
|
|||||||
|
|
||||||
buildFieldQuery := func(field, value string) query.Query {
|
buildFieldQuery := func(field, value string) query.Query {
|
||||||
switch field {
|
switch field {
|
||||||
case "Title", "Description", "Filenames", "Content":
|
case "Content":
|
||||||
return factoryFuzzyQuery(field, value)
|
return factoryContentQuery(value)
|
||||||
|
case "Title", "Description", "Filenames":
|
||||||
|
return factoryTextQuery(field, value)
|
||||||
case "Extensions":
|
case "Extensions":
|
||||||
return factoryQuery(field, "."+value)
|
return factoryQuery(field, "."+value)
|
||||||
default: // Username, Languages, Topics
|
default: // Username, Languages, Topics
|
||||||
@@ -208,13 +315,15 @@ func (i *BleveIndexer) Search(metadata SearchGistMetadata, userId uint, page int
|
|||||||
} else {
|
} else {
|
||||||
// Original behavior: add each metadata field with AND logic
|
// Original behavior: add each metadata field with AND logic
|
||||||
addQuery("Username", metadata.Username)
|
addQuery("Username", metadata.Username)
|
||||||
addFuzzy("Title", metadata.Title)
|
addTextQuery("Title", metadata.Title)
|
||||||
addFuzzy("Description", metadata.Description)
|
addTextQuery("Description", metadata.Description)
|
||||||
addQuery("Extensions", "."+metadata.Extension)
|
addQuery("Extensions", "."+metadata.Extension)
|
||||||
addFuzzy("Filenames", metadata.Filename)
|
addTextQuery("Filenames", metadata.Filename)
|
||||||
addQuery("Languages", metadata.Language)
|
addQuery("Languages", metadata.Language)
|
||||||
addQuery("Topics", metadata.Topic)
|
addQuery("Topics", metadata.Topic)
|
||||||
addFuzzy("Content", metadata.Content)
|
if metadata.Content != "" {
|
||||||
|
indexerQuery = bleve.NewConjunctionQuery(indexerQuery, factoryContentQuery(metadata.Content))
|
||||||
|
}
|
||||||
|
|
||||||
// Handle default search fields from config with OR logic
|
// Handle default search fields from config with OR logic
|
||||||
if metadata.Default != "" {
|
if metadata.Default != "" {
|
||||||
|
|||||||
@@ -4,33 +4,31 @@ import (
|
|||||||
"os"
|
"os"
|
||||||
"path/filepath"
|
"path/filepath"
|
||||||
"testing"
|
"testing"
|
||||||
|
|
||||||
|
"github.com/rs/zerolog"
|
||||||
|
"github.com/stretchr/testify/require"
|
||||||
)
|
)
|
||||||
|
|
||||||
// setupBleveIndexer creates a new BleveIndexer for testing
|
// setupBleveIndexer creates a new BleveIndexer for testing
|
||||||
func setupBleveIndexer(t *testing.T) (*BleveIndexer, func()) {
|
func setupBleveIndexer(t *testing.T) (Indexer, func()) {
|
||||||
|
zerolog.SetGlobalLevel(zerolog.Disabled)
|
||||||
t.Helper()
|
t.Helper()
|
||||||
|
|
||||||
// Create a temporary directory for the test index
|
|
||||||
tmpDir, err := os.MkdirTemp("", "bleve-test-*")
|
tmpDir, err := os.MkdirTemp("", "bleve-test-*")
|
||||||
if err != nil {
|
require.NoError(t, err)
|
||||||
t.Fatalf("Failed to create temp directory: %v", err)
|
|
||||||
}
|
|
||||||
|
|
||||||
indexPath := filepath.Join(tmpDir, "test.index")
|
indexPath := filepath.Join(tmpDir, "test.index")
|
||||||
indexer := NewBleveIndexer(indexPath)
|
indexer := NewBleveIndexer(indexPath)
|
||||||
|
|
||||||
// Initialize the indexer
|
|
||||||
err = indexer.Init()
|
err = indexer.Init()
|
||||||
if err != nil {
|
if err != nil {
|
||||||
os.RemoveAll(tmpDir)
|
os.RemoveAll(tmpDir)
|
||||||
t.Fatalf("Failed to initialize BleveIndexer: %v", err)
|
t.Fatalf("Failed to initialize BleveIndexer: %v", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
// Store in the global atomicIndexer since Add/Remove use it
|
|
||||||
var idx Indexer = indexer
|
var idx Indexer = indexer
|
||||||
atomicIndexer.Store(&idx)
|
atomicIndexer.Store(&idx)
|
||||||
|
|
||||||
// Return cleanup function
|
|
||||||
cleanup := func() {
|
cleanup := func() {
|
||||||
atomicIndexer.Store(nil)
|
atomicIndexer.Store(nil)
|
||||||
indexer.Close()
|
indexer.Close()
|
||||||
@@ -40,124 +38,50 @@ func setupBleveIndexer(t *testing.T) (*BleveIndexer, func()) {
|
|||||||
return indexer, cleanup
|
return indexer, cleanup
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestBleveIndexerAddGist(t *testing.T) {
|
func TestBleveAddAndSearch(t *testing.T) { testAddAndSearch(t, setupBleveIndexer) }
|
||||||
indexer, cleanup := setupBleveIndexer(t)
|
func TestBleveAccessControl(t *testing.T) { testAccessControl(t, setupBleveIndexer) }
|
||||||
defer cleanup()
|
func TestBleveMetadataFilters(t *testing.T) { testMetadataFilters(t, setupBleveIndexer) }
|
||||||
|
func TestBleveAllFieldSearch(t *testing.T) { testAllFieldSearch(t, setupBleveIndexer) }
|
||||||
|
func TestBleveFuzzySearch(t *testing.T) { testFuzzySearch(t, setupBleveIndexer) }
|
||||||
|
func TestBleveContentSearch(t *testing.T) { testContentSearch(t, setupBleveIndexer) }
|
||||||
|
func TestBlevePagination(t *testing.T) { testPagination(t, setupBleveIndexer) }
|
||||||
|
func TestBleveLanguageFacets(t *testing.T) { testLanguageFacets(t, setupBleveIndexer) }
|
||||||
|
func TestBleveWildcardSearch(t *testing.T) { testWildcardSearch(t, setupBleveIndexer) }
|
||||||
|
func TestBleveMetadataOnlySearch(t *testing.T) { testMetadataOnlySearch(t, setupBleveIndexer) }
|
||||||
|
func TestBleveTitleFuzzySearch(t *testing.T) { testTitleFuzzySearch(t, setupBleveIndexer) }
|
||||||
|
func TestBleveMultiLanguageFacets(t *testing.T) { testMultiLanguageFacets(t, setupBleveIndexer) }
|
||||||
|
|
||||||
testIndexerAddGist(t, indexer)
|
func TestBlevePersistence(t *testing.T) {
|
||||||
}
|
tmpDir, err := os.MkdirTemp("", "bleve-persist-test-*")
|
||||||
|
require.NoError(t, err)
|
||||||
func TestBleveIndexerAllFieldSearch(t *testing.T) {
|
|
||||||
indexer, cleanup := setupBleveIndexer(t)
|
|
||||||
defer cleanup()
|
|
||||||
|
|
||||||
testIndexerAllFieldSearch(t, indexer)
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestBleveIndexerFuzzySearch(t *testing.T) {
|
|
||||||
indexer, cleanup := setupBleveIndexer(t)
|
|
||||||
defer cleanup()
|
|
||||||
|
|
||||||
testIndexerFuzzySearch(t, indexer)
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestBleveIndexerSearchBasic(t *testing.T) {
|
|
||||||
indexer, cleanup := setupBleveIndexer(t)
|
|
||||||
defer cleanup()
|
|
||||||
|
|
||||||
testIndexerSearchBasic(t, indexer)
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestBleveIndexerPagination(t *testing.T) {
|
|
||||||
indexer, cleanup := setupBleveIndexer(t)
|
|
||||||
defer cleanup()
|
|
||||||
|
|
||||||
testIndexerPagination(t, indexer)
|
|
||||||
}
|
|
||||||
|
|
||||||
// TestBleveIndexerInitAndClose tests Bleve-specific initialization and closing
|
|
||||||
func TestBleveIndexerInitAndClose(t *testing.T) {
|
|
||||||
tmpDir, err := os.MkdirTemp("", "bleve-init-test-*")
|
|
||||||
if err != nil {
|
|
||||||
t.Fatalf("Failed to create temp directory: %v", err)
|
|
||||||
}
|
|
||||||
defer os.RemoveAll(tmpDir)
|
defer os.RemoveAll(tmpDir)
|
||||||
|
|
||||||
indexPath := filepath.Join(tmpDir, "test.index")
|
indexPath := filepath.Join(tmpDir, "test.index")
|
||||||
indexer := NewBleveIndexer(indexPath)
|
|
||||||
|
|
||||||
// Test initialization
|
// Create and populate index
|
||||||
err = indexer.Init()
|
indexer1 := NewBleveIndexer(indexPath)
|
||||||
if err != nil {
|
require.NoError(t, indexer1.Init())
|
||||||
t.Fatalf("Failed to initialize BleveIndexer: %v", err)
|
|
||||||
}
|
|
||||||
|
|
||||||
if indexer.index == nil {
|
var idx Indexer = indexer1
|
||||||
t.Fatal("Expected index to be initialized, got nil")
|
atomicIndexer.Store(&idx)
|
||||||
}
|
|
||||||
|
|
||||||
// Test closing
|
g := newGist(1, 1, 0, "persistent data survives restart")
|
||||||
indexer.Close()
|
require.NoError(t, indexer1.Add(g))
|
||||||
|
|
||||||
// Test reopening the same index
|
indexer1.Close()
|
||||||
|
atomicIndexer.Store(nil)
|
||||||
|
|
||||||
|
// Reopen at same path
|
||||||
indexer2 := NewBleveIndexer(indexPath)
|
indexer2 := NewBleveIndexer(indexPath)
|
||||||
err = indexer2.Init()
|
require.NoError(t, indexer2.Init())
|
||||||
if err != nil {
|
|
||||||
t.Fatalf("Failed to reopen BleveIndexer: %v", err)
|
|
||||||
}
|
|
||||||
defer indexer2.Close()
|
defer indexer2.Close()
|
||||||
|
|
||||||
if indexer2.index == nil {
|
idx = indexer2
|
||||||
t.Fatal("Expected reopened index to be initialized, got nil")
|
atomicIndexer.Store(&idx)
|
||||||
}
|
defer atomicIndexer.Store(nil)
|
||||||
}
|
|
||||||
|
|
||||||
// TestBleveIndexerUnicodeSearch tests that Unicode content can be indexed and searched
|
ids, total, _, err := indexer2.Search(SearchGistMetadata{Content: "persistent"}, 1, 1)
|
||||||
func TestBleveIndexerUnicodeSearch(t *testing.T) {
|
require.NoError(t, err)
|
||||||
indexer, cleanup := setupBleveIndexer(t)
|
require.Equal(t, uint64(1), total, "data should survive close+reopen")
|
||||||
defer cleanup()
|
require.Equal(t, uint(1), ids[0])
|
||||||
|
|
||||||
// Add a gist with Unicode content
|
|
||||||
gist := &Gist{
|
|
||||||
GistID: 100,
|
|
||||||
UserID: 100,
|
|
||||||
Visibility: 0,
|
|
||||||
Username: "testuser",
|
|
||||||
Title: "Unicode Test",
|
|
||||||
Description: "Descrition with Unicode characters: Café résumé naive",
|
|
||||||
Content: "Hello world with unicode characters: café résumé naïve",
|
|
||||||
Filenames: []string{"test.txt"},
|
|
||||||
Extensions: []string{".txt"},
|
|
||||||
Languages: []string{"Text"},
|
|
||||||
Topics: []string{"unicode"},
|
|
||||||
CreatedAt: 1234567890,
|
|
||||||
UpdatedAt: 1234567890,
|
|
||||||
}
|
|
||||||
|
|
||||||
err := indexer.Add(gist)
|
|
||||||
if err != nil {
|
|
||||||
t.Fatalf("Failed to add gist: %v", err)
|
|
||||||
}
|
|
||||||
|
|
||||||
// Search for unicode content
|
|
||||||
gistIDs, total, _, err := indexer.Search(SearchGistMetadata{All: "café"}, 100, 1)
|
|
||||||
if err != nil {
|
|
||||||
t.Fatalf("Search failed: %v", err)
|
|
||||||
}
|
|
||||||
|
|
||||||
if total == 0 {
|
|
||||||
t.Skip("Unicode search may require specific index configuration")
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
found := false
|
|
||||||
for _, id := range gistIDs {
|
|
||||||
if id == 100 {
|
|
||||||
found = true
|
|
||||||
break
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if !found {
|
|
||||||
t.Log("Unicode gist not found in search results, but other results were returned")
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|||||||
File diff suppressed because it is too large
Load Diff
@@ -6,6 +6,7 @@ import (
|
|||||||
"fmt"
|
"fmt"
|
||||||
"strconv"
|
"strconv"
|
||||||
"strings"
|
"strings"
|
||||||
|
"unicode"
|
||||||
|
|
||||||
"github.com/meilisearch/meilisearch-go"
|
"github.com/meilisearch/meilisearch-go"
|
||||||
"github.com/rs/zerolog/log"
|
"github.com/rs/zerolog/log"
|
||||||
@@ -51,10 +52,7 @@ func (i *MeiliIndexer) open() (meilisearch.IndexManager, error) {
|
|||||||
i.client = meilisearch.New(i.host, meilisearch.WithAPIKey(i.apikey))
|
i.client = meilisearch.New(i.host, meilisearch.WithAPIKey(i.apikey))
|
||||||
indexResult, err := i.client.GetIndex(i.indexName)
|
indexResult, err := i.client.GetIndex(i.indexName)
|
||||||
|
|
||||||
if indexResult != nil && err == nil {
|
if indexResult == nil || err != nil {
|
||||||
return indexResult.IndexManager, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
_, err = i.client.CreateIndex(&meilisearch.IndexConfig{
|
_, err = i.client.CreateIndex(&meilisearch.IndexConfig{
|
||||||
Uid: i.indexName,
|
Uid: i.indexName,
|
||||||
PrimaryKey: "GistID",
|
PrimaryKey: "GistID",
|
||||||
@@ -62,12 +60,17 @@ func (i *MeiliIndexer) open() (meilisearch.IndexManager, error) {
|
|||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
|
}
|
||||||
|
|
||||||
_, _ = i.client.Index(i.indexName).UpdateSettings(&meilisearch.Settings{
|
_, _ = i.client.Index(i.indexName).UpdateSettings(&meilisearch.Settings{
|
||||||
FilterableAttributes: []string{"GistID", "UserID", "Visibility", "Username", "Title", "Description", "Filenames", "Extensions", "Languages", "Topics"},
|
FilterableAttributes: []string{"GistID", "UserID", "Visibility", "Username", "Extensions", "Languages", "Topics"},
|
||||||
DisplayedAttributes: []string{"GistID"},
|
SearchableAttributes: []string{"Content", "ContentSplit", "Username", "Title", "Description", "Filenames", "Extensions", "Languages", "Topics"},
|
||||||
SearchableAttributes: []string{"Content", "Username", "Title", "Description", "Filenames", "Extensions", "Languages", "Topics"},
|
RankingRules: []string{"words", "typo", "proximity", "attribute", "sort", "exactness"},
|
||||||
RankingRules: []string{"words"},
|
TypoTolerance: &meilisearch.TypoTolerance{
|
||||||
|
Enabled: true,
|
||||||
|
DisableOnNumbers: true,
|
||||||
|
MinWordSizeForTypos: meilisearch.MinWordSizeForTypos{OneTypo: 4, TwoTypos: 10},
|
||||||
|
},
|
||||||
})
|
})
|
||||||
|
|
||||||
return i.client.Index(i.indexName), nil
|
return i.client.Index(i.indexName), nil
|
||||||
@@ -96,12 +99,21 @@ func (i *MeiliIndexer) Close() {
|
|||||||
i.client = nil
|
i.client = nil
|
||||||
}
|
}
|
||||||
|
|
||||||
|
type meiliGist struct {
|
||||||
|
Gist
|
||||||
|
ContentSplit string
|
||||||
|
}
|
||||||
|
|
||||||
func (i *MeiliIndexer) Add(gist *Gist) error {
|
func (i *MeiliIndexer) Add(gist *Gist) error {
|
||||||
if gist == nil {
|
if gist == nil {
|
||||||
return errors.New("failed to add nil gist to index")
|
return errors.New("failed to add nil gist to index")
|
||||||
}
|
}
|
||||||
|
doc := &meiliGist{
|
||||||
|
Gist: *gist,
|
||||||
|
ContentSplit: splitCamelCase(gist.Content),
|
||||||
|
}
|
||||||
primaryKey := "GistID"
|
primaryKey := "GistID"
|
||||||
_, err := (*atomicIndexer.Load()).(*MeiliIndexer).index.AddDocuments(gist, &meilisearch.DocumentOptions{PrimaryKey: &primaryKey})
|
_, err := (*atomicIndexer.Load()).(*MeiliIndexer).index.AddDocuments(doc, &meilisearch.DocumentOptions{PrimaryKey: &primaryKey})
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -116,7 +128,8 @@ func (i *MeiliIndexer) Search(queryMetadata SearchGistMetadata, userId uint, pag
|
|||||||
Limit: 11,
|
Limit: 11,
|
||||||
AttributesToRetrieve: []string{"GistID", "Languages"},
|
AttributesToRetrieve: []string{"GistID", "Languages"},
|
||||||
Facets: []string{"Languages"},
|
Facets: []string{"Languages"},
|
||||||
AttributesToSearchOn: []string{"Content"},
|
AttributesToSearchOn: []string{"Content", "ContentSplit"},
|
||||||
|
MatchingStrategy: meilisearch.All,
|
||||||
}
|
}
|
||||||
|
|
||||||
var filters []string
|
var filters []string
|
||||||
@@ -127,21 +140,24 @@ func (i *MeiliIndexer) Search(queryMetadata SearchGistMetadata, userId uint, pag
|
|||||||
filters = append(filters, fmt.Sprintf("%s = \"%s\"", field, escapeFilterValue(value)))
|
filters = append(filters, fmt.Sprintf("%s = \"%s\"", field, escapeFilterValue(value)))
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
var query string
|
||||||
|
if queryMetadata.All != "" {
|
||||||
|
query = queryMetadata.All
|
||||||
|
searchRequest.AttributesToSearchOn = append(AllSearchFields, "ContentSplit")
|
||||||
|
} else {
|
||||||
|
// Exact-match fields stay as filters
|
||||||
addFilter("Username", queryMetadata.Username)
|
addFilter("Username", queryMetadata.Username)
|
||||||
addFilter("Title", queryMetadata.Title)
|
if queryMetadata.Extension != "" {
|
||||||
addFilter("Description", queryMetadata.Description)
|
ext := queryMetadata.Extension
|
||||||
addFilter("Filenames", queryMetadata.Filename)
|
if !strings.HasPrefix(ext, ".") {
|
||||||
addFilter("Extensions", queryMetadata.Extension)
|
ext = "." + ext
|
||||||
|
}
|
||||||
|
addFilter("Extensions", ext)
|
||||||
|
}
|
||||||
addFilter("Languages", queryMetadata.Language)
|
addFilter("Languages", queryMetadata.Language)
|
||||||
addFilter("Topics", queryMetadata.Topic)
|
addFilter("Topics", queryMetadata.Topic)
|
||||||
|
|
||||||
if len(filters) > 0 {
|
if queryMetadata.Default != "" {
|
||||||
searchRequest.Filter = strings.Join(filters, " AND ")
|
|
||||||
}
|
|
||||||
|
|
||||||
// build query string from provided metadata. Prefer `All`, then `Default`, fall back to `Content`.
|
|
||||||
query := queryMetadata.All
|
|
||||||
if query == "" && queryMetadata.Default != "" {
|
|
||||||
query = queryMetadata.Default
|
query = queryMetadata.Default
|
||||||
var fields []string
|
var fields []string
|
||||||
for _, f := range strings.Split(config.C.SearchDefault, ",") {
|
for _, f := range strings.Split(config.C.SearchDefault, ",") {
|
||||||
@@ -155,10 +171,45 @@ func (i *MeiliIndexer) Search(queryMetadata SearchGistMetadata, userId uint, pag
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
if len(fields) > 0 {
|
if len(fields) > 0 {
|
||||||
|
for _, f := range fields {
|
||||||
|
if f == "Content" {
|
||||||
|
fields = append(fields, "ContentSplit")
|
||||||
|
break
|
||||||
|
}
|
||||||
|
}
|
||||||
searchRequest.AttributesToSearchOn = fields
|
searchRequest.AttributesToSearchOn = fields
|
||||||
}
|
}
|
||||||
} else if query == "" {
|
} else {
|
||||||
query = queryMetadata.Content
|
// Fuzzy-matchable fields become part of the query
|
||||||
|
var queryParts []string
|
||||||
|
var searchFields []string
|
||||||
|
|
||||||
|
if queryMetadata.Content != "" {
|
||||||
|
queryParts = append(queryParts, queryMetadata.Content)
|
||||||
|
searchFields = append(searchFields, "Content", "ContentSplit")
|
||||||
|
}
|
||||||
|
if queryMetadata.Title != "" {
|
||||||
|
queryParts = append(queryParts, queryMetadata.Title)
|
||||||
|
searchFields = append(searchFields, "Title")
|
||||||
|
}
|
||||||
|
if queryMetadata.Description != "" {
|
||||||
|
queryParts = append(queryParts, queryMetadata.Description)
|
||||||
|
searchFields = append(searchFields, "Description")
|
||||||
|
}
|
||||||
|
if queryMetadata.Filename != "" {
|
||||||
|
queryParts = append(queryParts, queryMetadata.Filename)
|
||||||
|
searchFields = append(searchFields, "Filenames")
|
||||||
|
}
|
||||||
|
|
||||||
|
query = strings.Join(queryParts, " ")
|
||||||
|
if len(searchFields) > 0 {
|
||||||
|
searchRequest.AttributesToSearchOn = searchFields
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if len(filters) > 0 {
|
||||||
|
searchRequest.Filter = strings.Join(filters, " AND ")
|
||||||
}
|
}
|
||||||
|
|
||||||
response, err := (*atomicIndexer.Load()).(*MeiliIndexer).index.Search(query, searchRequest)
|
response, err := (*atomicIndexer.Load()).(*MeiliIndexer).index.Search(query, searchRequest)
|
||||||
@@ -166,7 +217,6 @@ func (i *MeiliIndexer) Search(queryMetadata SearchGistMetadata, userId uint, pag
|
|||||||
log.Error().Err(err).Msg("Failed to search Meilisearch index")
|
log.Error().Err(err).Msg("Failed to search Meilisearch index")
|
||||||
return nil, 0, nil, err
|
return nil, 0, nil, err
|
||||||
}
|
}
|
||||||
|
|
||||||
gistIds := make([]uint, 0, len(response.Hits))
|
gistIds := make([]uint, 0, len(response.Hits))
|
||||||
for _, hit := range response.Hits {
|
for _, hit := range response.Hits {
|
||||||
if gistIDRaw, ok := hit["GistID"]; ok {
|
if gistIDRaw, ok := hit["GistID"]; ok {
|
||||||
@@ -182,7 +232,9 @@ func (i *MeiliIndexer) Search(queryMetadata SearchGistMetadata, userId uint, pag
|
|||||||
var facetDist map[string]map[string]int
|
var facetDist map[string]map[string]int
|
||||||
if err := json.Unmarshal(response.FacetDistribution, &facetDist); err == nil {
|
if err := json.Unmarshal(response.FacetDistribution, &facetDist); err == nil {
|
||||||
if facets, ok := facetDist["Languages"]; ok {
|
if facets, ok := facetDist["Languages"]; ok {
|
||||||
languageCounts = facets
|
for lang, count := range facets {
|
||||||
|
languageCounts[strings.ToLower(lang)] += count
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -190,6 +242,30 @@ func (i *MeiliIndexer) Search(queryMetadata SearchGistMetadata, userId uint, pag
|
|||||||
return gistIds, uint64(response.EstimatedTotalHits), languageCounts, nil
|
return gistIds, uint64(response.EstimatedTotalHits), languageCounts, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func splitCamelCase(text string) string {
|
||||||
|
var result strings.Builder
|
||||||
|
runes := []rune(text)
|
||||||
|
for i := 0; i < len(runes); i++ {
|
||||||
|
r := runes[i]
|
||||||
|
if i > 0 {
|
||||||
|
prev := runes[i-1]
|
||||||
|
if unicode.IsUpper(r) {
|
||||||
|
if unicode.IsLower(prev) || unicode.IsDigit(prev) {
|
||||||
|
result.WriteRune(' ')
|
||||||
|
} else if unicode.IsUpper(prev) && i+1 < len(runes) && unicode.IsLower(runes[i+1]) {
|
||||||
|
result.WriteRune(' ')
|
||||||
|
}
|
||||||
|
} else if unicode.IsDigit(r) && !unicode.IsDigit(prev) {
|
||||||
|
result.WriteRune(' ')
|
||||||
|
} else if !unicode.IsDigit(r) && unicode.IsDigit(prev) {
|
||||||
|
result.WriteRune(' ')
|
||||||
|
}
|
||||||
|
}
|
||||||
|
result.WriteRune(r)
|
||||||
|
}
|
||||||
|
return result.String()
|
||||||
|
}
|
||||||
|
|
||||||
func escapeFilterValue(value string) string {
|
func escapeFilterValue(value string) string {
|
||||||
escaped := strings.ReplaceAll(value, "\\", "\\\\")
|
escaped := strings.ReplaceAll(value, "\\", "\\\\")
|
||||||
escaped = strings.ReplaceAll(escaped, "\"", "\\\"")
|
escaped = strings.ReplaceAll(escaped, "\"", "\\\"")
|
||||||
|
|||||||
88
internal/index/meilisearch_test.go
Normal file
88
internal/index/meilisearch_test.go
Normal file
@@ -0,0 +1,88 @@
|
|||||||
|
package index
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"os"
|
||||||
|
"strconv"
|
||||||
|
"testing"
|
||||||
|
|
||||||
|
"github.com/meilisearch/meilisearch-go"
|
||||||
|
"github.com/rs/zerolog"
|
||||||
|
)
|
||||||
|
|
||||||
|
// syncMeiliIndexer wraps MeiliIndexer to make Add/Remove synchronous for tests.
// It embeds the production indexer and shadows its write operations with
// versions that block on MeiliSearch's WaitForTask, so a test can search
// immediately after indexing without racing the async task queue. All other
// Indexer methods are promoted unchanged from the embedded *MeiliIndexer.
type syncMeiliIndexer struct {
	*MeiliIndexer
}
|
||||||
|
|
||||||
|
func (s *syncMeiliIndexer) Add(gist *Gist) error {
|
||||||
|
if gist == nil {
|
||||||
|
return fmt.Errorf("failed to add nil gist to index")
|
||||||
|
}
|
||||||
|
doc := &meiliGist{
|
||||||
|
Gist: *gist,
|
||||||
|
ContentSplit: splitCamelCase(gist.Content),
|
||||||
|
}
|
||||||
|
primaryKey := "GistID"
|
||||||
|
taskInfo, err := s.index.AddDocuments(doc, &meilisearch.DocumentOptions{PrimaryKey: &primaryKey})
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
_, err = s.client.WaitForTask(taskInfo.TaskUID, 0)
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
func (s *syncMeiliIndexer) Remove(gistID uint) error {
|
||||||
|
taskInfo, err := s.index.DeleteDocument(strconv.Itoa(int(gistID)), nil)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
_, err = s.client.WaitForTask(taskInfo.TaskUID, 0)
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
func setupMeiliIndexer(t *testing.T) (Indexer, func()) {
|
||||||
|
zerolog.SetGlobalLevel(zerolog.Disabled)
|
||||||
|
t.Helper()
|
||||||
|
|
||||||
|
host := os.Getenv("OG_TEST_MEILI_HOST")
|
||||||
|
if host == "" {
|
||||||
|
host = "http://localhost:47700"
|
||||||
|
}
|
||||||
|
apiKey := os.Getenv("OG_TEST_MEILI_API_KEY")
|
||||||
|
|
||||||
|
indexName := fmt.Sprintf("test_%d", os.Getpid())
|
||||||
|
|
||||||
|
inner := NewMeiliIndexer(host, apiKey, indexName)
|
||||||
|
err := inner.Init()
|
||||||
|
if err != nil {
|
||||||
|
t.Skipf("MeiliSearch not available at %s: %v", host, err)
|
||||||
|
}
|
||||||
|
|
||||||
|
wrapped := &syncMeiliIndexer{MeiliIndexer: inner}
|
||||||
|
|
||||||
|
// Store the inner MeiliIndexer in atomicIndexer, because MeiliIndexer.Search
|
||||||
|
// type-asserts the global to *MeiliIndexer.
|
||||||
|
var idx Indexer = inner
|
||||||
|
atomicIndexer.Store(&idx)
|
||||||
|
|
||||||
|
cleanup := func() {
|
||||||
|
atomicIndexer.Store(nil)
|
||||||
|
inner.Reset()
|
||||||
|
inner.Close()
|
||||||
|
}
|
||||||
|
|
||||||
|
return wrapped, cleanup
|
||||||
|
}
|
||||||
|
|
||||||
|
// MeiliSearch entry points for the shared indexer test suite. Each test
// delegates to a backend-agnostic test helper, passing setupMeiliIndexer so
// the same scenarios run against the MeiliSearch implementation as against
// other Indexer backends.
func TestMeiliAddAndSearch(t *testing.T) { testAddAndSearch(t, setupMeiliIndexer) }
func TestMeiliAccessControl(t *testing.T) { testAccessControl(t, setupMeiliIndexer) }
func TestMeiliMetadataFilters(t *testing.T) { testMetadataFilters(t, setupMeiliIndexer) }
func TestMeiliAllFieldSearch(t *testing.T) { testAllFieldSearch(t, setupMeiliIndexer) }
func TestMeiliFuzzySearch(t *testing.T) { testFuzzySearch(t, setupMeiliIndexer) }
func TestMeiliContentSearch(t *testing.T) { testContentSearch(t, setupMeiliIndexer) }
func TestMeiliPagination(t *testing.T) { testPagination(t, setupMeiliIndexer) }
func TestMeiliLanguageFacets(t *testing.T) { testLanguageFacets(t, setupMeiliIndexer) }
func TestMeiliMetadataOnlySearch(t *testing.T) { testMetadataOnlySearch(t, setupMeiliIndexer) }
func TestMeiliTitleFuzzySearch(t *testing.T) { testTitleFuzzySearch(t, setupMeiliIndexer) }
func TestMeiliMultiLanguageFacets(t *testing.T) { testMultiLanguageFacets(t, setupMeiliIndexer) }
|
||||||
277
test.md
277
test.md
@@ -1,277 +0,0 @@
|
|||||||
---
|
|
||||||
description: Testing handler and middleware
|
|
||||||
slug: /testing
|
|
||||||
sidebar_position: 13
|
|
||||||
---
|
|
||||||
|
|
||||||
# Testing
|
|
||||||
|
|
||||||
## Testing Handler
|
|
||||||
|
|
||||||
`GET` `/users/:id`
|
|
||||||
|
|
||||||
Handler below retrieves user by id from the database. If user is not found it returns
|
|
||||||
`404` error with a message.
|
|
||||||
|
|
||||||
### CreateUser
|
|
||||||
|
|
||||||
`POST` `/users`
|
|
||||||
|
|
||||||
- Accepts JSON payload
|
|
||||||
- On success `201 - Created`
|
|
||||||
- On error `500 - Internal Server Error`
|
|
||||||
|
|
||||||
### GetUser
|
|
||||||
|
|
||||||
`GET` `/users/:email`
|
|
||||||
|
|
||||||
- On success `200 - OK`
|
|
||||||
- On error `404 - Not Found` if user is not found otherwise `500 - Internal Server Error`
|
|
||||||
|
|
||||||
`handler.go`
|
|
||||||
|
|
||||||
```go
|
|
||||||
package handler
|
|
||||||
|
|
||||||
import (
|
|
||||||
"net/http"
|
|
||||||
|
|
||||||
"github.com/labstack/echo/v5"
|
|
||||||
)
|
|
||||||
|
|
||||||
type (
|
|
||||||
User struct {
|
|
||||||
Name string `json:"name" form:"name"`
|
|
||||||
Email string `json:"email" form:"email"`
|
|
||||||
}
|
|
||||||
handler struct {
|
|
||||||
db map[string]*User
|
|
||||||
}
|
|
||||||
)
|
|
||||||
|
|
||||||
func (h *handler) createUser(c *echo.Context) error {
|
|
||||||
u := new(User)
|
|
||||||
if err := c.Bind(u); err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
return c.JSON(http.StatusCreated, u)
|
|
||||||
}
|
|
||||||
|
|
||||||
func (h *handler) getUser(c *echo.Context) error {
|
|
||||||
email := c.Param("email")
|
|
||||||
user := h.db[email]
|
|
||||||
if user == nil {
|
|
||||||
return echo.NewHTTPError(http.StatusNotFound, "user not found")
|
|
||||||
}
|
|
||||||
return c.JSON(http.StatusOK, user)
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
`handler_test.go`
|
|
||||||
|
|
||||||
```go
|
|
||||||
package handler
|
|
||||||
|
|
||||||
import (
|
|
||||||
"net/http"
|
|
||||||
"net/http/httptest"
|
|
||||||
"strings"
|
|
||||||
"testing"
|
|
||||||
|
|
||||||
"github.com/labstack/echo/v5"
|
|
||||||
"github.com/labstack/echo/v5/echotest"
|
|
||||||
"github.com/stretchr/testify/assert"
|
|
||||||
)
|
|
||||||
|
|
||||||
var (
|
|
||||||
mockDB = map[string]*User{
|
|
||||||
"jon@labstack.com": &User{"Jon Snow", "jon@labstack.com"},
|
|
||||||
}
|
|
||||||
userJSON = `{"name":"Jon Snow","email":"jon@labstack.com"}`
|
|
||||||
)
|
|
||||||
|
|
||||||
func TestCreateUser(t *testing.T) {
|
|
||||||
// Setup
|
|
||||||
e := echo.New()
|
|
||||||
req := httptest.NewRequest(http.MethodPost, "/", strings.NewReader(userJSON))
|
|
||||||
req.Header.Set(echo.HeaderContentType, echo.MIMEApplicationJSON)
|
|
||||||
|
|
||||||
rec := httptest.NewRecorder()
|
|
||||||
c := e.NewContext(req, rec)
|
|
||||||
|
|
||||||
	h := &handler{mockDB}
|
|
||||||
|
|
||||||
// Assertions
|
|
||||||
if assert.NoError(t, h.createUser(c)) {
|
|
||||||
assert.Equal(t, http.StatusCreated, rec.Code)
|
|
||||||
assert.Equal(t, userJSON, rec.Body.String())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Same test as above but using `echotest` package helpers
|
|
||||||
func TestCreateUserWithEchoTest(t *testing.T) {
|
|
||||||
c, rec := echotest.ContextConfig{
|
|
||||||
Headers: map[string][]string{
|
|
||||||
echo.HeaderContentType: {echo.MIMEApplicationJSON},
|
|
||||||
},
|
|
||||||
JSONBody: []byte(`{"name":"Jon Snow","email":"jon@labstack.com"}`),
|
|
||||||
}.ToContextRecorder(t)
|
|
||||||
|
|
||||||
	h := &handler{mockDB}
|
|
||||||
|
|
||||||
// Assertions
|
|
||||||
if assert.NoError(t, h.createUser(c)) {
|
|
||||||
assert.Equal(t, http.StatusCreated, rec.Code)
|
|
||||||
assert.Equal(t, userJSON+"\n", rec.Body.String())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Same test as above but even shorter
|
|
||||||
func TestCreateUserWithEchoTest2(t *testing.T) {
|
|
||||||
	h := &handler{mockDB}
|
|
||||||
|
|
||||||
rec := echotest.ContextConfig{
|
|
||||||
Headers: map[string][]string{
|
|
||||||
echo.HeaderContentType: {echo.MIMEApplicationJSON},
|
|
||||||
},
|
|
||||||
JSONBody: []byte(`{"name":"Jon Snow","email":"jon@labstack.com"}`),
|
|
||||||
}.ServeWithHandler(t, h.createUser)
|
|
||||||
|
|
||||||
assert.Equal(t, http.StatusCreated, rec.Code)
|
|
||||||
assert.Equal(t, userJSON+"\n", rec.Body.String())
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestGetUser(t *testing.T) {
|
|
||||||
// Setup
|
|
||||||
e := echo.New()
|
|
||||||
req := httptest.NewRequest(http.MethodGet, "/", nil)
|
|
||||||
rec := httptest.NewRecorder()
|
|
||||||
c := e.NewContext(req, rec)
|
|
||||||
|
|
||||||
c.SetPath("/users/:email")
|
|
||||||
c.SetPathValues(echo.PathValues{
|
|
||||||
{Name: "email", Value: "jon@labstack.com"},
|
|
||||||
})
|
|
||||||
	h := &handler{mockDB}
|
|
||||||
|
|
||||||
// Assertions
|
|
||||||
if assert.NoError(t, h.getUser(c)) {
|
|
||||||
assert.Equal(t, http.StatusOK, rec.Code)
|
|
||||||
assert.Equal(t, userJSON, rec.Body.String())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestGetUserWithEchoTest(t *testing.T) {
|
|
||||||
c, rec := echotest.ContextConfig{
|
|
||||||
PathValues: echo.PathValues{
|
|
||||||
{Name: "email", Value: "jon@labstack.com"},
|
|
||||||
},
|
|
||||||
Headers: map[string][]string{
|
|
||||||
echo.HeaderContentType: {echo.MIMEApplicationJSON},
|
|
||||||
},
|
|
||||||
JSONBody: []byte(userJSON),
|
|
||||||
}.ToContextRecorder(t)
|
|
||||||
|
|
||||||
	h := &handler{mockDB}
|
|
||||||
|
|
||||||
// Assertions
|
|
||||||
if assert.NoError(t, h.getUser(c)) {
|
|
||||||
assert.Equal(t, http.StatusOK, rec.Code)
|
|
||||||
assert.Equal(t, userJSON+"\n", rec.Body.String())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
### Using Form Payload
|
|
||||||
|
|
||||||
```go
|
|
||||||
// import "net/url"
|
|
||||||
f := make(url.Values)
|
|
||||||
f.Set("name", "Jon Snow")
|
|
||||||
f.Set("email", "jon@labstack.com")
|
|
||||||
req := httptest.NewRequest(http.MethodPost, "/", strings.NewReader(f.Encode()))
|
|
||||||
req.Header.Set(echo.HeaderContentType, echo.MIMEApplicationForm)
|
|
||||||
```
|
|
||||||
|
|
||||||
Multipart form payload:
|
|
||||||
```go
|
|
||||||
func TestContext_MultipartForm(t *testing.T) {
|
|
||||||
testConf := echotest.ContextConfig{
|
|
||||||
MultipartForm: &echotest.MultipartForm{
|
|
||||||
Fields: map[string]string{
|
|
||||||
"key": "value",
|
|
||||||
},
|
|
||||||
Files: []echotest.MultipartFormFile{
|
|
||||||
{
|
|
||||||
Fieldname: "file",
|
|
||||||
Filename: "test.json",
|
|
||||||
Content: echotest.LoadBytes(t, "testdata/test.json"),
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
}
|
|
||||||
c := testConf.ToContext(t)
|
|
||||||
|
|
||||||
assert.Equal(t, "value", c.FormValue("key"))
|
|
||||||
assert.Equal(t, http.MethodPost, c.Request().Method)
|
|
||||||
assert.Equal(t, true, strings.HasPrefix(c.Request().Header.Get(echo.HeaderContentType), "multipart/form-data; boundary="))
|
|
||||||
|
|
||||||
fv, err := c.FormFile("file")
|
|
||||||
if err != nil {
|
|
||||||
t.Fatal(err)
|
|
||||||
}
|
|
||||||
assert.Equal(t, "test.json", fv.Filename)
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
### Setting Path Params
|
|
||||||
|
|
||||||
```go
|
|
||||||
c.SetPathValues(echo.PathValues{
|
|
||||||
{Name: "id", Value: "1"},
|
|
||||||
{Name: "email", Value: "jon@labstack.com"},
|
|
||||||
})
|
|
||||||
```
|
|
||||||
|
|
||||||
### Setting Query Params
|
|
||||||
|
|
||||||
```go
|
|
||||||
// import "net/url"
|
|
||||||
q := make(url.Values)
|
|
||||||
q.Set("email", "jon@labstack.com")
|
|
||||||
req := httptest.NewRequest(http.MethodGet, "/?"+q.Encode(), nil)
|
|
||||||
```
|
|
||||||
|
|
||||||
## Testing Middleware
|
|
||||||
|
|
||||||
```go
|
|
||||||
func TestCreateUserWithEchoTest2(t *testing.T) {
|
|
||||||
handler := func(c *echo.Context) error {
|
|
||||||
return c.JSON(http.StatusTeapot, fmt.Sprintf("email: %s", c.Param("email")))
|
|
||||||
}
|
|
||||||
middleware := func(next echo.HandlerFunc) echo.HandlerFunc {
|
|
||||||
return func(c *echo.Context) error {
|
|
||||||
c.Set("user_id", int64(1234))
|
|
||||||
return next(c)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
c, rec := echotest.ContextConfig{
|
|
||||||
PathValues: echo.PathValues{{Name: "email", Value: "jon@labstack.com"}},
|
|
||||||
}.ToContextRecorder(t)
|
|
||||||
|
|
||||||
err := middleware(handler)(c)
|
|
||||||
if err != nil {
|
|
||||||
t.Fatal(err)
|
|
||||||
}
|
|
||||||
// check that middleware set the value
|
|
||||||
userID, err := echo.ContextGet[int64](c, "user_id")
|
|
||||||
assert.NoError(t, err)
|
|
||||||
assert.Equal(t, int64(1234), userID)
|
|
||||||
|
|
||||||
// check that handler returned the correct response
|
|
||||||
assert.Equal(t, http.StatusTeapot, rec.Code)
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
For now you can look into built-in middleware [test cases](https://github.com/labstack/echo/tree/master/middleware).
|
|
||||||
158
test2.md
158
test2.md
@@ -1,158 +0,0 @@
|
|||||||
---
|
|
||||||
description: Testing handler and middleware
|
|
||||||
slug: /testing
|
|
||||||
sidebar_position: 13
|
|
||||||
---
|
|
||||||
|
|
||||||
# Testing
|
|
||||||
|
|
||||||
## Testing Handler
|
|
||||||
|
|
||||||
`GET` `/users/:id`
|
|
||||||
|
|
||||||
Handler below retrieves user by id from the database. If user is not found it returns
|
|
||||||
`404` error with a message.
|
|
||||||
|
|
||||||
### CreateUser
|
|
||||||
|
|
||||||
`POST` `/users`
|
|
||||||
|
|
||||||
- Accepts JSON payload
|
|
||||||
- On success `201 - Created`
|
|
||||||
- On error `500 - Internal Server Error`
|
|
||||||
|
|
||||||
### GetUser
|
|
||||||
|
|
||||||
`GET` `/users/:email`
|
|
||||||
|
|
||||||
- On success `200 - OK`
|
|
||||||
- On error `404 - Not Found` if user is not found otherwise `500 - Internal Server Error`
|
|
||||||
|
|
||||||
`handler.go`
|
|
||||||
|
|
||||||
```go
|
|
||||||
package handler
|
|
||||||
|
|
||||||
import (
|
|
||||||
"net/http"
|
|
||||||
|
|
||||||
"github.com/labstack/echo/v4"
|
|
||||||
)
|
|
||||||
|
|
||||||
type (
|
|
||||||
User struct {
|
|
||||||
Name string `json:"name" form:"name"`
|
|
||||||
Email string `json:"email" form:"email"`
|
|
||||||
}
|
|
||||||
handler struct {
|
|
||||||
db map[string]*User
|
|
||||||
}
|
|
||||||
)
|
|
||||||
|
|
||||||
func (h *handler) createUser(c echo.Context) error {
|
|
||||||
u := new(User)
|
|
||||||
if err := c.Bind(u); err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
return c.JSON(http.StatusCreated, u)
|
|
||||||
}
|
|
||||||
|
|
||||||
func (h *handler) getUser(c echo.Context) error {
|
|
||||||
email := c.Param("email")
|
|
||||||
user := h.db[email]
|
|
||||||
if user == nil {
|
|
||||||
return echo.NewHTTPError(http.StatusNotFound, "user not found")
|
|
||||||
}
|
|
||||||
return c.JSON(http.StatusOK, user)
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
`handler_test.go`
|
|
||||||
|
|
||||||
```go
|
|
||||||
package handler
|
|
||||||
|
|
||||||
import (
|
|
||||||
"net/http"
|
|
||||||
"net/http/httptest"
|
|
||||||
"strings"
|
|
||||||
"testing"
|
|
||||||
|
|
||||||
"github.com/labstack/echo/v4"
|
|
||||||
"github.com/stretchr/testify/assert"
|
|
||||||
)
|
|
||||||
|
|
||||||
var (
|
|
||||||
mockDB = map[string]*User{
|
|
||||||
"jon@labstack.com": &User{"Jon Snow", "jon@labstack.com"},
|
|
||||||
}
|
|
||||||
userJSON = `{"name":"Jon Snow","email":"jon@labstack.com"}`
|
|
||||||
)
|
|
||||||
|
|
||||||
func TestCreateUser(t *testing.T) {
|
|
||||||
// Setup
|
|
||||||
e := echo.New()
|
|
||||||
req := httptest.NewRequest(http.MethodPost, "/", strings.NewReader(userJSON))
|
|
||||||
req.Header.Set(echo.HeaderContentType, echo.MIMEApplicationJSON)
|
|
||||||
rec := httptest.NewRecorder()
|
|
||||||
c := e.NewContext(req, rec)
|
|
||||||
h := &handler{mockDB}
|
|
||||||
|
|
||||||
// Assertions
|
|
||||||
if assert.NoError(t, h.createUser(c)) {
|
|
||||||
assert.Equal(t, http.StatusCreated, rec.Code)
|
|
||||||
assert.Equal(t, userJSON, rec.Body.String())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestGetUser(t *testing.T) {
|
|
||||||
// Setup
|
|
||||||
e := echo.New()
|
|
||||||
req := httptest.NewRequest(http.MethodGet, "/", nil)
|
|
||||||
rec := httptest.NewRecorder()
|
|
||||||
c := e.NewContext(req, rec)
|
|
||||||
c.SetPath("/users/:email")
|
|
||||||
c.SetParamNames("email")
|
|
||||||
c.SetParamValues("jon@labstack.com")
|
|
||||||
h := &handler{mockDB}
|
|
||||||
|
|
||||||
// Assertions
|
|
||||||
if assert.NoError(t, h.getUser(c)) {
|
|
||||||
assert.Equal(t, http.StatusOK, rec.Code)
|
|
||||||
assert.Equal(t, userJSON, rec.Body.String())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
### Using Form Payload
|
|
||||||
|
|
||||||
```go
|
|
||||||
// import "net/url"
|
|
||||||
f := make(url.Values)
|
|
||||||
f.Set("name", "Jon Snow")
|
|
||||||
f.Set("email", "jon@labstack.com")
|
|
||||||
req := httptest.NewRequest(http.MethodPost, "/", strings.NewReader(f.Encode()))
|
|
||||||
req.Header.Set(echo.HeaderContentType, echo.MIMEApplicationForm)
|
|
||||||
```
|
|
||||||
|
|
||||||
### Setting Path Params
|
|
||||||
|
|
||||||
```go
|
|
||||||
c.SetParamNames("id", "email")
|
|
||||||
c.SetParamValues("1", "jon@labstack.com")
|
|
||||||
```
|
|
||||||
|
|
||||||
### Setting Query Params
|
|
||||||
|
|
||||||
```go
|
|
||||||
// import "net/url"
|
|
||||||
q := make(url.Values)
|
|
||||||
q.Set("email", "jon@labstack.com")
|
|
||||||
req := httptest.NewRequest(http.MethodGet, "/?"+q.Encode(), nil)
|
|
||||||
```
|
|
||||||
|
|
||||||
## Testing Middleware
|
|
||||||
|
|
||||||
*TBD*
|
|
||||||
|
|
||||||
For now you can look into built-in middleware [test cases](https://github.com/labstack/echo/tree/master/middleware).
|
|
||||||
Reference in New Issue
Block a user