diff --git a/docs/vectors.md b/docs/vectors.md
index 439631e5c..085b208be 100644
--- a/docs/vectors.md
+++ b/docs/vectors.md
@@ -81,6 +81,26 @@ if err != nil {
 fmt.Println(searchResult.Hits)
 ```
 
+## Querying with Filters (v2.4.3+)
+```go
+searchRequest := NewSearchRequest(query.NewMatchNoneQuery())
+
+filterQuery := NewTermQuery("hello")
+
+searchRequest.AddKNNWithFilter(
+    "vec",                                      // vector field name
+    []float32{10,11,12,13,14,15,16,17,18,19},   // query vector (same dims)
+    5,                                          // k
+    0,                                          // boost
+    filterQuery,                                // filter query
+)
+searchResult, err := index.Search(searchRequest)
+if err != nil {
+    panic(err)
+}
+fmt.Println(searchResult.Hits)
+```
+
 ## Setup Instructions
 
 * Using `cmake` is a recommended approach by FAISS authors.
diff --git a/go.mod b/go.mod
index f3c66a39f..7da6dbbde 100644
--- a/go.mod
+++ b/go.mod
@@ -5,13 +5,13 @@ go 1.21
 require (
 	github.com/RoaringBitmap/roaring v1.9.3
 	github.com/bits-and-blooms/bitset v1.12.0
-	github.com/blevesearch/bleve_index_api v1.1.11
+	github.com/blevesearch/bleve_index_api v1.1.12
 	github.com/blevesearch/geo v0.1.20
 	github.com/blevesearch/go-metrics v0.0.0-20201227073835-cf1acfcdf475
 	github.com/blevesearch/go-porterstemmer v1.0.3
 	github.com/blevesearch/goleveldb v1.0.1
 	github.com/blevesearch/gtreap v0.1.1
-	github.com/blevesearch/scorch_segment_api/v2 v2.2.15
+	github.com/blevesearch/scorch_segment_api/v2 v2.2.16
 	github.com/blevesearch/segment v0.9.1
 	github.com/blevesearch/snowball v0.6.1
 	github.com/blevesearch/snowballstem v0.9.0
@@ -23,7 +23,7 @@ require (
 	github.com/blevesearch/zapx/v13 v13.3.10
 	github.com/blevesearch/zapx/v14 v14.3.10
 	github.com/blevesearch/zapx/v15 v15.3.13
-	github.com/blevesearch/zapx/v16 v16.1.6-0.20240904144721-dbcb3c000a29
+	github.com/blevesearch/zapx/v16 v16.1.6-0.20240909182401-e148470cefbe
 	github.com/couchbase/moss v0.2.0
 	github.com/golang/protobuf v1.3.2
 	github.com/spf13/cobra v1.7.0
@@ -32,7 +32,7 @@ require (
 )
 
 require (
-	github.com/blevesearch/go-faiss v1.0.21 // indirect
+	github.com/blevesearch/go-faiss v1.0.22-0.20240909180832-35a1ff78ead4 // indirect
 	github.com/blevesearch/mmap-go v1.0.4 // indirect
 	github.com/couchbase/ghistogram v0.1.0 // indirect
 	github.com/golang/geo v0.0.0-20210211234256-740aa86cb551 // indirect
diff --git a/go.sum b/go.sum
index 00a0e0ae5..660c832bb 100644
--- a/go.sum
+++ b/go.sum
@@ -2,12 +2,12 @@ github.com/RoaringBitmap/roaring v1.9.3 h1:t4EbC5qQwnisr5PrP9nt0IRhRTb9gMUgQF4t4
 github.com/RoaringBitmap/roaring v1.9.3/go.mod h1:6AXUsoIEzDTFFQCe1RbGA6uFONMhvejWj5rqITANK90=
 github.com/bits-and-blooms/bitset v1.12.0 h1:U/q1fAF7xXRhFCrhROzIfffYnu+dlS38vCZtmFVPHmA=
 github.com/bits-and-blooms/bitset v1.12.0/go.mod h1:7hO7Gc7Pp1vODcmWvKMRA9BNmbv6a/7QIWpPxHddWR8=
-github.com/blevesearch/bleve_index_api v1.1.11 h1:OTNpRnxPWFIhMSgBUBlkD7RVWYrfsojtQeACb8tGGpw=
-github.com/blevesearch/bleve_index_api v1.1.11/go.mod h1:PbcwjIcRmjhGbkS/lJCpfgVSMROV6TRubGGAODaK1W8=
+github.com/blevesearch/bleve_index_api v1.1.12 h1:P4bw9/G/5rulOF7SJ9l4FsDoo7UFJ+5kexNy1RXfegY=
+github.com/blevesearch/bleve_index_api v1.1.12/go.mod h1:PbcwjIcRmjhGbkS/lJCpfgVSMROV6TRubGGAODaK1W8=
 github.com/blevesearch/geo v0.1.20 h1:paaSpu2Ewh/tn5DKn/FB5SzvH0EWupxHEIwbCk/QPqM=
 github.com/blevesearch/geo v0.1.20/go.mod h1:DVG2QjwHNMFmjo+ZgzrIq2sfCh6rIHzy9d9d0B59I6w=
-github.com/blevesearch/go-faiss v1.0.21 h1:0PdlpvqTC9uP67TBNBVSw+aLoFqo8oulghQ3R9NZ4Pk=
-github.com/blevesearch/go-faiss v1.0.21/go.mod h1:OMGQwOaRRYxrmeNdMrXJPvVx8gBnvE5RYrr0BahNnkk=
+github.com/blevesearch/go-faiss v1.0.22-0.20240909180832-35a1ff78ead4 h1:riy8XP3UIBeVjMhsq1r1aGfjvTf3aPp2PuXxdiw9P4s=
+github.com/blevesearch/go-faiss v1.0.22-0.20240909180832-35a1ff78ead4/go.mod h1:OMGQwOaRRYxrmeNdMrXJPvVx8gBnvE5RYrr0BahNnkk=
 github.com/blevesearch/go-metrics v0.0.0-20201227073835-cf1acfcdf475 h1:kDy+zgJFJJoJYBvdfBSiZYBbdsUL0XcjHYWezpQBGPA=
 github.com/blevesearch/go-metrics v0.0.0-20201227073835-cf1acfcdf475/go.mod h1:9eJDeqxJ3E7WnLebQUlPD7ZjSce7AnDb9vjGmMCbD0A=
 github.com/blevesearch/go-porterstemmer v1.0.3 h1:GtmsqID0aZdCSNiY8SkuPJ12pD4jI+DdXTAn4YRcHCo=
@@ -19,8 +19,8 @@ github.com/blevesearch/gtreap v0.1.1/go.mod h1:QaQyDRAT51sotthUWAH4Sj08awFSSWzgY
 github.com/blevesearch/mmap-go v1.0.2/go.mod h1:ol2qBqYaOUsGdm7aRMRrYGgPvnwLe6Y+7LMvAB5IbSA=
 github.com/blevesearch/mmap-go v1.0.4 h1:OVhDhT5B/M1HNPpYPBKIEJaD0F3Si+CrEKULGCDPWmc=
 github.com/blevesearch/mmap-go v1.0.4/go.mod h1:EWmEAOmdAS9z/pi/+Toxu99DnsbhG1TIxUoRmJw/pSs=
-github.com/blevesearch/scorch_segment_api/v2 v2.2.15 h1:prV17iU/o+A8FiZi9MXmqbagd8I0bCqM7OKUYPbnb5Y=
-github.com/blevesearch/scorch_segment_api/v2 v2.2.15/go.mod h1:db0cmP03bPNadXrCDuVkKLV6ywFSiRgPFT1YVrestBc=
+github.com/blevesearch/scorch_segment_api/v2 v2.2.16 h1:uGvKVvG7zvSxCwcm4/ehBa9cCEuZVE+/zvrSl57QUVY=
+github.com/blevesearch/scorch_segment_api/v2 v2.2.16/go.mod h1:VF5oHVbIFTu+znY1v30GjSpT5+9YFs9dV2hjvuh34F0=
 github.com/blevesearch/segment v0.9.1 h1:+dThDy+Lvgj5JMxhmOVlgFfkUtZV2kw49xax4+jTfSU=
 github.com/blevesearch/segment v0.9.1/go.mod h1:zN21iLm7+GnBHWTao9I+Au/7MBiL8pPFtJBJTsk6kQw=
 github.com/blevesearch/snowball v0.6.1 h1:cDYjn/NCH+wwt2UdehaLpr2e4BwLIjN4V/TdLsL+B5A=
@@ -43,8 +43,8 @@ github.com/blevesearch/zapx/v14 v14.3.10 h1:SG6xlsL+W6YjhX5N3aEiL/2tcWh3DO75Bnz7
 github.com/blevesearch/zapx/v14 v14.3.10/go.mod h1:qqyuR0u230jN1yMmE4FIAuCxmahRQEOehF78m6oTgns=
 github.com/blevesearch/zapx/v15 v15.3.13 h1:6EkfaZiPlAxqXz0neniq35my6S48QI94W/wyhnpDHHQ=
 github.com/blevesearch/zapx/v15 v15.3.13/go.mod h1:Turk/TNRKj9es7ZpKK95PS7f6D44Y7fAFy8F4LXQtGg=
-github.com/blevesearch/zapx/v16 v16.1.6-0.20240904144721-dbcb3c000a29 h1:SpzoLVHa6DlbtS1o5WZu1qBzE15nQHKOViZkJHd0+XQ=
-github.com/blevesearch/zapx/v16 v16.1.6-0.20240904144721-dbcb3c000a29/go.mod h1:9WSiNE0zM1mu9cuJqq8peTXxccVHGIcIzeG/QKX13fc=
+github.com/blevesearch/zapx/v16 v16.1.6-0.20240909182401-e148470cefbe h1:S1rCvhrU2HqDrRtogYgM52rT5px7o2zFIB3Yo+JPFOU=
+github.com/blevesearch/zapx/v16 v16.1.6-0.20240909182401-e148470cefbe/go.mod h1:x9Kg015zbkSXxmE7F+0qeGxpeHJBwkDuxosrrDxYltU=
 github.com/couchbase/ghistogram v0.1.0 h1:b95QcQTCzjTUocDXp/uMgSNQi8oj1tGwnJ4bODWZnps=
 github.com/couchbase/ghistogram v0.1.0/go.mod h1:s1Jhy76zqfEecpNWJfWUiKZookAFaiGOEoyzgHt9i7k=
 github.com/couchbase/moss v0.2.0 h1:VCYrMzFwEryyhRSeI+/b3tRBSeTpi/8gn5Kf6dxqn+o=
diff --git a/index/scorch/optimize_knn.go b/index/scorch/optimize_knn.go
index 6b10a207c..8be394a12 100644
--- a/index/scorch/optimize_knn.go
+++ b/index/scorch/optimize_knn.go
@@ -34,6 +34,8 @@ type OptimizeVR struct {
 	totalCost uint64
 	// maps field to vector readers
 	vrs map[string][]*IndexSnapshotVectorReader
+	// if at least one of the vector readers requires filtered kNN.
+	requiresFiltering bool
 }
 
 // This setting _MUST_ only be changed during init and not after.
@@ -62,6 +64,8 @@ func (o *OptimizeVR) Finish() error {
 	var errorsM sync.Mutex
 	var errors []error
 
+	snapshotGlobalDocNums := o.snapshot.globalDocNums()
+
 	defer o.invokeSearcherEndCallback()
 
 	wg := sync.WaitGroup{}
@@ -77,7 +81,8 @@ func (o *OptimizeVR) Finish() error {
 					wg.Done()
 				}()
 				for field, vrs := range o.vrs {
-					vecIndex, err := segment.InterpretVectorIndex(field, origSeg.deleted)
+					vecIndex, err := segment.InterpretVectorIndex(field,
+						o.requiresFiltering, origSeg.deleted)
 					if err != nil {
 						errorsM.Lock()
 						errors = append(errors, err)
@@ -89,9 +94,34 @@ func (o *OptimizeVR) Finish() error {
 					vectorIndexSize := vecIndex.Size()
 					origSeg.cachedMeta.updateMeta(field, vectorIndexSize)
 					for _, vr := range vrs {
+						eligibleVectorInternalIDs := vr.getEligibleDocIDs()
+						if snapshotGlobalDocNums != nil {
+							// Only the eligible documents belonging to this segment
+							// will get filtered out.
+							// There is no way to determine which doc belongs to which segment
+							eligibleVectorInternalIDs.And(snapshotGlobalDocNums[index])
+						}
+
+						eligibleLocalDocNums := make([]uint64,
+							eligibleVectorInternalIDs.Stats().Cardinality)
+						// get the (segment-)local document numbers
+						for i, docNum := range eligibleVectorInternalIDs.ToArray() {
+							localDocNum := o.snapshot.localDocNumFromGlobal(index,
+								uint64(docNum))
+							eligibleLocalDocNums[i] = localDocNum
+						}
+
+						var pl segment_api.VecPostingsList
+						var err error
 						// for each VR, populate postings list and iterators
 						// by passing the obtained vector index and getting similar vectors.
-						pl, err := vecIndex.Search(vr.vector, vr.k, vr.searchParams)
+						if len(vr.eligibleDocIDs) > 0 {
+							pl, err = vecIndex.SearchWithFilter(vr.vector, vr.k,
+								eligibleLocalDocNums, vr.searchParams)
+						} else {
+							pl, err = vecIndex.Search(vr.vector, vr.k, vr.searchParams)
+						}
+
 						if err != nil {
 							errorsM.Lock()
 							errors = append(errors, err)
@@ -140,6 +170,9 @@ func (s *IndexSnapshotVectorReader) VectorOptimize(ctx context.Context,
 		return octx, nil
 	}
 	o.ctx = ctx
+	if !o.requiresFiltering {
+		o.requiresFiltering = len(s.eligibleDocIDs) > 0
+	}
 
 	if o.snapshot != s.snapshot {
 		o.invokeSearcherEndCallback()
diff --git a/index/scorch/snapshot_index.go b/index/scorch/snapshot_index.go
index f0e7ae1cf..79840a41f 100644
--- a/index/scorch/snapshot_index.go
+++ b/index/scorch/snapshot_index.go
@@ -471,16 +471,44 @@ func (is *IndexSnapshot) Document(id string) (rv index.Document, err error) {
 	return rvd, nil
 }
 
+// In a multi-segment index, each document has:
+// 1. a local docnum - local to the segment
+// 2. a global docnum - unique identifier across the index
+// This function returns the segment index(the segment in which the docnum is present)
+// and local docnum of a document.
 func (is *IndexSnapshot) segmentIndexAndLocalDocNumFromGlobal(docNum uint64) (int, uint64) {
 	segmentIndex := sort.Search(len(is.offsets),
 		func(x int) bool {
 			return is.offsets[x] > docNum
 		}) - 1
 
-	localDocNum := docNum - is.offsets[segmentIndex]
+	localDocNum := is.localDocNumFromGlobal(segmentIndex, docNum)
 	return int(segmentIndex), localDocNum
 }
 
+// This function returns the local docnum, given the segment index and global docnum
+func (is *IndexSnapshot) localDocNumFromGlobal(segmentIndex int, docNum uint64) uint64 {
+	return docNum - is.offsets[segmentIndex]
+}
+
+// globalDocNums returns a mapping of the segment index to the live global doc nums
+// in the segment of the specified index snapshot.
+func (is *IndexSnapshot) globalDocNums() map[int]*roaring.Bitmap {
+	if len(is.segment) == 0 {
+		return nil
+	}
+
+	segmentIndexGlobalDocNums := make(map[int]*roaring.Bitmap)
+
+	for i := range is.segment {
+		segmentIndexGlobalDocNums[i] = roaring.NewBitmap()
+		for _, localDocNum := range is.segment[i].DocNumbersLive().ToArray() {
+			segmentIndexGlobalDocNums[i].Add(localDocNum + uint32(is.offsets[i]))
+		}
+	}
+	return segmentIndexGlobalDocNums
+}
+
 func (is *IndexSnapshot) ExternalID(id index.IndexInternalID) (string, error) {
 	docNum, err := docInternalToNumber(id)
 	if err != nil {
diff --git a/index/scorch/snapshot_index_vr.go b/index/scorch/snapshot_index_vr.go
index 30e03dcba..05b5167fd 100644
--- a/index/scorch/snapshot_index_vr.go
+++ b/index/scorch/snapshot_index_vr.go
@@ -24,6 +24,7 @@ import (
 	"fmt"
 	"reflect"
 
+	"github.com/RoaringBitmap/roaring"
 	"github.com/blevesearch/bleve/v2/size"
 	index "github.com/blevesearch/bleve_index_api"
 	segment_api "github.com/blevesearch/scorch_segment_api/v2"
@@ -51,6 +52,24 @@ type IndexSnapshotVectorReader struct {
 	ctx           context.Context
 
 	searchParams json.RawMessage
+
+	// The following fields are only applicable for vector readers which will
+	// process kNN queries.
+	eligibleDocIDs []index.IndexInternalID
+}
+
+// Function to convert the internal IDs of the eligible documents to a type suitable
+// for addition to a bitmap.
+// Useful to have the eligible doc IDs in a bitmap to leverage the fast intersection
+// (AND) operations. Eg. finding the eligible doc IDs present in a segment.
+func (i *IndexSnapshotVectorReader) getEligibleDocIDs() *roaring.Bitmap {
+	res := roaring.NewBitmap()
+	// converts the doc IDs to uint32 and returns
+	for _, eligibleDocInternalID := range i.eligibleDocIDs {
+		internalDocID, _ := docInternalToNumber(eligibleDocInternalID)
+		res.Add(uint32(internalDocID))
+	}
+	return res
 }
 
 func (i *IndexSnapshotVectorReader) Size() int {
@@ -108,7 +127,17 @@ func (i *IndexSnapshotVectorReader) Advance(ID index.IndexInternalID,
 	preAlloced *index.VectorDoc) (*index.VectorDoc, error) {
 
 	if i.currPosting != nil && bytes.Compare(i.currID, ID) >= 0 {
-		i2, err := i.snapshot.VectorReader(i.ctx, i.vector, i.field, i.k, i.searchParams)
+		var i2 index.VectorReader
+		var err error
+
+		if len(i.eligibleDocIDs) > 0 {
+			i2, err = i.snapshot.VectorReaderWithFilter(i.ctx, i.vector, i.field,
+				i.k, i.searchParams, i.eligibleDocIDs)
+		} else {
+			i2, err = i.snapshot.VectorReader(i.ctx, i.vector, i.field, i.k,
+				i.searchParams)
+		}
+
 		if err != nil {
 			return nil, err
 		}
diff --git a/index/scorch/snapshot_vector_index.go b/index/scorch/snapshot_vector_index.go
index 70546d4e3..bcb05024d 100644
--- a/index/scorch/snapshot_vector_index.go
+++ b/index/scorch/snapshot_vector_index.go
@@ -48,3 +48,29 @@ func (is *IndexSnapshot) VectorReader(ctx context.Context, vector []float32,
 
 	return rv, nil
 }
+
+func (is *IndexSnapshot) VectorReaderWithFilter(ctx context.Context, vector []float32,
+	field string, k int64, searchParams json.RawMessage,
+	filterIDs []index.IndexInternalID) (
+	index.VectorReader, error) {
+
+	rv := &IndexSnapshotVectorReader{
+		vector:         vector,
+		field:          field,
+		k:              k,
+		snapshot:       is,
+		searchParams:   searchParams,
+		eligibleDocIDs: filterIDs,
+	}
+
+	if rv.postings == nil {
+		rv.postings = make([]segment_api.VecPostingsList, len(is.segment))
+	}
+	if rv.iterators == nil {
+		rv.iterators = make([]segment_api.VecPostingsIterator, len(is.segment))
+	}
+
+	// initialize postings and iterators within the OptimizeVR's Finish()
+
+	return rv, nil
+}
diff --git a/search/collector/eligible.go b/search/collector/eligible.go
new file mode 100644
index 000000000..311f8c92b
--- /dev/null
+++ b/search/collector/eligible.go
@@ -0,0 +1,156 @@
+//  Copyright (c) 2024 Couchbase, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// 		http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package collector
+
+import (
+	"context"
+	"time"
+
+	"github.com/blevesearch/bleve/v2/search"
+	index "github.com/blevesearch/bleve_index_api"
+)
+
+type EligibleCollector struct {
+	size    int
+	total   uint64
+	took    time.Duration
+	results search.DocumentMatchCollection
+
+	store collectorStore
+}
+
+func NewEligibleCollector(size int) *EligibleCollector {
+	return newEligibleCollector(size)
+}
+
+func newEligibleCollector(size int) *EligibleCollector {
+	// No sort order & skip always 0 since this is only to filter eligible docs.
+	hc := &EligibleCollector{size: size}
+
+	// comparator is a dummy here
+	hc.store = getOptimalCollectorStore(size, 0, func(i, j *search.DocumentMatch) int {
+		return 0
+	})
+
+	return hc
+}
+
+func (hc *EligibleCollector) Collect(ctx context.Context, searcher search.Searcher, reader index.IndexReader) error {
+	startTime := time.Now()
+	var err error
+	var next *search.DocumentMatch
+
+	backingSize := hc.size
+	if backingSize > PreAllocSizeSkipCap {
+		backingSize = PreAllocSizeSkipCap + 1
+	}
+	searchContext := &search.SearchContext{
+		DocumentMatchPool: search.NewDocumentMatchPool(backingSize+searcher.DocumentMatchPoolSize(), 0),
+		Collector:         hc,
+		IndexReader:       reader,
+	}
+
+	dmHandlerMaker := MakeEligibleDocumentMatchHandler
+	if cv := ctx.Value(search.MakeDocumentMatchHandlerKey); cv != nil {
+		dmHandlerMaker = cv.(search.MakeDocumentMatchHandler)
+	}
+	// use the application given builder for making the custom document match
+	// handler and perform callbacks/invocations on the newly made handler.
+	dmHandler, _, err := dmHandlerMaker(searchContext)
+	if err != nil {
+		return err
+	}
+	select {
+	case <-ctx.Done():
+		search.RecordSearchCost(ctx, search.AbortM, 0)
+		return ctx.Err()
+	default:
+		next, err = searcher.Next(searchContext)
+	}
+	for err == nil && next != nil {
+		if hc.total%CheckDoneEvery == 0 {
+			select {
+			case <-ctx.Done():
+				search.RecordSearchCost(ctx, search.AbortM, 0)
+				return ctx.Err()
+			default:
+			}
+		}
+		hc.total++
+
+		err = dmHandler(next)
+		if err != nil {
+			break
+		}
+
+		next, err = searcher.Next(searchContext)
+	}
+	if err != nil {
+		return err
+	}
+
+	// help finalize/flush the results in case
+	// of custom document match handlers.
+	err = dmHandler(nil)
+	if err != nil {
+		return err
+	}
+
+	// compute search duration
+	hc.took = time.Since(startTime)
+
+	// finalize actual results
+	err = hc.finalizeResults(reader)
+	if err != nil {
+		return err
+	}
+	return nil
+}
+
+func (hc *EligibleCollector) finalizeResults(r index.IndexReader) error {
+	var err error
+	hc.results, err = hc.store.Final(0, func(doc *search.DocumentMatch) error {
+		// Adding the results to the store without any modifications since we don't
+		// require the external ID of the filtered hits.
+		return nil
+	})
+	return err
+}
+
+func (hc *EligibleCollector) Results() search.DocumentMatchCollection {
+	return hc.results
+}
+
+func (hc *EligibleCollector) Total() uint64 {
+	return hc.total
+}
+
+// No concept of scoring in the eligible collector.
+func (hc *EligibleCollector) MaxScore() float64 {
+	return 0
+}
+
+func (hc *EligibleCollector) Took() time.Duration {
+	return hc.took
+}
+
+func (hc *EligibleCollector) SetFacetsBuilder(facetsBuilder *search.FacetsBuilder) {
+	// facet unsupported for pre-filtering in KNN search
+}
+
+func (hc *EligibleCollector) FacetResults() search.FacetResults {
+	// facet unsupported for pre-filtering in KNN search
+	return nil
+}
diff --git a/search/collector/heap.go b/search/collector/heap.go
index cd662bcf9..ab068b084 100644
--- a/search/collector/heap.go
+++ b/search/collector/heap.go
@@ -34,6 +34,11 @@ func newStoreHeap(capacity int, compare collectorCompare) *collectStoreHeap {
 	return rv
 }
 
+func (c *collectStoreHeap) Add(doc *search.DocumentMatch) *search.DocumentMatch {
+	c.add(doc)
+	return nil
+}
+
 func (c *collectStoreHeap) AddNotExceedingSize(doc *search.DocumentMatch,
 	size int) *search.DocumentMatch {
 	c.add(doc)
diff --git a/search/collector/list.go b/search/collector/list.go
index f73505e7d..b8b645199 100644
--- a/search/collector/list.go
+++ b/search/collector/list.go
@@ -34,6 +34,11 @@ func newStoreList(capacity int, compare collectorCompare) *collectStoreList {
 	return rv
 }
 
+func (c *collectStoreList) Add(doc *search.DocumentMatch) *search.DocumentMatch {
+	c.add(doc)
+	return nil
+}
+
 func (c *collectStoreList) AddNotExceedingSize(doc *search.DocumentMatch, size int) *search.DocumentMatch {
 	c.add(doc)
 	if c.len() > size {
diff --git a/search/collector/slice.go b/search/collector/slice.go
index 07534e693..03b212b0f 100644
--- a/search/collector/slice.go
+++ b/search/collector/slice.go
@@ -29,6 +29,11 @@ func newStoreSlice(capacity int, compare collectorCompare) *collectStoreSlice {
 	return rv
 }
 
+func (c *collectStoreSlice) Add(doc *search.DocumentMatch) *search.DocumentMatch {
+	c.slice = append(c.slice, doc)
+	return nil
+}
+
 func (c *collectStoreSlice) AddNotExceedingSize(doc *search.DocumentMatch,
 	size int) *search.DocumentMatch {
 	c.add(doc)
diff --git a/search/collector/topn.go b/search/collector/topn.go
index fc338f54e..5de473785 100644
--- a/search/collector/topn.go
+++ b/search/collector/topn.go
@@ -33,6 +33,10 @@ func init() {
 }
 
 type collectorStore interface {
+	// Adds a doc to the store without considering size.
+	// Returns nil if the doc was added successfully.
+	Add(doc *search.DocumentMatch) *search.DocumentMatch
+
 	// Add the document, and if the new store size exceeds the provided size
 	// the last element is removed and returned.  If the size has not been
 	// exceeded, nil is returned.
@@ -382,6 +386,27 @@ func (hc *TopNCollector) prepareDocumentMatch(ctx *search.SearchContext,
 	return nil
 }
 
+// Unlike TopNDocHandler, this will not eliminate docs based on score.
+func MakeEligibleDocumentMatchHandler(
+	ctx *search.SearchContext) (search.DocumentMatchHandler, bool, error) {
+
+	var hc *EligibleCollector
+	var ok bool
+
+	if hc, ok = ctx.Collector.(*EligibleCollector); ok {
+		return func(d *search.DocumentMatch) error {
+			if d == nil {
+				return nil
+			}
+
+			// No elements removed from the store here.
+			_ = hc.store.Add(d)
+			return nil
+		}, false, nil
+	}
+	return nil, false, nil
+}
+
 func MakeTopNDocumentMatchHandler(
 	ctx *search.SearchContext) (search.DocumentMatchHandler, bool, error) {
 	var hc *TopNCollector
diff --git a/search/query/knn.go b/search/query/knn.go
index 46eccb2a5..4d105d943 100644
--- a/search/query/knn.go
+++ b/search/query/knn.go
@@ -35,7 +35,9 @@ type KNNQuery struct {
 	BoostVal    *Boost    `json:"boost,omitempty"`
 
 	// see KNNRequest.Params for description
-	Params json.RawMessage `json:"params"`
+	Params        json.RawMessage `json:"params"`
+	FilterQuery   Query           `json:"filter,omitempty"`
+	filterResults []index.IndexInternalID
 }
 
 func NewKNNQuery(vector []float32) *KNNQuery {
@@ -67,6 +69,14 @@ func (q *KNNQuery) SetParams(params json.RawMessage) {
 	q.Params = params
 }
 
+func (q *KNNQuery) SetFilterQuery(f Query) {
+	q.FilterQuery = f
+}
+
+func (q *KNNQuery) SetFilterResults(results []index.IndexInternalID) {
+	q.filterResults = results
+}
+
 func (q *KNNQuery) Searcher(ctx context.Context, i index.IndexReader,
 	m mapping.IndexMapping, options search.SearcherOptions) (search.Searcher, error) {
 	fieldMapping := m.FieldMappingForPath(q.VectorField)
@@ -81,6 +91,8 @@ func (q *KNNQuery) Searcher(ctx context.Context, i index.IndexReader,
 		// normalize the vector
 		q.Vector = mapping.NormalizeVector(q.Vector)
 	}
+
 	return searcher.NewKNNSearcher(ctx, i, m, options, q.VectorField,
-		q.Vector, q.K, q.BoostVal.Value(), similarityMetric, q.Params)
+		q.Vector, q.K, q.BoostVal.Value(), similarityMetric, q.Params,
+		q.filterResults)
 }
diff --git a/search/searcher/search_knn.go b/search/searcher/search_knn.go
index e17bb7a0f..866900d4e 100644
--- a/search/searcher/search_knn.go
+++ b/search/searcher/search_knn.go
@@ -49,11 +49,20 @@ type KNNSearcher struct {
 
 func NewKNNSearcher(ctx context.Context, i index.IndexReader, m mapping.IndexMapping,
 	options search.SearcherOptions, field string, vector []float32, k int64,
-	boost float64, similarityMetric string, searchParams json.RawMessage) (
+	boost float64, similarityMetric string, searchParams json.RawMessage,
+	filterIDs []index.IndexInternalID) (
 	search.Searcher, error) {
 
 	if vr, ok := i.(index.VectorIndexReader); ok {
-		vectorReader, err := vr.VectorReader(ctx, vector, field, k, searchParams)
+		var vectorReader index.VectorReader
+		var err error
+
+		if len(filterIDs) > 0 {
+			vectorReader, err = vr.VectorReaderWithFilter(ctx, vector, field, k,
+				searchParams, filterIDs)
+		} else {
+			vectorReader, err = vr.VectorReader(ctx, vector, field, k, searchParams)
+		}
 		if err != nil {
 			return nil, err
 		}
diff --git a/search_knn.go b/search_knn.go
index 008a3615c..ec9c304b1 100644
--- a/search_knn.go
+++ b/search_knn.go
@@ -87,6 +87,10 @@ type KNNRequest struct {
 	//
 	// Consult go-faiss to know all supported search params
 	Params json.RawMessage `json:"params"`
+
+	// Filter query to use with kNN pre-filtering.
+	// Supports pre-filtering with all existing types of query clauses.
+	FilterQuery query.Query `json:"filter,omitempty"`
 }
 
 func (r *SearchRequest) AddKNN(field string, vector []float32, k int64, boost float64) {
@@ -99,6 +103,18 @@ func (r *SearchRequest) AddKNN(field string, vector []float32, k int64, boost fl
 	})
 }
 
+func (r *SearchRequest) AddKNNWithFilter(field string, vector []float32, k int64,
+	boost float64, filterQuery query.Query) {
+	b := query.Boost(boost)
+	r.KNN = append(r.KNN, &KNNRequest{
+		Field:       field,
+		Vector:      vector,
+		K:           k,
+		Boost:       &b,
+		FilterQuery: filterQuery,
+	})
+}
+
 func (r *SearchRequest) AddKNNOperator(operator knnOperator) {
 	r.KNNOperator = operator
 }
@@ -106,6 +122,15 @@ func (r *SearchRequest) AddKNNOperator(operator knnOperator) {
 // UnmarshalJSON deserializes a JSON representation of
 // a SearchRequest
 func (r *SearchRequest) UnmarshalJSON(input []byte) error {
+	type tempKNNReq struct {
+		Field        string          `json:"field"`
+		Vector       []float32       `json:"vector"`
+		VectorBase64 string          `json:"vector_base64"`
+		K            int64           `json:"k"`
+		Boost        *query.Boost    `json:"boost,omitempty"`
+		FilterQuery  json.RawMessage `json:"filter,omitempty"`
+	}
+
 	var temp struct {
 		Q                json.RawMessage   `json:"query"`
 		Size             *int              `json:"size"`
@@ -119,7 +144,7 @@ func (r *SearchRequest) UnmarshalJSON(input []byte) error {
 		Score            string            `json:"score"`
 		SearchAfter      []string          `json:"search_after"`
 		SearchBefore     []string          `json:"search_before"`
-		KNN              []*KNNRequest     `json:"knn"`
+		KNN              []*tempKNNReq     `json:"knn"`
 		KNNOperator      knnOperator       `json:"knn_operator"`
 		PreSearchData    json.RawMessage   `json:"pre_search_data"`
 	}
@@ -163,7 +188,24 @@ func (r *SearchRequest) UnmarshalJSON(input []byte) error {
 		r.From = 0
 	}
 
-	r.KNN = temp.KNN
+	r.KNN = make([]*KNNRequest, len(temp.KNN))
+	for i, knnReq := range temp.KNN {
+		r.KNN[i] = &KNNRequest{}
+		r.KNN[i].Field = temp.KNN[i].Field
+		r.KNN[i].Vector = temp.KNN[i].Vector
+		r.KNN[i].VectorBase64 = temp.KNN[i].VectorBase64
+		r.KNN[i].K = temp.KNN[i].K
+		r.KNN[i].Boost = temp.KNN[i].Boost
+		if len(knnReq.FilterQuery) == 0 {
+			// Setting this to nil to avoid ParseQuery() setting it to a match none
+			r.KNN[i].FilterQuery = nil
+		} else {
+			r.KNN[i].FilterQuery, err = query.ParseQuery(knnReq.FilterQuery)
+			if err != nil {
+				return err
+			}
+		}
+	}
 	r.KNNOperator = temp.KNNOperator
 	if r.KNNOperator == "" {
 		r.KNNOperator = knnOperatorOr
@@ -209,7 +248,9 @@ var (
 	knnOperatorOr  = knnOperator("or")
 )
 
-func createKNNQuery(req *SearchRequest) (query.Query, []int64, int64, error) {
+func createKNNQuery(req *SearchRequest, eligibleDocsMap map[int][]index.IndexInternalID,
+	requiresFiltering map[int]bool) (
+	query.Query, []int64, int64, error) {
 	if requestHasKNN(req) {
 		// first perform validation
 		err := validateKNN(req)
@@ -219,12 +260,25 @@ func createKNNQuery(req *SearchRequest) (query.Query, []int64, int64, error) {
 		var subQueries []query.Query
 		kArray := make([]int64, 0, len(req.KNN))
 		sumOfK := int64(0)
-		for _, knn := range req.KNN {
+		for i, knn := range req.KNN {
+			// If it's a filtered kNN but has no eligible filter hits, then
+			// do not run the kNN query.
+			if requiresFiltering[i] && len(eligibleDocsMap[i]) == 0 {
+				continue
+			}
+
 			knnQuery := query.NewKNNQuery(knn.Vector)
 			knnQuery.SetFieldVal(knn.Field)
 			knnQuery.SetK(knn.K)
 			knnQuery.SetBoost(knn.Boost.Value())
 			knnQuery.SetParams(knn.Params)
+			// Attach the filter query and its pre-computed results only
+			// when this kNN request actually has eligible hits; a single
+			// map lookup suffices (no separate exists check needed).
+			if filterResults := eligibleDocsMap[i]; len(filterResults) > 0 {
+				knnQuery.SetFilterQuery(knn.FilterQuery)
+				knnQuery.SetFilterResults(filterResults)
+			}
 			subQueries = append(subQueries, knnQuery)
 			kArray = append(kArray, knn.K)
 			sumOfK += knn.K
@@ -303,7 +357,63 @@ func addSortAndFieldsToKNNHits(req *SearchRequest, knnHits []*search.DocumentMat
 }
 
 func (i *indexImpl) runKnnCollector(ctx context.Context, req *SearchRequest, reader index.IndexReader, preSearch bool) ([]*search.DocumentMatch, error) {
-	KNNQuery, kArray, sumOfK, err := createKNNQuery(req)
+	// maps the index of the KNN query in the req to the pre-filter hits, i.e.
+	// the eligible docs' internal IDs.
+	filterHitsMap := make(map[int][]index.IndexInternalID)
+	// Indicates if this query requires filtering downstream
+	// No filtering required if it's a match all query/no filters applied.
+	requiresFiltering := make(map[int]bool)
+
+	for idx, knnReq := range req.KNN {
+		// TODO Can use goroutines for this filter query stuff - do it if perf results
+		// show this to be significantly slow otherwise.
+		filterQ := knnReq.FilterQuery
+		if filterQ == nil {
+			requiresFiltering[idx] = false
+			continue
+		}
+
+		if _, ok := filterQ.(*query.MatchAllQuery); ok {
+			requiresFiltering[idx] = false
+			continue
+		}
+
+		if _, ok := filterQ.(*query.MatchNoneQuery); ok {
+			// Filtering required since no hits are eligible.
+			requiresFiltering[idx] = true
+			// a match none query just means none of the documents are eligible
+			// hence, we can save on running the query.
+			continue
+		}
+
+		// Applies to all supported types of queries.
+		filterSearcher, err := filterQ.Searcher(ctx, reader, i.m, search.SearcherOptions{
+			Score: "none", // just want eligible hits --> don't compute scores if not needed
+		})
+		if err != nil {
+			return nil, err
+		}
+		// Using the index doc count to determine collector size since we do not
+		// have an estimate of the number of eligible docs in the index yet.
+		indexDocCount, err := i.DocCount()
+		if err != nil {
+			return nil, err
+		}
+		filterColl := collector.NewEligibleCollector(int(indexDocCount))
+		err = filterColl.Collect(ctx, filterSearcher, reader)
+		if err != nil {
+			return nil, err
+		}
+		filterHits := filterColl.Results()
+		filterHitsMap[idx] = make([]index.IndexInternalID, 0)
+		for _, docMatch := range filterHits {
+			filterHitsMap[idx] = append(filterHitsMap[idx], docMatch.IndexInternalID)
+		}
+		requiresFiltering[idx] = true
+	}
+
+	// Add the filter hits when creating the kNN query
+	KNNQuery, kArray, sumOfK, err := createKNNQuery(req, filterHitsMap, requiresFiltering)
 	if err != nil {
 		return nil, err
 	}
diff --git a/search_knn_test.go b/search_knn_test.go
index 3832e5d67..f7124bddc 100644
--- a/search_knn_test.go
+++ b/search_knn_test.go
@@ -1284,7 +1284,8 @@ func TestKNNOperator(t *testing.T) {
 
 	// Conjunction
 	searchRequest.AddKNNOperator(knnOperatorAnd)
-	conjunction, _, _, err := createKNNQuery(searchRequest)
+	requiresFiltering := make(map[int]bool)
+	conjunction, _, _, err := createKNNQuery(searchRequest, nil, requiresFiltering)
 	if err != nil {
 		t.Fatalf("unexpected error for AND knn operator")
 	}
@@ -1300,7 +1301,7 @@ func TestKNNOperator(t *testing.T) {
 
 	// Disjunction
 	searchRequest.AddKNNOperator(knnOperatorOr)
-	disjunction, _, _, err := createKNNQuery(searchRequest)
+	disjunction, _, _, err := createKNNQuery(searchRequest, nil, requiresFiltering)
 	if err != nil {
 		t.Fatalf("unexpected error for OR knn operator")
 	}
@@ -1316,12 +1317,144 @@ func TestKNNOperator(t *testing.T) {
 
 	// Incorrect operator.
 	searchRequest.AddKNNOperator("bs_op")
-	searchRequest.Query, _, _, err = createKNNQuery(searchRequest)
+	searchRequest.Query, _, _, err = createKNNQuery(searchRequest, nil, requiresFiltering)
 	if err == nil {
 		t.Fatalf("expected error for incorrect knn operator")
 	}
 }
 
+func TestKNNFiltering(t *testing.T) {
+	tmpIndexPath := createTmpIndexPath(t)
+	defer cleanupTmpIndexPath(t, tmpIndexPath)
+
+	const dims = 5
+	getRandomVector := func() []float32 {
+		vec := make([]float32, dims)
+		for i := 0; i < dims; i++ {
+			vec[i] = rand.Float32()
+		}
+		return vec
+	}
+
+	dataset := make([]map[string]interface{}, 0)
+
+	// Indexing just a few docs to populate index.
+	for i := 0; i < 10; i++ {
+		dataset = append(dataset, map[string]interface{}{
+			"type":    "vectorStuff",
+			"content": strconv.Itoa(i + 1000),
+			"vector":  getRandomVector(),
+		})
+	}
+
+	indexMapping := NewIndexMapping()
+	indexMapping.TypeField = "type"
+	indexMapping.DefaultAnalyzer = "en"
+	documentMapping := NewDocumentMapping()
+	indexMapping.AddDocumentMapping("vectorStuff", documentMapping)
+
+	contentFieldMapping := NewTextFieldMapping()
+	contentFieldMapping.Index = true
+	contentFieldMapping.Store = true
+	documentMapping.AddFieldMappingsAt("content", contentFieldMapping)
+
+	vecFieldMapping := mapping.NewVectorFieldMapping()
+	vecFieldMapping.Index = true
+	vecFieldMapping.Dims = 5
+	vecFieldMapping.Similarity = "dot_product"
+	documentMapping.AddFieldMappingsAt("vector", vecFieldMapping)
+
+	index, err := New(tmpIndexPath, indexMapping)
+	if err != nil {
+		t.Fatal(err)
+	}
+	defer func() {
+		err := index.Close()
+		if err != nil {
+			t.Fatal(err)
+		}
+	}()
+
+	batch := index.NewBatch()
+	for i := 0; i < len(dataset); i++ {
+		// doc with id strconv.Itoa(i) is indexed with content term strconv.Itoa(i+1000)
+		batch.Index(strconv.Itoa(i), dataset[i])
+	}
+
+	err = index.Batch(batch)
+	if err != nil {
+		t.Fatal(err)
+	}
+
+	termQuery := query.NewTermQuery("1004")
+	filterRequest := NewSearchRequest(termQuery)
+	filteredHits, err := index.Search(filterRequest)
+	if err != nil {
+		t.Fatal(err)
+	}
+	filteredDocIDs := make(map[string]struct{})
+	for _, match := range filteredHits.Hits {
+		filteredDocIDs[match.ID] = struct{}{}
+	}
+
+	searchRequest := NewSearchRequest(NewMatchNoneQuery())
+	searchRequest.AddKNNWithFilter("vector", getRandomVector(), 3, 2.0, termQuery)
+	searchRequest.Fields = []string{"content", "vector"}
+
+	res, err := index.Search(searchRequest)
+	if err != nil {
+		t.Fatal(err)
+	}
+	// Verify that every returned hit is among the filtered (eligible) hits.
+	for _, match := range res.Hits {
+		if _, exists := filteredDocIDs[match.ID]; !exists {
+			t.Errorf("returned result not present in filtered hits")
+		}
+	}
+
+	// No results should be returned with a match_none filter.
+	searchRequest = NewSearchRequest(NewMatchNoneQuery())
+	searchRequest.AddKNNWithFilter("vector", getRandomVector(), 3, 2.0,
+		NewMatchNoneQuery())
+	res, err = index.Search(searchRequest)
+	if err != nil {
+		t.Fatal(err)
+	}
+	if len(res.Hits) != 0 {
+		t.Errorf("match none filter should return no hits")
+	}
+
+	// Testing with a disjunction query.
+
+	termQuery = query.NewTermQuery("1003")
+	termQuery2 := query.NewTermQuery("1005")
+	disjQuery := query.NewDisjunctionQuery([]query.Query{termQuery, termQuery2})
+	filterRequest = NewSearchRequest(disjQuery)
+	filteredHits, err = index.Search(filterRequest)
+	if err != nil {
+		t.Fatal(err)
+	}
+	filteredDocIDs = make(map[string]struct{})
+	for _, match := range filteredHits.Hits {
+		filteredDocIDs[match.ID] = struct{}{}
+	}
+
+	searchRequest = NewSearchRequest(NewMatchNoneQuery())
+	searchRequest.AddKNNWithFilter("vector", getRandomVector(), 3, 2.0, disjQuery)
+	searchRequest.Fields = []string{"content", "vector"}
+
+	res, err = index.Search(searchRequest)
+	if err != nil {
+		t.Fatal(err)
+	}
+
+	for _, match := range res.Hits {
+		if _, exists := filteredDocIDs[match.ID]; !exists {
+			t.Errorf("returned result not present in filtered hits")
+		}
+	}
+}
+
 // -----------------------------------------------------------------------------
 // Test nested vectors
 
@@ -1393,7 +1526,8 @@ func TestNestedVectors(t *testing.T) {
 
 	for _, test := range tests {
 		searchReq := NewSearchRequest(query.NewMatchNoneQuery())
-		searchReq.AddKNN(vecFieldName, test.queryVec, k, 1000)
+		searchReq.AddKNNWithFilter(vecFieldName, test.queryVec, k, 1000,
+			NewMatchAllQuery())
 
 		res, err := index.Search(searchReq)
 		if err != nil {