CBG-3921 use IsDocNotFoundError everywhere (#6828)
torcolvin authored May 17, 2024
1 parent c946bc8 commit cef73b7
Showing 26 changed files with 105 additions and 96 deletions.
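This commit replaces the datastore-bound check base.IsKeyNotFoundError(dataStore, err) with the datastore-agnostic base.IsDocNotFoundError(err) throughout the codebase, and removes the IsError implementations from the bucket and datastore wrappers. A minimal sketch of the call-site pattern, using a hypothetical loadState helper that is not part of this commit (module path assumed to be github.com/couchbase/sync_gateway):

package example

import "github.com/couchbase/sync_gateway/base"

// loadState is a hypothetical caller, shown only to illustrate the before/after shape.
func loadState(ds base.DataStore, key string) ([]byte, error) {
    value, _, err := ds.GetRaw(key)
    if err != nil {
        // Before: base.IsKeyNotFoundError(ds, err) needed the DataStore handle.
        // After: base.IsDocNotFoundError(err) inspects only the error value.
        if base.IsDocNotFoundError(err) {
            return nil, nil // treat a missing doc as "no state yet"
        }
        return nil, err
    }
    return value, nil
}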
12 changes: 1 addition & 11 deletions base/bucket.go
@@ -320,22 +320,12 @@ func GetBucket(ctx context.Context, spec BucketSpec) (bucket Bucket, err error)
 // If the given key is not found in the bucket, this function returns a result of zero.
 func GetCounter(datastore DataStore, k string) (result uint64, err error) {
     _, err = datastore.Get(k, &result)
-    if datastore.IsError(err, sgbucket.KeyNotFoundError) {
+    if IsDocNotFoundError(err) {
         return 0, nil
     }
     return result, err
 }
 
-func IsKeyNotFoundError(datastore DataStore, err error) bool {
-
-    if err == nil {
-        return false
-    }
-
-    unwrappedErr := pkgerrors.Cause(err)
-    return datastore.IsError(unwrappedErr, sgbucket.KeyNotFoundError)
-}
-
 func IsCasMismatch(err error) bool {
     if err == nil {
         return false
2 changes: 1 addition & 1 deletion base/bucket_gocb_test.go
@@ -2275,7 +2275,7 @@ func TestGetExpiry(t *testing.T) {
     // ensure expiry retrieval on non-existent doc returns key not found
     _, nonExistentExpiryErr := dataStore.GetExpiry(ctx, "nonExistentKey")
     assert.Error(t, nonExistentExpiryErr)
-    assert.True(t, IsKeyNotFoundError(dataStore, nonExistentExpiryErr))
+    RequireDocNotFoundError(t, nonExistentExpiryErr)
 }
 
 func TestGetStatsVbSeqNo(t *testing.T) {
4 changes: 2 additions & 2 deletions base/bucket_test.go
@@ -532,8 +532,8 @@ func TestKeyNotFound(t *testing.T) {
     ds := bucket.GetSingleDataStore()
     var body []byte
     _, getErr := ds.Get("nonexistentKey", &body)
-    require.True(t, IsKeyNotFoundError(ds, getErr))
+    RequireDocNotFoundError(t, getErr)
 
     _, _, getRawErr := ds.GetRaw("nonexistentKey")
-    require.True(t, IsKeyNotFoundError(ds, getRawErr))
+    RequireDocNotFoundError(t, getRawErr)
 }
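The RequireDocNotFoundError helper called by the updated tests is not defined in any of the hunks shown on this page. A plausible minimal sketch, assuming it lives in package base and simply wraps IsDocNotFoundError with testify's require (imports assumed: testing and github.com/stretchr/testify/require):

// Sketch only: the real helper is defined elsewhere in package base and may differ.
func RequireDocNotFoundError(t testing.TB, err error) {
    require.Error(t, err)
    require.True(t, IsDocNotFoundError(err), "expected a doc-not-found error, got: %v", err)
}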
12 changes: 0 additions & 12 deletions base/collection.go
@@ -363,18 +363,6 @@ func (b *GocbV2Bucket) getConfigSnapshot() (*gocbcore.ConfigSnapshot, error) {
     return config, nil
 }
 
-func (b *GocbV2Bucket) IsError(err error, errorType sgbucket.DataStoreErrorType) bool {
-    if err == nil {
-        return false
-    }
-    switch errorType {
-    case sgbucket.KeyNotFoundError:
-        return errors.Is(err, gocb.ErrDocumentNotFound)
-    default:
-        return false
-    }
-}
-
 func (b *GocbV2Bucket) GetSpec() BucketSpec {
     return b.Spec
 }
4 changes: 0 additions & 4 deletions base/collection_gocb.go
@@ -459,10 +459,6 @@ func (c *Collection) Exists(k string) (exists bool, err error) {
     return res.Exists(), nil
 }
 
-func (c *Collection) IsError(err error, errorType sgbucket.DataStoreErrorType) bool {
-    return c.Bucket.IsError(err, errorType)
-}
-
 // SGJsonTranscoder reads and writes JSON, with relaxed datatype restrictions on decode, and
 // embedded support for writing raw JSON on encode
 type SGJSONTranscoder struct {
2 changes: 1 addition & 1 deletion base/constants_syncdocs.go
@@ -374,7 +374,7 @@ func InitSyncInfo(ds DataStore, metadataID string) (requiresResync bool, err err
 
     var syncInfo SyncInfo
     _, fetchErr := ds.Get(SGSyncInfo, &syncInfo)
-    if IsKeyNotFoundError(ds, fetchErr) {
+    if IsDocNotFoundError(fetchErr) {
         if metadataID == "" {
             return false, nil
         }
4 changes: 2 additions & 2 deletions base/dcp_client_metadata.go
@@ -245,7 +245,7 @@ func (m *DCPMetadataCS) load(ctx context.Context, workerID int) {
     var meta WorkerMetadata
     _, err := m.dataStore.Get(m.getMetadataKey(workerID), &meta)
     if err != nil {
-        if IsKeyNotFoundError(m.dataStore, err) {
+        if IsDocNotFoundError(err) {
             return
         }
         InfofCtx(ctx, KeyDCP, "Error loading persisted metadata - metadata will be reset for worker %d: %s", workerID, err)
@@ -260,7 +260,7 @@ func (m *DCPMetadataCS) load(ctx context.Context, workerID int) {
 func (m *DCPMetadataCS) Purge(ctx context.Context, numWorkers int) {
     for i := 0; i < numWorkers; i++ {
         err := m.dataStore.Delete(m.getMetadataKey(i))
-        if err != nil && !IsKeyNotFoundError(m.dataStore, err) {
+        if err != nil && !IsDocNotFoundError(err) {
             InfofCtx(ctx, KeyDCP, "Unable to remove DCP checkpoint for key %s: %v", m.getMetadataKey(i), err)
         }
     }
2 changes: 1 addition & 1 deletion base/dcp_common.go
@@ -189,7 +189,7 @@ func (c *DCPCommon) loadCheckpoint(vbNo uint16) (vbMetadata []byte, snapshotStar
     rawValue, _, err := c.metaStore.GetRaw(fmt.Sprintf("%s%d", c.checkpointPrefix, vbNo))
     if err != nil {
         // On a key not found error, metadata hasn't been persisted for this vbucket
-        if IsKeyNotFoundError(c.metaStore, err) {
+        if IsDocNotFoundError(err) {
             return []byte{}, 0, 0, nil
         } else {
             return []byte{}, 0, 0, err
17 changes: 9 additions & 8 deletions base/error.go
@@ -205,27 +205,28 @@ func CouchHTTPErrorName(status int) string {
     return fmt.Sprintf("%d", status)
 }
 
-// Returns true if an error is a doc-not-found error
+// IsDocNotFoundError returns true if an error is a doc-not-found error
 func IsDocNotFoundError(err error) bool {
 
-    unwrappedErr := pkgerrors.Cause(err)
-    if unwrappedErr == nil {
+    if err == nil {
         return false
     }
 
-    if unwrappedErr == ErrNotFound {
+    if errors.Is(err, ErrNotFound) {
         return true
     }
 
     if errors.Is(err, gocb.ErrDocumentNotFound) {
         return true
     }
 
+    var missingError sgbucket.MissingError
+    if errors.As(err, &missingError) {
+        return true
+    }
+    unwrappedErr := pkgerrors.Cause(err)
+
     switch unwrappedErr := unwrappedErr.(type) {
     case *gomemcached.MCResponse:
         return unwrappedErr.Status == gomemcached.KEY_ENOENT || unwrappedErr.Status == gomemcached.NOT_STORED
-    case sgbucket.MissingError:
-        return true
     case *HTTPError:
         return unwrappedErr.Status == http.StatusNotFound
     default:
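The rewrite above leans on errors.Is and errors.As rather than a single pkgerrors.Cause unwrap, so a not-found error is still recognized after callers wrap it with additional context. A small illustration, assuming the module path github.com/couchbase/sync_gateway/base and the exported sentinel base.ErrNotFound referenced in the function:

package main

import (
    "fmt"

    "github.com/couchbase/sync_gateway/base"
)

func main() {
    // A not-found error wrapped with extra context by a caller.
    wrapped := fmt.Errorf("loading checkpoint %q: %w", "cp-1", base.ErrNotFound)

    // errors.Is walks the wrap chain inside IsDocNotFoundError, so this prints true.
    fmt.Println(base.IsDocNotFoundError(wrapped))
}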
86 changes: 65 additions & 21 deletions base/error_test.go
@@ -154,27 +154,71 @@ func TestCouchHTTPErrorName(t *testing.T) {
 }
 
 func TestIsDocNotFoundError(t *testing.T) {
-
-    fakeMCResponse := &gomemcached.MCResponse{Status: gomemcached.KEY_ENOENT}
-    assert.True(t, IsDocNotFoundError(fakeMCResponse))
-
-    fakeMCResponse = &gomemcached.MCResponse{Status: gomemcached.NOT_STORED}
-    assert.True(t, IsDocNotFoundError(fakeMCResponse))
-
-    fakeMCResponse = &gomemcached.MCResponse{Status: gomemcached.ROLLBACK}
-    assert.False(t, IsDocNotFoundError(fakeMCResponse))
-
-    fakeMissingError := sgbucket.MissingError{}
-    assert.True(t, IsDocNotFoundError(fakeMissingError))
-
-    fakeHTTPError := &HTTPError{Status: http.StatusNotFound}
-    assert.True(t, IsDocNotFoundError(fakeHTTPError))
-
-    fakeHTTPError = &HTTPError{Status: http.StatusForbidden}
-    assert.False(t, IsDocNotFoundError(fakeHTTPError))
-
-    fakeSyntaxError := &json.SyntaxError{}
-    assert.False(t, IsDocNotFoundError(fakeSyntaxError))
+    testCases := []struct {
+        name          string
+        err           error
+        isDocNotFound bool
+    }{
+        {
+            name:          "gomemcached.MCResponse KEY_ENOENT",
+            err:           &gomemcached.MCResponse{Status: gomemcached.KEY_ENOENT},
+            isDocNotFound: true,
+        },
+        {
+            name:          "gomemcached.MCResponse NOT_STORED",
+            err:           &gomemcached.MCResponse{Status: gomemcached.NOT_STORED},
+            isDocNotFound: true,
+        },
+        {
+            name:          "gomemcached.MCResponse ROLLBACK",
+            err:           &gomemcached.MCResponse{Status: gomemcached.ROLLBACK},
+            isDocNotFound: false,
+        },
+        {
+            name:          "sgbucket.MissingError",
+            err:           sgbucket.MissingError{},
+            isDocNotFound: true,
+        },
+        {
+            name:          "HTTPError StatusNotFound",
+            err:           &HTTPError{Status: http.StatusNotFound},
+            isDocNotFound: true,
+        },
+        {
+            name:          "HTTPError StatusForbidden",
+            err:           &HTTPError{Status: http.StatusForbidden},
+            isDocNotFound: false,
+        },
+        {
+            name:          "json.SyntaxError",
+            err:           &json.SyntaxError{},
+            isDocNotFound: false,
+        },
+        {
+            name:          "nil",
+            err:           nil,
+            isDocNotFound: false,
+        },
+        {
+            name:          "other error",
+            err:           fmt.Errorf("some error"),
+            isDocNotFound: false,
+        },
+        {
+            name:          "sgbucket.MissingError with values",
+            err:           sgbucket.MissingError{Key: "key"},
+            isDocNotFound: true,
+        },
+    }
+    for _, test := range testCases {
+        t.Run(test.name, func(t *testing.T) {
+            if test.isDocNotFound {
+                assert.True(t, IsDocNotFoundError(test.err))
+            } else {
+                assert.False(t, IsDocNotFoundError(test.err))
+            }
+        })
+    }
 }
 
 func TestMultiError(t *testing.T) {
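To run just this table-driven test locally, something like the following should work (package directory taken from the file path above):

go test ./base -run TestIsDocNotFoundError -v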
6 changes: 3 additions & 3 deletions base/heartbeat.go
@@ -284,7 +284,7 @@ func (h *couchbaseHeartBeater) checkStaleHeartbeats(ctx context.Context) error {
         _, _, err := h.datastore.GetRaw(timeoutDocID)
         SyncGatewayStats.GlobalStats.ResourceUtilizationStats().NumIdleKvOps.Add(1)
         if err != nil {
-            if !IsKeyNotFoundError(h.datastore, err) {
+            if !IsDocNotFoundError(err) {
                 // unexpected error
                 return err
             }
@@ -316,7 +316,7 @@ func (h *couchbaseHeartBeater) sendHeartbeat() error {
     }
 
     // On KeyNotFound, recreate heartbeat timeout doc
-    if IsKeyNotFoundError(h.datastore, touchErr) {
+    if IsDocNotFoundError(touchErr) {
         heartbeatDocBody := []byte(h.nodeUUID)
         setErr := h.datastore.SetRaw(docID, h.heartbeatExpirySeconds, nil, heartbeatDocBody)
         SyncGatewayStats.GlobalStats.ResourceUtilizationStats().NumIdleKvOps.Add(1)
@@ -473,7 +473,7 @@ func (dh *documentBackedListener) loadNodeIDs() error {
     if err != nil {
         dh.cas = 0
         dh.nodeIDs = []string{}
-        if !IsKeyNotFoundError(dh.datastore, err) {
+        if !IsDocNotFoundError(err) {
             return err
         }
     }
4 changes: 0 additions & 4 deletions base/leaky_bucket.go
@@ -71,10 +71,6 @@ func (b *LeakyBucket) GetMaxVbno() (uint16, error) {
     return b.bucket.GetMaxVbno()
 }
 
-func (b *LeakyBucket) IsError(err error, errorType sgbucket.DataStoreErrorType) bool {
-    return b.bucket.IsError(err, errorType)
-}
-
 func (b *LeakyBucket) DefaultDataStore() sgbucket.DataStore {
     return NewLeakyDataStore(b, b.bucket.DefaultDataStore(), b.config)
 }
4 changes: 0 additions & 4 deletions base/leaky_datastore.go
@@ -326,10 +326,6 @@ func (lds *LeakyDataStore) SetUpdateCallback(callback func(key string)) {
     lds.config.UpdateCallback = callback
 }
 
-func (lds *LeakyDataStore) IsError(err error, errorType sgbucket.DataStoreErrorType) bool {
-    return lds.dataStore.IsError(err, errorType)
-}
-
 func (lds *LeakyDataStore) IsSupported(feature sgbucket.BucketStoreFeature) bool {
     return lds.dataStore.IsSupported(feature)
 }
4 changes: 2 additions & 2 deletions base/sg_cluster_cfg.go
@@ -75,7 +75,7 @@ func (c *CfgSG) Get(cfgKey string, cas uint64) (
     bucketKey := c.sgCfgBucketKey(cfgKey)
     var value []byte
     casOut, err := c.datastore.Get(bucketKey, &value)
-    if err != nil && !IsKeyNotFoundError(c.datastore, err) {
+    if err != nil && !IsDocNotFoundError(err) {
         InfofCtx(c.loggingCtx, KeyCluster, "cfg_sg: Get, key: %s, cas: %d, err: %v", cfgKey, cas, err)
         return nil, 0, err
     }
@@ -116,7 +116,7 @@ func (c *CfgSG) Del(cfgKey string, cas uint64) error {
     _, err := c.datastore.Remove(bucketKey, cas)
     if IsCasMismatch(err) {
         return ErrCfgCasError
-    } else if err != nil && !IsKeyNotFoundError(c.datastore, err) {
+    } else if err != nil && !IsDocNotFoundError(err) {
         return err
     }
 
2 changes: 1 addition & 1 deletion db/active_replicator_checkpointer.go
@@ -516,7 +516,7 @@ func (c *Checkpointer) getLocalCheckpoint() (checkpoint *replicationCheckpoint,
 
     checkpointBytes, err := getSpecialBytes(c.collectionDataStore, DocTypeLocal, CheckpointDocIDPrefix+c.clientID, c.localDocExpirySecs)
     if err != nil {
-        if !base.IsKeyNotFoundError(c.collectionDataStore, err) {
+        if !base.IsDocNotFoundError(err) {
             return &replicationCheckpoint{}, err
         }
         base.DebugfCtx(c.ctx, base.KeyReplicate, "couldn't find existing local checkpoint for client %q", c.clientID)
2 changes: 1 addition & 1 deletion db/active_replicator_common.go
@@ -389,7 +389,7 @@ func (arc *activeReplicatorCommon) startStatusReporter() error {
 func getLocalStatusDoc(ctx context.Context, metadataStore base.DataStore, statusKey string) (*ReplicationStatusDoc, error) {
     statusDocBytes, err := getWithTouch(metadataStore, statusKey, 0)
     if err != nil {
-        if !base.IsKeyNotFoundError(metadataStore, err) {
+        if !base.IsDocNotFoundError(err) {
             return nil, err
         }
         base.DebugfCtx(ctx, base.KeyReplicate, "couldn't find existing local checkpoint for ID %q", statusKey)
2 changes: 1 addition & 1 deletion db/blip_handler.go
@@ -1266,7 +1266,7 @@ func (bh *blipHandler) handleProveAttachment(rq *blip.Message) error {
         if bh.clientType == BLIPClientTypeSGR2 {
             return ErrAttachmentNotFound
         }
-        if base.IsKeyNotFoundError(bh.collection.dataStore, err) {
+        if base.IsDocNotFoundError(err) {
             return ErrAttachmentNotFound
         }
         return base.HTTPErrorf(http.StatusInternalServerError, fmt.Sprintf("Error getting client attachment: %v", err))
4 changes: 2 additions & 2 deletions db/crud_test.go
@@ -410,7 +410,7 @@ func TestRevisionStorageConflictAndTombstones(t *testing.T) {
 
     // Ensure previous revision body backup has been removed
    _, _, err = db.MetadataStore.GetRaw(base.RevBodyPrefix + "4GctXhLVg13d59D0PUTPRD0i58Hbe1d0djgo1qOEpfI=")
-    assert.True(t, base.IsKeyNotFoundError(collection.dataStore, err), "Revision should be not found")
+    base.RequireDocNotFoundError(t, err)
 
     // Validate the tombstone is stored inline (due to small size)
     revTree, err = getRevTreeList(ctx, collection.dataStore, "doc1", db.UseXattrs())
@@ -644,7 +644,7 @@ func TestRevisionStoragePruneTombstone(t *testing.T) {
     // Ensure previous tombstone body backup has been removed
     log.Printf("Verify revision body doc has been removed from bucket")
     _, _, err = collection.dataStore.GetRaw(base.SyncDocPrefix + "rb:ULDLuEgDoKFJeET2hojeFANXM8SrHdVfAGONki+kPxM=")
-    assert.True(t, base.IsKeyNotFoundError(collection.dataStore, err), "Revision should be not found")
+    base.RequireDocNotFoundError(t, err)
 
 }
 
2 changes: 1 addition & 1 deletion db/design_doc.go
@@ -750,7 +750,7 @@ func removeObsoleteDesignDocs(ctx context.Context, viewStore sgbucket.ViewStore,
     return removedDesignDocs, nil
 }
 
-// Similar to IsKeyNotFoundError(), but for the specific error returned by GetDDoc/DeleteDDoc
+// Similar to IsDocNotFoundError(), but for the specific error returned by GetDDoc/DeleteDDoc
 func IsMissingDDocError(err error) bool {
     if err == nil {
         return false
2 changes: 1 addition & 1 deletion db/revision.go
@@ -316,7 +316,7 @@ func (db *DatabaseCollectionWithUser) setOldRevisionJSON(ctx context.Context, do
 func (db *DatabaseCollectionWithUser) refreshPreviousRevisionBackup(ctx context.Context, docid string, revid string, body []byte, expiry uint32) error {
 
     _, err := db.dataStore.Touch(oldRevisionKey(docid, revid), expiry)
-    if base.IsKeyNotFoundError(db.dataStore, err) && len(body) > 0 {
+    if base.IsDocNotFoundError(err) && len(body) > 0 {
         return db.setOldRevisionJSON(ctx, docid, revid, body, expiry)
     }
     return err
2 changes: 1 addition & 1 deletion db/util_testing.go
@@ -344,7 +344,7 @@ FROM ` + base.KeyspaceQueryToken + ` AS ks USE INDEX (sg_allDocs_x1)`
         } else {
             purgeErr = dataStore.Delete(row.Id)
         }
-        if base.IsKeyNotFoundError(dataStore, purgeErr) {
+        if base.IsDocNotFoundError(purgeErr) {
             // If key no longer exists, need to add and remove to trigger removal from view
             _, addErr := dataStore.Add(row.Id, 0, purgeBody)
             if addErr != nil {
4 changes: 2 additions & 2 deletions go.mod
@@ -12,10 +12,10 @@ require (
     github.com/couchbase/gocbcore/v10 v10.4.1
     github.com/couchbase/gomemcached v0.2.1
     github.com/couchbase/goutils v0.1.2
-    github.com/couchbase/sg-bucket v0.0.0-20240514135815-5f5e7aa8625c
+    github.com/couchbase/sg-bucket v0.0.0-20240516142514-d65d1f2192a1
     github.com/couchbaselabs/go-fleecedelta v0.0.0-20220909152808-6d09efa7a338
     github.com/couchbaselabs/gocbconnstr v1.0.5
-    github.com/couchbaselabs/rosmar v0.0.0-20240417141520-4127f7d4c389
+    github.com/couchbaselabs/rosmar v0.0.0-20240516145123-749ae63effda
     github.com/elastic/gosigar v0.14.3
     github.com/felixge/fgprof v0.9.4
     github.com/google/uuid v1.6.0