diff --git a/component/attr_cache/attr_cache.go b/component/attr_cache/attr_cache.go index f00d1a908..e201cc4a1 100644 --- a/component/attr_cache/attr_cache.go +++ b/component/attr_cache/attr_cache.go @@ -53,7 +53,7 @@ type AttrCache struct { noSymlinks bool cacheDirs bool maxFiles int - cacheMap map[string]*attrCacheItem + cacheMap *attrCacheItem cacheLock sync.RWMutex } @@ -103,7 +103,8 @@ func (ac *AttrCache) Start(ctx context.Context) error { log.Trace("AttrCache::Start : Starting component %s", ac.Name()) // AttrCache : start code goes here - ac.cacheMap = make(map[string]*attrCacheItem) + rootAttr := internal.CreateObjAttrDir("") + ac.cacheMap = newAttrCacheItem(rootAttr, true, time.Now()) return nil } @@ -164,42 +165,17 @@ func (ac *AttrCache) OnConfigChange() { // Helper Methods // deleteDirectory: Marks a directory and all its contents deleted. +// This should only be called when ac.cacheDirs is false. // This marks items deleted instead of invalidating them. // That way if a request came in for a deleted item, we can respond from the cache. func (ac *AttrCache) deleteDirectory(path string, time time.Time) { - // Delete all descendants of the path, then delete the path - // For example, filesystem: a/, a/b, a/c, aa/, ab. - // When we delete directory a, we only want to delete a/, a/b, and a/c. - // If we do not conditionally extend a, we would accidentally delete aa/ and ab - - // Add a trailing / so that we only delete child paths under the directory and not paths that have the same prefix - prefix := dirToPrefix(path) - - for key, value := range ac.cacheMap { - if strings.HasPrefix(key, prefix) { - value.markDeleted(time) - } - } - - // We need to delete the path itself since we only handle children above. - ac.deletePath(path, time) -} - -func dirToPrefix(dir string) string { - prefix := internal.ExtendDirName(dir) - if prefix == "/" { - prefix = "" - } - return prefix -} - -// deletePath: deletes a path -func (ac *AttrCache) deletePath(path string, time time.Time) { - // Keys in the cache map do not contain trailing /, truncate the path before referencing a key in the map. - value, found := ac.cacheMap[internal.TruncateDirName(path)] - if found { - value.markDeleted(time) + //get attrCacheItem + toBeDeleted, getErr := ac.cacheMap.get(path) + if getErr != nil || !toBeDeleted.exists() { + log.Warn("AttrCache::deleteDirectory : %s directory does not exist", path) + return } + toBeDeleted.markDeleted(time) } // deleteCachedDirectory: marks a directory and all its contents deleted @@ -210,42 +186,26 @@ func (ac *AttrCache) deletePath(path string, time time.Time) { func (ac *AttrCache) deleteCachedDirectory(path string, time time.Time) error { // Delete all descendants of the path, then delete the path - // For example, filesystem: a/, a/b, a/c, aa/, ab. - // When we delete directory a, we only want to delete a/, a/b, and a/c. 
- // If we do not conditionally extend a, we would accidentally delete aa/ and ab - // Add a trailing '/' so that we only delete child paths under the directory and not paths that have the same prefix - prefix := dirToPrefix(path) - // remember whether we actually found any contents - foundCachedContents := false - for key, value := range ac.cacheMap { - if strings.HasPrefix(key, prefix) { - foundCachedContents = true - value.markDeleted(time) - } - } - - // check if the directory to be deleted exists - if !foundCachedContents && !ac.pathExistsInCache(path) { - log.Err("AttrCache::deleteCachedDirectory : directory %s does not exist in attr cache.", path) + //get attrCacheItem + toBeDeleted, getErr := ac.cacheMap.get(path) + if getErr != nil || !toBeDeleted.exists() { + log.Err("AttrCache::deleteCachedDirectory : %s", getErr) return syscall.ENOENT } + toBeDeleted.markDeleted(time) - // We need to delete the path itself since we only handle children above. - ac.deletePath(path, time) - - // If this leaves the parent or any ancestor directory empty, record that. - // Although this involves an unnecessary second traversal through the cache, - // because of the code complexity, I think it's worth the readability gained. ac.updateAncestorsInCloud(getParentDir(path), time) - return nil } // pathExistsInCache: check if path is in cache, is valid, and not marked deleted func (ac *AttrCache) pathExistsInCache(path string) bool { - value, found := ac.cacheMap[internal.TruncateDirName(path)] - return (found && value.valid() && value.exists()) + value, getErr := ac.cacheMap.get(internal.TruncateDirName(path)) + if getErr != nil { + return false + } + return value.exists() } func getParentDir(childPath string) string { @@ -260,39 +220,27 @@ func getParentDir(childPath string) string { // Do not use this with ac.cacheDirs set func (ac *AttrCache) invalidateDirectory(path string) { // Invalidate all descendants of the path, then invalidate the path - // For example, filesystem: a/, a/b, a/c, aa/, ab. - // When we invalidate directory a, we only want to invalidate a/, a/b, and a/c. - // If we do not conditionally extend a, we would accidentally invalidate aa/ and ab - - // Add a trailing / so that we only invalidate child paths under the directory and not paths that have the same prefix - prefix := dirToPrefix(path) - - for key, value := range ac.cacheMap { - if strings.HasPrefix(key, prefix) { - // don't invalidate directories when cacheDirs is true - if ac.cacheDirs && value.attr.IsDir() { - continue - } - value.invalidate() - } - } - // We need to invalidate the path itself since we only handle children above. - if !ac.cacheDirs { - ac.invalidatePath(path) + toBeInvalid, getErr := ac.cacheMap.get(path) + if getErr != nil { + log.Err("AttrCache::invalidateDirectory : could not invalidate cached attr item: %s", getErr) + return + } + // only invalidate directories when cacheDirs is false + if !ac.cacheDirs || !toBeInvalid.attr.IsDir() { + toBeInvalid.invalidate() + return } -} -// invalidatePath: invalidates a path -func (ac *AttrCache) invalidatePath(path string) { - // Keys in the cache map do not contain trailing /, truncate the path before referencing a key in the map. - value, found := ac.cacheMap[internal.TruncateDirName(path)] - if found { - value.invalidate() + // recurse + for _, childItem := range toBeInvalid.children { + ac.invalidateDirectory(childItem.attr.Path) } } -// renameCachedDirectory: Renames a cached directory and all its contents when ac.cacheDirs is true. 
+// renameCachedDirectory: Renames a cached directory and all its contents +// this function assumes ac.cacheDirs is true +// input: source folder path, destination folder path, rename timestamp func (ac *AttrCache) renameCachedDirectory(srcDir string, dstDir string, time time.Time) error { // First, check if the destination directory already exists @@ -300,80 +248,57 @@ func (ac *AttrCache) renameCachedDirectory(srcDir string, dstDir string, time ti return os.ErrExist } - // Rename all descendants of srcDir, then rename the srcDir itself - // For example, filesystem: a/, a/b, a/c, aa/, ab. - // When we rename directory a, we only want to rename a/, a/b, and a/c. - // If we do not conditionally extend a, we would accidentally delete aa/ and ab - - // Add a trailing / so that we only rename child paths under the directory, - // and not paths that have the same prefix - srcDir = dirToPrefix(srcDir) - // Add a trailing / to destination (for string replacement) - dstDir = dirToPrefix(dstDir) - // remember whether we actually found any contents - foundCachedContents := false - movedObjects := false - for key, value := range ac.cacheMap { - if strings.HasPrefix(key, srcDir) { - foundCachedContents = true - dstKey := strings.Replace(key, srcDir, dstDir, 1) - // track whether the destination is gaining objects - movedObjects = movedObjects || (value.isInCloud() && value.exists() && value.valid()) - // to keep the directory cache coherent, - // any renamed directories need a new cache entry - if value.attr.IsDir() && value.valid() && value.exists() { - // add the destination directory to our cache - dstDirAttr := internal.CreateObjAttrDir(dstKey) - dstDirCacheItem := newAttrCacheItem(dstDirAttr, true, time) - dstDirCacheItem.markInCloud(value.isInCloud()) - ac.cacheMap[dstKey] = dstDirCacheItem - } else { - // invalidate files so attributes get refreshed from the backend - ac.invalidatePath(dstKey) - } - // either way, mark the old cache entry deleted - value.markDeleted(time) - } - } - - // if there were no cached entries to move, does this directory even exist? - if !foundCachedContents && !ac.pathExistsInCache(srcDir) { - log.Err("AttrCache::renameCachedDirectory : Source directory %s does not exist.", srcDir) + srcItem, getErr := ac.cacheMap.get(srcDir) + if getErr != nil || !srcItem.exists() { + log.Err("AttrCache::renameCachedDirectory : %s ", getErr) return syscall.ENOENT } - // record whether the destination directory's parent tree now contains objects - if movedObjects { - ac.markAncestorsInCloud(dstDir, time) + srcDir = internal.TruncateDirName(srcDir) + dstDir = internal.TruncateDirName(dstDir) + ac.moveAttrCachedItem(srcItem, srcDir, dstDir, time) + ac.updateAncestorsInCloud(srcDir, time) + ac.updateAncestorsInCloud(dstDir, time) + return nil +} + +// moveAttrItem: used to move a subtree within cacheMap to a new location of the cacheMap tree. 
+// input: attrCacheItem to be moved, source and destination path, move timestamp +func (ac *AttrCache) moveAttrCachedItem(srcItem *attrCacheItem, srcDir string, dstDir string, time time.Time) *attrCacheItem { + + // take the source name and change it to the destination name + dstPath := strings.Replace(srcItem.attr.Path, srcDir, dstDir, 1) + + // create an attribute using the destination name + var dstAttr *internal.ObjAttr + if srcItem.attr.IsDir() { + dstAttr = internal.CreateObjAttrDir(dstPath) } else { - // add the destination directory to our cache - dstDir = internal.TruncateDirName(dstDir) - dstDirAttr := internal.CreateObjAttrDir(dstDir) - dstDirAttrCacheItem := newAttrCacheItem(dstDirAttr, true, time) - dstDirAttrCacheItem.markInCloud(false) - ac.cacheMap[dstDir] = dstDirAttrCacheItem + dstAttr = internal.CreateObjAttr(dstPath, srcItem.attr.Size, srcItem.attr.Mtime) } - // delete the source directory from our cache - ac.deletePath(srcDir, time) + // insert the attribute from previous step into the cacheMap + dstItem := ac.cacheMap.insert(dstAttr, srcItem.exists(), srcItem.cachedAt) - // If this leaves the parent or ancestor directories empty, record that. - // Although this involves an unnecessary second traversal through the cache, - // because of the code complexity, I think it's worth the readability gained. - ac.updateAncestorsInCloud(getParentDir(srcDir), time) + // mark whether the item is in the cloud + dstItem.markInCloud(srcItem.isInCloud()) - return nil + // recurse over children + for _, srcChildItm := range srcItem.children { + ac.moveAttrCachedItem(srcChildItm, srcDir, dstDir, time) + } + srcItem.markDeleted(time) + return dstItem } func (ac *AttrCache) markAncestorsInCloud(dirPath string, time time.Time) { - dirPath = internal.TruncateDirName(dirPath) if len(dirPath) != 0 { - dirCacheItem, found := ac.cacheMap[dirPath] - if !(found && dirCacheItem.valid() && dirCacheItem.exists()) { + dirCacheItem, getErr := ac.cacheMap.get(dirPath) + if getErr != nil || !dirCacheItem.exists() { dirObjAttr := internal.CreateObjAttrDir(dirPath) - dirCacheItem = newAttrCacheItem(dirObjAttr, true, time) - ac.cacheMap[dirPath] = dirCacheItem + dirCacheItem = ac.cacheMap.insert(dirObjAttr, true, time) } + dirCacheItem.markInCloud(true) // recurse ac.markAncestorsInCloud(getParentDir(dirPath), time) @@ -381,26 +306,27 @@ func (ac *AttrCache) markAncestorsInCloud(dirPath string, time time.Time) { } // ------------------------- Methods implemented by this component ------------------------------------------- -// CreateDir: Mark the directory invalid +// CreateDir: Mark the directory invalid, or +// insert the dir item into cache when cacheDirs is true. 
func (ac *AttrCache) CreateDir(options internal.CreateDirOptions) error { log.Trace("AttrCache::CreateDir : %s", options.Name) err := ac.NextComponent().CreateDir(options) - if err == nil { ac.cacheLock.Lock() defer ac.cacheLock.Unlock() if ac.cacheDirs { // check if directory already exists - newDirPath := internal.TruncateDirName(options.Name) - if ac.pathExistsInCache(newDirPath) { + if ac.pathExistsInCache(options.Name) { return os.ErrExist } - newDirAttr := internal.CreateObjAttrDir(newDirPath) - newDirAttrCacheItem := newAttrCacheItem(newDirAttr, true, time.Now()) + newDirAttr := internal.CreateObjAttrDir(options.Name) + newDirAttrCacheItem := ac.cacheMap.insert(newDirAttr, true, time.Now()) newDirAttrCacheItem.markInCloud(false) - ac.cacheMap[newDirPath] = newDirAttrCacheItem } else { - ac.invalidatePath(options.Name) + dirAttrCacheItem, getErr := ac.cacheMap.get(options.Name) + if getErr == nil { + dirAttrCacheItem.invalidate() + } } } return err @@ -455,34 +381,23 @@ func (ac *AttrCache) ReadDir(options internal.ReadDirOptions) (pathList []*inter // merge results from our cache into pathMap func (ac *AttrCache) addDirsNotInCloudToListing(listPath string, pathList []*internal.ObjAttr) ([]*internal.ObjAttr, int) { - prefix := dirToPrefix(listPath) - if prefix == "/" { - prefix = "" - } numAdded := 0 - ac.cacheLock.RLock() - for key, value := range ac.cacheMap { - // ignore invalid and deleted items - if !value.valid() || !value.exists() { - continue - } - // the only entries missing are the directories with no objects - if !value.attr.IsDir() || value.isInCloud() { - continue - } - // look for matches - if strings.HasPrefix(key, prefix) { - // exclude entries in subdirectories - pathInsideDirectory := strings.TrimPrefix(key, prefix) - if strings.Contains(pathInsideDirectory, "/") { - continue - } - pathList = append(pathList, value.attr) + dir, getErr := ac.cacheMap.get(listPath) + if getErr != nil || !dir.exists() { + log.Err("AttrCache:: addDirsNotInCloudToListing : %s does not exist in cache", listPath) + return pathList, 0 + } + + ac.cacheLock.RLock() + for _, child := range dir.children { + if child.exists() && !child.isInCloud() { + pathList = append(pathList, child.attr) numAdded++ } } ac.cacheLock.RUnlock() + // values should be returned in ascending order by key // sort the list before returning it sort.Slice(pathList, func(i, j int) bool { @@ -525,14 +440,11 @@ func (ac *AttrCache) cacheAttributes(pathList []*internal.ObjAttr) { ac.cacheLock.Lock() defer ac.cacheLock.Unlock() for _, attr := range pathList { - // TODO: will this cause a bug when cacheDirs is enabled? 
- if len(ac.cacheMap) > ac.maxFiles { - log.Debug("AttrCache::cacheAttributes : %s skipping adding path to attribute cache because it is full", pathList) - break - } - ac.cacheMap[internal.TruncateDirName(attr.Path)] = newAttrCacheItem(attr, true, currTime) + ac.cacheMap.insert(attr, true, currTime) } - + // pathList was returned by the cloud storage component when listing a directory + // so that directory is clearly in the cloud + ac.markAncestorsInCloud(getParentDir(pathList[0].Path), currTime) } } @@ -566,13 +478,15 @@ func (ac *AttrCache) IsDirEmpty(options internal.IsDirEmptyOptions) bool { } func (ac *AttrCache) anyContentsInCache(prefix string) bool { - // Add a trailing / so that we only find child paths under the directory and not paths that have the same prefix - prefix = dirToPrefix(prefix) ac.cacheLock.RLock() defer ac.cacheLock.RUnlock() - for key, value := range ac.cacheMap { - if strings.HasPrefix(key, prefix) && value.valid() && value.exists() { - return true + + directory, getErr := ac.cacheMap.get(prefix) + if getErr == nil && directory.exists() { + for _, chldItem := range directory.children { + if chldItem.exists() { + return true + } } } return false @@ -623,7 +537,10 @@ func (ac *AttrCache) CreateFile(options internal.CreateFileOptions) (*handlemap. } // TODO: we assume that the OS will call GetAttr after this. // if it doesn't, will invalidating this entry cause problems? - ac.invalidatePath(options.Name) + toBeInvalid, getErr := ac.cacheMap.get(options.Name) + if getErr == nil { + toBeInvalid.invalidate() + } } return h, err @@ -638,7 +555,12 @@ func (ac *AttrCache) DeleteFile(options internal.DeleteFileOptions) error { deletionTime := time.Now() ac.cacheLock.RLock() defer ac.cacheLock.RUnlock() - ac.deletePath(options.Name, deletionTime) + toBeDeleted, getErr := ac.cacheMap.get(options.Name) + if getErr != nil { + log.Err("AttrCache::DeleteFile : %s", getErr) + } else { + toBeDeleted.markDeleted(deletionTime) + } if ac.cacheDirs { ac.updateAncestorsInCloud(getParentDir(options.Name), deletionTime) } @@ -651,60 +573,28 @@ func (ac *AttrCache) DeleteFile(options internal.DeleteFileOptions) error { // and search the contents of all of its ancestors, // to record which of them contain objects in their subtrees func (ac *AttrCache) updateAncestorsInCloud(dirPath string, time time.Time) { - // first gather a list of ancestors - var ancestorCacheItems []*attrCacheItem - ancestorPath := internal.TruncateDirName(dirPath) - for ancestorPath != "" { - ancestorCacheItem, found := ac.cacheMap[ancestorPath] - if !(found && ancestorCacheItem.valid() && ancestorCacheItem.exists()) { - ancestorObjAttr := internal.CreateObjAttrDir(ancestorPath) - ancestorCacheItem = newAttrCacheItem(ancestorObjAttr, true, time) - ac.cacheMap[ancestorPath] = ancestorCacheItem - } - ancestorCacheItems = append(ancestorCacheItems, ancestorCacheItem) - // speculatively set all ancestors as not in cloud storage - // all will be set correctly in the loop below - ancestorCacheItem.markInCloud(false) - // move on to the next ancestor - ancestorPath = getParentDir(ancestorPath) - } - // if we're at the root, no need to search (the root is always in the cloud) - if len(ancestorCacheItems) == 0 { - return - } - // search the cache to check whether each ancestor is in cloud storage -cacheSearch: - for key, value := range ac.cacheMap { - // ignore items that are deleted, invalid, or directories that are not in cloud storage - if !value.exists() || !value.valid() || !value.isInCloud() { - continue - } - // 
iterate over ancestors, from the deepest up - prefixMatchFound := false - matchAncestors: - for ancestorIndex, ancestor := range ancestorCacheItems { - // don't visit ancestors that have already been updated - if ancestor.isInCloud() { - // if all ancestors have been updated, the entire search is done - if ancestorIndex == 0 { - break cacheSearch - } - break matchAncestors - } - // we already found that one ancestor is in the cloud - // so its ancestors are too - if prefixMatchFound { - ancestor.markInCloud(true) - continue matchAncestors - } - // check for a prefix match - prefix := dirToPrefix(ancestor.attr.Path) - if strings.HasPrefix(key, prefix) { - prefixMatchFound = true - // update this ancestor - ancestor.markInCloud(true) + for dirPath != "" { + ancestorCacheItem, getErr := ac.cacheMap.get(dirPath) + if getErr != nil { + ancestorObjAttr := internal.CreateObjAttrDir(dirPath) + ancestorCacheItem = ac.cacheMap.insert(ancestorObjAttr, true, time) + } + var anyChildrenInCloud bool + + for _, item := range ancestorCacheItem.children { + if item.exists() && item.isInCloud() { + anyChildrenInCloud = true + break } } + if ancestorCacheItem.isInCloud() != anyChildrenInCloud { + ancestorCacheItem.markInCloud(anyChildrenInCloud) + } else { + //if we didn't change the parent, then no change is visible to the grandparent, etc. + break + } + // move on to the next ancestor + dirPath = getParentDir(dirPath) } } @@ -715,20 +605,24 @@ func (ac *AttrCache) RenameFile(options internal.RenameFileOptions) error { err := ac.NextComponent().RenameFile(options) if err == nil { renameTime := time.Now() + ac.cacheLock.RLock() + defer ac.cacheLock.RUnlock() + + //get the source item + sourceItem, getErr := ac.cacheMap.get(options.Src) + if getErr != nil || !sourceItem.exists() { + log.Warn("AttrCache::RenameFile : Source %s does not exist in cache", options.Src) + return nil + } + + // move source item to destination + ac.moveAttrCachedItem(sourceItem, options.Src, options.Dst, time.Now()) if ac.cacheDirs { - ac.cacheLock.Lock() ac.updateAncestorsInCloud(getParentDir(options.Src), renameTime) // mark the destination parent directory tree as containing objects ac.markAncestorsInCloud(getParentDir(options.Dst), renameTime) - ac.cacheLock.Unlock() } - ac.cacheLock.RLock() - defer ac.cacheLock.RUnlock() - // TODO: Can we just copy over the attributes from the source to the destination so we don't have to invalidate? - ac.deletePath(options.Src, renameTime) - ac.invalidatePath(options.Dst) } - return err } @@ -752,7 +646,13 @@ func (ac *AttrCache) WriteFile(options internal.WriteFileOptions) (int, error) { ac.cacheLock.RLock() defer ac.cacheLock.RUnlock() // TODO: Could we just update the size and mod time of the file here? Or can other attributes change here? 
- ac.invalidatePath(options.Handle.Path) + + toBeInvalid, getErr := ac.cacheMap.get(attr.Path) + if getErr != nil { + log.Err("AttrCache::WriteFile : %s", getErr) + } else { + toBeInvalid.invalidate() + } } return size, err } @@ -766,11 +666,15 @@ func (ac *AttrCache) TruncateFile(options internal.TruncateFileOptions) error { ac.cacheLock.RLock() defer ac.cacheLock.RUnlock() - // no need to truncate the name of the file - value, found := ac.cacheMap[options.Name] - if found && value.valid() && value.exists() { - value.setSize(options.Size) + truncatedItem, getErr := ac.cacheMap.get(options.Name) + if getErr != nil || !truncatedItem.exists() { + log.Warn("AttrCache::TruncateFile : %s replacing missing cache entry", options.Name) + // replace the missing entry + entryTime := time.Now() + truncatedAttr := internal.CreateObjAttr(options.Name, options.Size, entryTime) + truncatedItem = ac.cacheMap.insert(truncatedAttr, true, entryTime) } + truncatedItem.setSize(options.Size) } return err } @@ -800,21 +704,39 @@ func (ac *AttrCache) CopyFromFile(options internal.CopyFromFileOptions) error { // Mark ancestors as existing in cloud storage now ac.markAncestorsInCloud(getParentDir(options.Name), time.Now()) } - // TODO: Could we just update the size and mod time of the file here? Or can other attributes change here? + // TODO: we're RLocking the cache but we need to also lock this attr item because another thread could be reading this attr item - ac.invalidatePath(options.Name) + toBeUpdated, getErr := ac.cacheMap.get(options.Name) + if getErr != nil { + log.Warn("AttrCache::CopyFromFile : %s", getErr) + return nil + } + + fileStat, statErr := options.File.Stat() + if statErr != nil { + log.Warn("AttrCache::CopyFromFile : Can't get new file size: %s", statErr) + toBeUpdated.invalidate() + return nil + } + + // get the size of the source file + fileSize := fileStat.Size() + movedItem := ac.moveAttrCachedItem(toBeUpdated, options.Name, options.Name, time.Now()) + movedItem.attr.Size = fileSize } return err } func (ac *AttrCache) SyncFile(options internal.SyncFileOptions) error { log.Trace("AttrCache::SyncFile : %s", options.Handle.Path) - err := ac.NextComponent().SyncFile(options) if err == nil { ac.cacheLock.RLock() defer ac.cacheLock.RUnlock() - ac.invalidatePath(options.Handle.Path) + toBeInvalid, getErr := ac.cacheMap.get(options.Handle.Path) + if getErr == nil { + toBeInvalid.invalidate() + } } return err } @@ -834,20 +756,16 @@ func (ac *AttrCache) SyncDir(options internal.SyncDirOptions) error { // GetAttr : Try to serve the request from the attribute cache, otherwise cache attributes of the path returned by next component func (ac *AttrCache) GetAttr(options internal.GetAttrOptions) (*internal.ObjAttr, error) { log.Trace("AttrCache::GetAttr : %s", options.Name) - truncatedPath := internal.TruncateDirName(options.Name) - ac.cacheLock.RLock() - value, found := ac.cacheMap[truncatedPath] + value, getErr := ac.cacheMap.get(options.Name) ac.cacheLock.RUnlock() - - // Try to serve the request from the attribute cache - if found && value.valid() && time.Since(value.cachedAt).Seconds() < float64(ac.cacheTimeout) { + if getErr == nil && value.valid() && time.Since(value.cachedAt).Seconds() < float64(ac.cacheTimeout) { + // Try to serve the request from the attribute cache // Is the entry marked deleted? 
if value.isDeleted() { log.Debug("AttrCache::GetAttr : %s served from cache", options.Name) return &internal.ObjAttr{}, syscall.ENOENT } - // IsMetadataRetrieved is false in the case of ADLS List since the API does not support metadata. // Once migration of ADLS list to blob endpoint is done (in future service versions), we can remove this. // options.RetrieveMetadata is set by CopyFromFile and WriteFile which need metadata to ensure it is preserved. @@ -859,29 +777,23 @@ func (ac *AttrCache) GetAttr(options internal.GetAttrOptions) (*internal.ObjAttr } // Get the attributes from next component and cache them - pathAttr, err := ac.NextComponent().GetAttr(options) + pathAttr, getErr := ac.NextComponent().GetAttr(options) ac.cacheLock.Lock() defer ac.cacheLock.Unlock() - if err == nil { + if getErr == nil { // Retrieved attributes so cache them - // TODO: bug: when cacheDirs is true, the cache limit will cause some directories to be double-listed - // TODO: shouldn't this be an LRU? This sure looks like the opposite... - if len(ac.cacheMap) < ac.maxFiles { - ac.cacheMap[truncatedPath] = newAttrCacheItem(pathAttr, true, time.Now()) - } else { - log.Debug("AttrCache::GetAttr : %s skipping adding to attribute cache because it is full", options.Name) - } + ac.cacheMap.insert(pathAttr, true, time.Now()) if ac.cacheDirs { ac.markAncestorsInCloud(getParentDir(options.Name), time.Now()) } - } else if err == syscall.ENOENT { - // Path does not exist so cache a no-entry item - ac.cacheMap[truncatedPath] = newAttrCacheItem(&internal.ObjAttr{}, false, time.Now()) + } else if getErr == syscall.ENOENT { + // cache this entity not existing + // TODO: change the tests to no longer use empty structs. use internal.createAttr() to define a path instead of a literal. + ac.cacheMap.insert(&internal.ObjAttr{Path: internal.TruncateDirName(options.Name)}, false, time.Now()) } - - return pathAttr, err + return pathAttr, getErr } // CreateLink : Mark the link and target invalid @@ -893,7 +805,10 @@ func (ac *AttrCache) CreateLink(options internal.CreateLinkOptions) error { if err == nil { ac.cacheLock.RLock() defer ac.cacheLock.RUnlock() - ac.invalidatePath(options.Name) + toBeInvalid, getErr := ac.cacheMap.get(options.Name) + if getErr == nil { + toBeInvalid.invalidate() + } if ac.cacheDirs { ac.markAncestorsInCloud(getParentDir(options.Name), time.Now()) } @@ -909,8 +824,10 @@ func (ac *AttrCache) FlushFile(options internal.FlushFileOptions) error { if err == nil { ac.cacheLock.RLock() defer ac.cacheLock.RUnlock() - - ac.invalidatePath(options.Handle.Path) + toBeInvalid, getErr := ac.cacheMap.get(options.Handle.Path) + if getErr == nil { + toBeInvalid.invalidate() + } } return err } @@ -925,12 +842,13 @@ func (ac *AttrCache) Chmod(options internal.ChmodOptions) error { ac.cacheLock.RLock() defer ac.cacheLock.RUnlock() - value, found := ac.cacheMap[internal.TruncateDirName(options.Name)] - if found && value.valid() && value.exists() { + value, getErr := ac.cacheMap.get(internal.TruncateDirName(options.Name)) + if getErr != nil || !value.exists() { + log.Err("AttrCache::Chmod : %s not in cache", options.Name) + } else { value.setMode(options.Mode) } } - return err } diff --git a/component/attr_cache/attr_cache_test.go b/component/attr_cache/attr_cache_test.go index 20786c2d5..5c6efc9a5 100644 --- a/component/attr_cache/attr_cache_test.go +++ b/component/attr_cache/attr_cache_test.go @@ -97,6 +97,11 @@ func getPathAttr(path string, size int64, mode os.FileMode, metadata bool) *inte } } +func assertNotInCache(assert 
*assert.Assertions, attrCache *AttrCache, path string) { + _, err := attrCache.cacheMap.get(path) + assert.NotNil(err) +} + func addPathToCache(assert *assert.Assertions, attrCache *AttrCache, path string, metadata bool) { isDir := path[len(path)-1] == '/' path = internal.TruncateDirName(path) @@ -104,53 +109,58 @@ func addPathToCache(assert *assert.Assertions, attrCache *AttrCache, path string if isDir { pathAttr = getDirPathAttr(path) } - attrCache.cacheMap[path] = newAttrCacheItem(pathAttr, true, time.Now()) - assert.Contains(attrCache.cacheMap, path) + attrCache.cacheMap.insert(pathAttr, true, time.Now()) } func assertDeleted(suite *attrCacheTestSuite, path string) { - suite.assert.Contains(suite.attrCache.cacheMap, path) - suite.assert.EqualValues(&internal.ObjAttr{}, suite.attrCache.cacheMap[path].attr) - suite.assert.True(suite.attrCache.cacheMap[path].valid()) - suite.assert.False(suite.attrCache.cacheMap[path].exists()) + cacheItem, err := suite.attrCache.cacheMap.get(path) + suite.assert.Nil(err) + suite.assert.EqualValues(&internal.ObjAttr{}, cacheItem.attr) + suite.assert.True(cacheItem.valid()) + suite.assert.False(cacheItem.exists()) } func assertInvalid(suite *attrCacheTestSuite, path string) { - suite.assert.Contains(suite.attrCache.cacheMap, path) - suite.assert.EqualValues(&internal.ObjAttr{}, suite.attrCache.cacheMap[path].attr) - suite.assert.False(suite.attrCache.cacheMap[path].valid()) + cacheItem, err := suite.attrCache.cacheMap.get(path) + suite.assert.Nil(err) + suite.assert.EqualValues(&internal.ObjAttr{}, cacheItem.attr) + suite.assert.False(cacheItem.valid()) } func assertUntouched(suite *attrCacheTestSuite, path string) { - suite.assert.Contains(suite.attrCache.cacheMap, path) - suite.assert.NotEqualValues(suite.attrCache.cacheMap[path].attr, &internal.ObjAttr{}) - suite.assert.EqualValues(defaultSize, suite.attrCache.cacheMap[path].attr.Size) - suite.assert.EqualValues(defaultMode, suite.attrCache.cacheMap[path].attr.Mode) - suite.assert.True(suite.attrCache.cacheMap[path].valid()) - suite.assert.True(suite.attrCache.cacheMap[path].exists()) + cacheItem, err := suite.attrCache.cacheMap.get(path) + suite.assert.Nil(err) + suite.assert.NotEqualValues(cacheItem.attr, &internal.ObjAttr{}) + suite.assert.EqualValues(defaultSize, cacheItem.attr.Size) + suite.assert.EqualValues(defaultMode, cacheItem.attr.Mode) + suite.assert.True(cacheItem.valid()) + suite.assert.True(cacheItem.exists()) } func assertExists(suite *attrCacheTestSuite, path string) { - suite.assert.Contains(suite.attrCache.cacheMap, path) - suite.assert.NotEqualValues(suite.attrCache.cacheMap[path].attr, &internal.ObjAttr{}) - suite.assert.True(suite.attrCache.cacheMap[path].valid()) - suite.assert.True(suite.attrCache.cacheMap[path].exists()) + checkItem, err := suite.attrCache.cacheMap.get(path) + suite.assert.Nil(err) + suite.assert.NotEqualValues(checkItem.attr, &internal.ObjAttr{}) + suite.assert.True(checkItem.valid()) + suite.assert.True(checkItem.exists()) } func assertInCloud(suite *attrCacheTestSuite, path string) { - suite.assert.Contains(suite.attrCache.cacheMap, path) - suite.assert.NotEqualValues(suite.attrCache.cacheMap[path].attr, &internal.ObjAttr{}) - suite.assert.True(suite.attrCache.cacheMap[path].valid()) - suite.assert.True(suite.attrCache.cacheMap[path].exists()) - suite.assert.True(suite.attrCache.cacheMap[path].isInCloud()) + checkItem, err := suite.attrCache.cacheMap.get(path) + suite.assert.Nil(err) + suite.assert.NotEqualValues(checkItem.attr, &internal.ObjAttr{}) + 
suite.assert.True(checkItem.valid()) + suite.assert.True(checkItem.exists()) + suite.assert.True(checkItem.isInCloud()) } func assertNotInCloud(suite *attrCacheTestSuite, path string) { - suite.assert.Contains(suite.attrCache.cacheMap, path) - suite.assert.NotEqualValues(suite.attrCache.cacheMap[path].attr, &internal.ObjAttr{}) - suite.assert.True(suite.attrCache.cacheMap[path].valid()) - suite.assert.True(suite.attrCache.cacheMap[path].exists()) - suite.assert.False(suite.attrCache.cacheMap[path].isInCloud()) + checkItem, err := suite.attrCache.cacheMap.get(path) + suite.assert.Nil(err) + suite.assert.NotEqualValues(checkItem.attr, &internal.ObjAttr{}) + suite.assert.True(checkItem.valid()) + suite.assert.True(checkItem.exists()) + suite.assert.False(checkItem.isInCloud()) } // Directory structure @@ -165,7 +175,7 @@ func assertNotInCloud(suite *attrCacheTestSuite, path string) { // ab/c1 // // ac -func generateNestedDirectory(path string) (*list.List, *list.List, *list.List) { +func generateDirectory(path string) (*list.List, *list.List, *list.List) { path = internal.TruncateDirName(path) aPaths := list.New() @@ -186,7 +196,7 @@ func generateNestedDirectory(path string) (*list.List, *list.List, *list.List) { } func generateNestedPathAttr(path string, size int64, mode os.FileMode) []*internal.ObjAttr { - a, _, _ := generateNestedDirectory(path) + a, _, _ := generateDirectory(path) pathAttrs := make([]*internal.ObjAttr, 0) i := 0 for p := a.Front(); p != nil; p = p.Next() { @@ -205,7 +215,7 @@ func generateNestedPathAttr(path string, size int64, mode os.FileMode) []*intern func addDirectoryToCache(assert *assert.Assertions, attrCache *AttrCache, path string, metadata bool) (*list.List, *list.List, *list.List) { // TODO: flag directories as such, or else recursion based on IsDir() won't work... 
- aPaths, abPaths, acPaths := generateNestedDirectory(path) + aPaths, abPaths, acPaths := generateDirectory(path) for p := aPaths.Front(); p != nil; p = p.Next() { addPathToCache(assert, attrCache, p.Value.(string), metadata) @@ -309,7 +319,7 @@ func (suite *attrCacheTestSuite) TestCreateDir() { err := suite.attrCache.CreateDir(options) suite.assert.NotNil(err) - suite.assert.NotContains(suite.attrCache.cacheMap, truncatedPath) + assertNotInCache(suite.assert, suite.attrCache, truncatedPath) // Success // Entry Does Not Already Exist @@ -317,14 +327,18 @@ func (suite *attrCacheTestSuite) TestCreateDir() { err = suite.attrCache.CreateDir(options) suite.assert.Nil(err) - suite.assert.Contains(suite.attrCache.cacheMap, truncatedPath) + + _, err = suite.attrCache.cacheMap.get(truncatedPath) + suite.assert.Nil(err) // Entry Already Exists suite.mock.EXPECT().CreateDir(options).Return(nil) err = suite.attrCache.CreateDir(options) suite.assert.Equal(os.ErrExist, err) - suite.assert.Contains(suite.attrCache.cacheMap, truncatedPath) + + _, err = suite.attrCache.cacheMap.get(truncatedPath) + suite.assert.Nil(err) }) } } @@ -353,7 +367,7 @@ func (suite *attrCacheTestSuite) TestCreateDirNoCacheDirs() { err := suite.attrCache.CreateDir(options) suite.assert.NotNil(err) - suite.assert.NotContains(suite.attrCache.cacheMap, truncatedPath) + assertNotInCache(suite.assert, suite.attrCache, truncatedPath) // Success // Entry Does Not Already Exist @@ -361,7 +375,7 @@ func (suite *attrCacheTestSuite) TestCreateDirNoCacheDirs() { err = suite.attrCache.CreateDir(options) suite.assert.Nil(err) - suite.assert.NotContains(suite.attrCache.cacheMap, truncatedPath) + assertNotInCache(suite.assert, suite.attrCache, truncatedPath) // Entry Already Exists addPathToCache(suite.assert, suite.attrCache, extendedPath, false) @@ -392,14 +406,14 @@ func (suite *attrCacheTestSuite) TestDeleteDir() { err := suite.attrCache.DeleteDir(options) suite.assert.NotNil(err) - suite.assert.NotContains(suite.attrCache.cacheMap, truncatedPath) + assertNotInCache(suite.assert, suite.attrCache, truncatedPath) // Entry Does Not Exist suite.mock.EXPECT().DeleteDir(options).Return(nil) err = suite.attrCache.DeleteDir(options) suite.assert.True(os.IsNotExist(err)) - suite.assert.NotContains(suite.attrCache.cacheMap, truncatedPath) + assertNotInCache(suite.assert, suite.attrCache, truncatedPath) // Entry Exists a, ab, ac := addDirectoryToCache(suite.assert, suite.attrCache, path, false) @@ -444,7 +458,7 @@ func (suite *attrCacheTestSuite) TestDeleteDirNoCacheDirs() { err := suite.attrCache.DeleteDir(options) suite.assert.NotNil(err) - suite.assert.NotContains(suite.attrCache.cacheMap, truncatedPath) + assertNotInCache(suite.assert, suite.attrCache, truncatedPath) // Success // Entry Does Not Already Exist @@ -452,7 +466,7 @@ func (suite *attrCacheTestSuite) TestDeleteDirNoCacheDirs() { err = suite.attrCache.DeleteDir(options) suite.assert.Nil(err) - suite.assert.NotContains(suite.attrCache.cacheMap, truncatedPath) + assertNotInCache(suite.assert, suite.attrCache, truncatedPath) // Entry Already Exists a, ab, ac := addDirectoryToCache(suite.assert, suite.attrCache, path, false) @@ -495,22 +509,22 @@ func (suite *attrCacheTestSuite) TestReadDirDoesNotExist() { // Entries Do Not Already Exist suite.mock.EXPECT().ReadDir(options).Return(aAttr, nil) - suite.assert.Empty(suite.attrCache.cacheMap) // cacheMap should be empty before call + suite.assert.Empty(suite.attrCache.cacheMap.children) // cacheMap should be empty before call returnedAttr, err := 
suite.attrCache.ReadDir(options) suite.assert.Nil(err) suite.assert.Equal(aAttr, returnedAttr) - suite.assert.Equal(len(suite.attrCache.cacheMap), len(aAttr)) // Entries should now be in the cache for _, p := range aAttr { - suite.assert.Contains(suite.attrCache.cacheMap, p.Path) - suite.assert.NotEqualValues(suite.attrCache.cacheMap[p.Path].attr, &internal.ObjAttr{}) + checkItem, err := suite.attrCache.cacheMap.get(p.Path) + suite.assert.Nil(err) + suite.assert.NotEqualValues(checkItem.attr, &internal.ObjAttr{}) if !p.IsDir() { - suite.assert.EqualValues(size, suite.attrCache.cacheMap[p.Path].attr.Size) // new size should be set - suite.assert.EqualValues(mode, suite.attrCache.cacheMap[p.Path].attr.Mode) // new mode should be set + suite.assert.EqualValues(size, checkItem.attr.Size) // new size should be set + suite.assert.EqualValues(mode, checkItem.attr.Mode) // new mode should be set } - suite.assert.True(suite.attrCache.cacheMap[p.Path].valid()) - suite.assert.True(suite.attrCache.cacheMap[p.Path].exists()) + suite.assert.True(checkItem.valid()) + suite.assert.True(checkItem.exists()) } }) } @@ -548,14 +562,15 @@ func (suite *attrCacheTestSuite) TestReadDirExists() { for p := a.Front(); p != nil; p = p.Next() { pString := p.Value.(string) cachePath := internal.TruncateDirName(pString) - suite.assert.Contains(suite.attrCache.cacheMap, cachePath) - suite.assert.NotEqualValues(suite.attrCache.cacheMap[cachePath].attr, &internal.ObjAttr{}) - if !suite.attrCache.cacheMap[cachePath].attr.IsDir() { - suite.assert.EqualValues(size, suite.attrCache.cacheMap[cachePath].attr.Size) // new size should be set - suite.assert.EqualValues(mode, suite.attrCache.cacheMap[cachePath].attr.Mode) // new mode should be set + checkItem, err := suite.attrCache.cacheMap.get(cachePath) + suite.assert.Nil(err) + suite.assert.NotEqualValues(checkItem.attr, &internal.ObjAttr{}) + if !checkItem.attr.IsDir() { + suite.assert.EqualValues(size, checkItem.attr.Size) // new size should be set + suite.assert.EqualValues(mode, checkItem.attr.Mode) // new mode should be set } - suite.assert.True(suite.attrCache.cacheMap[cachePath].valid()) - suite.assert.True(suite.attrCache.cacheMap[cachePath].exists()) + suite.assert.True(checkItem.valid()) + suite.assert.True(checkItem.exists()) } // ab and ac paths should be untouched @@ -615,13 +630,12 @@ func (suite *attrCacheTestSuite) TestReadDirNoCacheOnList() { options := internal.ReadDirOptions{Name: path} suite.mock.EXPECT().ReadDir(options).Return(aAttr, nil) - suite.assert.Empty(suite.attrCache.cacheMap) // cacheMap should be empty before call + suite.assert.Empty(suite.attrCache.cacheMap.children) // cacheMap should be empty before call returnedAttr, err := suite.attrCache.ReadDir(options) suite.assert.Nil(err) suite.assert.Equal(aAttr, returnedAttr) // cacheMap should only have the listed after the call - suite.assert.EqualValues(1, len(suite.attrCache.cacheMap)) assertExists(suite, path) } @@ -642,12 +656,12 @@ func (suite *attrCacheTestSuite) TestReadDirNoCacheOnListNoCacheDirs() { options := internal.ReadDirOptions{Name: path} suite.mock.EXPECT().ReadDir(options).Return(aAttr, nil) - suite.assert.Empty(suite.attrCache.cacheMap) // cacheMap should be empty before call + suite.assert.Empty(suite.attrCache.cacheMap.children) // cacheMap should be empty before call returnedAttr, err := suite.attrCache.ReadDir(options) suite.assert.Nil(err) suite.assert.Equal(aAttr, returnedAttr) - suite.assert.Empty(suite.attrCache.cacheMap) // cacheMap should be empty after call + 
suite.assert.Empty(suite.attrCache.cacheMap.children) // cacheMap should be empty after call } func (suite *attrCacheTestSuite) TestReadDirError() { @@ -667,7 +681,7 @@ func (suite *attrCacheTestSuite) TestReadDirError() { _, err := suite.attrCache.ReadDir(options) suite.assert.NotNil(err) - suite.assert.NotContains(suite.attrCache.cacheMap, truncatedPath) + assertNotInCache(suite.assert, suite.attrCache, truncatedPath) }) } } @@ -741,17 +755,17 @@ func (suite *attrCacheTestSuite) TestRenameDir() { err := suite.attrCache.RenameDir(options) suite.assert.NotNil(err) - suite.assert.NotContains(suite.attrCache.cacheMap, truncatedSrc) - suite.assert.NotContains(suite.attrCache.cacheMap, truncatedDst) + assertNotInCache(suite.assert, suite.attrCache, truncatedSrc) + assertNotInCache(suite.assert, suite.attrCache, truncatedDst) // Error // Source Entry Does Not Exist suite.mock.EXPECT().RenameDir(options).Return(nil) err = suite.attrCache.RenameDir(options) - suite.assert.Equal(err, syscall.ENOENT) - suite.assert.NotContains(suite.attrCache.cacheMap, truncatedSrc) - suite.assert.NotContains(suite.attrCache.cacheMap, truncatedDst) + suite.assert.NotNil(err) + assertNotInCache(suite.assert, suite.attrCache, truncatedSrc) + assertNotInCache(suite.assert, suite.attrCache, truncatedDst) // Error // Destination Entry (ab) Already Exists @@ -826,8 +840,8 @@ func (suite *attrCacheTestSuite) TestRenameDirNoCacheDirs() { err := suite.attrCache.RenameDir(options) suite.assert.NotNil(err) - suite.assert.NotContains(suite.attrCache.cacheMap, truncatedSrc) - suite.assert.NotContains(suite.attrCache.cacheMap, truncatedDst) + assertNotInCache(suite.assert, suite.attrCache, truncatedSrc) + assertNotInCache(suite.assert, suite.attrCache, truncatedDst) // Success // Entry Does Not Already Exist @@ -835,8 +849,8 @@ func (suite *attrCacheTestSuite) TestRenameDirNoCacheDirs() { err = suite.attrCache.RenameDir(options) suite.assert.Nil(err) - suite.assert.NotContains(suite.attrCache.cacheMap, truncatedSrc) - suite.assert.NotContains(suite.attrCache.cacheMap, truncatedDst) + assertNotInCache(suite.assert, suite.attrCache, truncatedSrc) + assertNotInCache(suite.assert, suite.attrCache, truncatedDst) // Entry Already Exists a, ab, ac := addDirectoryToCache(suite.assert, suite.attrCache, input.src, false) @@ -876,7 +890,7 @@ func (suite *attrCacheTestSuite) TestCreateFile() { _, err := suite.attrCache.CreateFile(options) suite.assert.NotNil(err) - suite.assert.NotContains(suite.attrCache.cacheMap, path) + assertNotInCache(suite.assert, suite.attrCache, path) // Success // Entry Does Not Already Exist @@ -884,7 +898,7 @@ func (suite *attrCacheTestSuite) TestCreateFile() { _, err = suite.attrCache.CreateFile(options) suite.assert.Nil(err) - suite.assert.NotContains(suite.attrCache.cacheMap, path) + assertNotInCache(suite.assert, suite.attrCache, path) // Entry Already Exists addPathToCache(suite.assert, suite.attrCache, path, false) @@ -907,7 +921,7 @@ func (suite *attrCacheTestSuite) TestDeleteFile() { err := suite.attrCache.DeleteFile(options) suite.assert.NotNil(err) - suite.assert.NotContains(suite.attrCache.cacheMap, path) + assertNotInCache(suite.assert, suite.attrCache, path) // Success // Entry Does Not Already Exist @@ -915,7 +929,7 @@ func (suite *attrCacheTestSuite) TestDeleteFile() { err = suite.attrCache.DeleteFile(options) suite.assert.Nil(err) - suite.assert.NotContains(suite.attrCache.cacheMap, path) + assertNotInCache(suite.assert, suite.attrCache, path) // Entry Already Exists addPathToCache(suite.assert, 
suite.attrCache, path, false) @@ -941,7 +955,7 @@ func (suite *attrCacheTestSuite) TestSyncFile() { err := suite.attrCache.SyncFile(options) suite.assert.NotNil(err) - suite.assert.NotContains(suite.attrCache.cacheMap, path) + assertNotInCache(suite.assert, suite.attrCache, path) // Success // Entry Does Not Already Exist @@ -949,7 +963,7 @@ func (suite *attrCacheTestSuite) TestSyncFile() { err = suite.attrCache.SyncFile(options) suite.assert.Nil(err) - suite.assert.NotContains(suite.attrCache.cacheMap, path) + assertNotInCache(suite.assert, suite.attrCache, path) // Entry Already Exists addPathToCache(suite.assert, suite.attrCache, path, false) @@ -977,7 +991,7 @@ func (suite *attrCacheTestSuite) TestSyncDir() { err := suite.attrCache.SyncDir(options) suite.assert.NotNil(err) - suite.assert.NotContains(suite.attrCache.cacheMap, truncatedPath) + assertNotInCache(suite.assert, suite.attrCache, truncatedPath) // Success // Entry Does Not Already Exist @@ -985,7 +999,7 @@ func (suite *attrCacheTestSuite) TestSyncDir() { err = suite.attrCache.SyncDir(options) suite.assert.Nil(err) - suite.assert.NotContains(suite.attrCache.cacheMap, truncatedPath) + assertNotInCache(suite.assert, suite.attrCache, truncatedPath) // Entry Already Exists a, ab, ac := addDirectoryToCache(suite.assert, suite.attrCache, path, false) @@ -1035,7 +1049,7 @@ func (suite *attrCacheTestSuite) TestSyncDirNoCacheDirs() { err := suite.attrCache.SyncDir(options) suite.assert.NotNil(err) - suite.assert.NotContains(suite.attrCache.cacheMap, truncatedPath) + assertNotInCache(suite.assert, suite.attrCache, truncatedPath) // Success // Entry Does Not Already Exist @@ -1043,7 +1057,7 @@ func (suite *attrCacheTestSuite) TestSyncDirNoCacheDirs() { err = suite.attrCache.SyncDir(options) suite.assert.Nil(err) - suite.assert.NotContains(suite.attrCache.cacheMap, truncatedPath) + assertNotInCache(suite.assert, suite.attrCache, truncatedPath) // Entry Already Exists a, ab, ac := addDirectoryToCache(suite.assert, suite.attrCache, path, false) @@ -1079,8 +1093,8 @@ func (suite *attrCacheTestSuite) TestRenameFile() { err := suite.attrCache.RenameFile(options) suite.assert.NotNil(err) - suite.assert.NotContains(suite.attrCache.cacheMap, src) - suite.assert.NotContains(suite.attrCache.cacheMap, dst) + assertNotInCache(suite.assert, suite.attrCache, src) + assertNotInCache(suite.assert, suite.attrCache, dst) // Success // Entry Does Not Already Exist @@ -1088,8 +1102,8 @@ func (suite *attrCacheTestSuite) TestRenameFile() { err = suite.attrCache.RenameFile(options) suite.assert.Nil(err) - suite.assert.NotContains(suite.attrCache.cacheMap, src) - suite.assert.NotContains(suite.attrCache.cacheMap, dst) + assertNotInCache(suite.assert, suite.attrCache, src) + assertNotInCache(suite.assert, suite.attrCache, dst) // Entry Already Exists addPathToCache(suite.assert, suite.attrCache, src, false) @@ -1099,7 +1113,6 @@ func (suite *attrCacheTestSuite) TestRenameFile() { err = suite.attrCache.RenameFile(options) suite.assert.Nil(err) assertDeleted(suite, src) - assertInvalid(suite, dst) } // Tests Write File @@ -1113,12 +1126,14 @@ func (suite *attrCacheTestSuite) TestWriteFileError() { options := internal.WriteFileOptions{Handle: &handle, Metadata: nil} // Error - suite.mock.EXPECT().GetAttr(internal.GetAttrOptions{Name: path, RetrieveMetadata: true}).Return(nil, nil) + suite.mock.EXPECT().GetAttr(internal.GetAttrOptions{Name: path, RetrieveMetadata: true}).Return(&internal.ObjAttr{Path: path}, nil) suite.mock.EXPECT().WriteFile(options).Return(0, 
errors.New("Failed to write a file")) _, err := suite.attrCache.WriteFile(options) suite.assert.NotNil(err) - suite.assert.Contains(suite.attrCache.cacheMap, path) // GetAttr call will add this to the cache + _, err = suite.attrCache.cacheMap.get(path) + suite.assert.Nil(err) + // GetAttr call will add this to the cache } func (suite *attrCacheTestSuite) TestWriteFileDoesNotExist() { @@ -1131,12 +1146,15 @@ func (suite *attrCacheTestSuite) TestWriteFileDoesNotExist() { options := internal.WriteFileOptions{Handle: &handle, Metadata: nil} // Success // Entry Does Not Already Exist - suite.mock.EXPECT().GetAttr(internal.GetAttrOptions{Name: path, RetrieveMetadata: true}).Return(nil, nil) + suite.mock.EXPECT().GetAttr(internal.GetAttrOptions{Name: path, RetrieveMetadata: true}).Return(&internal.ObjAttr{Path: path}, nil) suite.mock.EXPECT().WriteFile(options).Return(0, nil) _, err := suite.attrCache.WriteFile(options) suite.assert.Nil(err) - suite.assert.Contains(suite.attrCache.cacheMap, path) // GetAttr call will add this to the cache + suite.attrCache.cacheMap.get(path) + suite.assert.Nil(err) + // GetAttr call will add this to the cache + } func (suite *attrCacheTestSuite) TestWriteFileExists() { @@ -1169,7 +1187,7 @@ func (suite *attrCacheTestSuite) TestTruncateFile() { err := suite.attrCache.TruncateFile(options) suite.assert.NotNil(err) - suite.assert.NotContains(suite.attrCache.cacheMap, path) + assertNotInCache(suite.assert, suite.attrCache, path) // Success // Entry Does Not Already Exist @@ -1177,7 +1195,8 @@ func (suite *attrCacheTestSuite) TestTruncateFile() { err = suite.attrCache.TruncateFile(options) suite.assert.Nil(err) - suite.assert.NotContains(suite.attrCache.cacheMap, path) + _, err = suite.attrCache.cacheMap.get(path) + suite.assert.Nil(err) // Entry Already Exists addPathToCache(suite.assert, suite.attrCache, path, false) @@ -1185,12 +1204,14 @@ func (suite *attrCacheTestSuite) TestTruncateFile() { err = suite.attrCache.TruncateFile(options) suite.assert.Nil(err) - suite.assert.Contains(suite.attrCache.cacheMap, path) - suite.assert.NotEqualValues(suite.attrCache.cacheMap[path].attr, &internal.ObjAttr{}) - suite.assert.EqualValues(size, suite.attrCache.cacheMap[path].attr.Size) // new size should be set - suite.assert.EqualValues(defaultMode, suite.attrCache.cacheMap[path].attr.Mode) - suite.assert.True(suite.attrCache.cacheMap[path].valid()) - suite.assert.True(suite.attrCache.cacheMap[path].exists()) + + checkItem, err := suite.attrCache.cacheMap.get(path) + suite.assert.Nil(err) + suite.assert.NotEqualValues(checkItem.attr, &internal.ObjAttr{}) + suite.assert.EqualValues(size, checkItem.attr.Size) // new size should be set + suite.assert.EqualValues(defaultMode, checkItem.attr.Mode) + suite.assert.True(checkItem.valid()) + suite.assert.True(checkItem.exists()) } // Tests CopyFromFile @@ -1199,13 +1220,15 @@ func (suite *attrCacheTestSuite) TestCopyFromFileError() { path := "a" options := internal.CopyFromFileOptions{Name: path, File: nil, Metadata: nil} - suite.mock.EXPECT().GetAttr(internal.GetAttrOptions{Name: path, RetrieveMetadata: true}).Return(nil, nil) + suite.mock.EXPECT().GetAttr(internal.GetAttrOptions{Name: path, RetrieveMetadata: true}).Return(&internal.ObjAttr{Path: path}, nil) // Error suite.mock.EXPECT().CopyFromFile(options).Return(errors.New("Failed to copy from file")) err := suite.attrCache.CopyFromFile(options) suite.assert.NotNil(err) - suite.assert.Contains(suite.attrCache.cacheMap, path) // GetAttr call will add this to the cache + _, err = 
suite.attrCache.cacheMap.get(path) + suite.assert.Nil(err) + // GetAttr call will add this to the cache } func (suite *attrCacheTestSuite) TestCopyFromFileDoesNotExist() { @@ -1215,26 +1238,31 @@ func (suite *attrCacheTestSuite) TestCopyFromFileDoesNotExist() { options := internal.CopyFromFileOptions{Name: path, File: nil, Metadata: nil} // Success // Entry Does Not Already Exist - suite.mock.EXPECT().GetAttr(internal.GetAttrOptions{Name: path, RetrieveMetadata: true}).Return(nil, nil) + suite.mock.EXPECT().GetAttr(internal.GetAttrOptions{Name: path, RetrieveMetadata: true}).Return(&internal.ObjAttr{Path: path}, nil) suite.mock.EXPECT().CopyFromFile(options).Return(nil) err := suite.attrCache.CopyFromFile(options) suite.assert.Nil(err) - suite.assert.Contains(suite.attrCache.cacheMap, path) // GetAttr call will add this to the cache + _, err = suite.attrCache.cacheMap.get(path) + suite.assert.Nil(err) + // GetAttr call will add this to the cache } func (suite *attrCacheTestSuite) TestCopyFromFileExists() { defer suite.cleanupTest() - path := "a" + path := "a" options := internal.CopyFromFileOptions{Name: path, File: nil, Metadata: nil} + // Entry Already Exists addPathToCache(suite.assert, suite.attrCache, path, true) suite.mock.EXPECT().CopyFromFile(options).Return(nil) + _, getErr := suite.attrCache.cacheMap.get(options.Name) + suite.assert.Nil(getErr) + err := suite.attrCache.CopyFromFile(options) suite.assert.Nil(err) - assertInvalid(suite, path) } // GetAttr @@ -1351,7 +1379,7 @@ func (suite *attrCacheTestSuite) TestGetAttrDoesNotExist() { // attributes should not be accessible so call the mock suite.mock.EXPECT().GetAttr(options).Return(getPathAttr(path, defaultSize, fs.FileMode(defaultMode), false), nil) - suite.assert.Empty(suite.attrCache.cacheMap) // cacheMap should be empty before call + suite.assert.Empty(suite.attrCache.cacheMap.children) // cacheMap should be empty before call _, err := suite.attrCache.GetAttr(options) suite.assert.Nil(err) assertUntouched(suite, truncatedPath) // item added to cache after @@ -1376,7 +1404,7 @@ func (suite *attrCacheTestSuite) TestGetAttrOtherError() { result, err := suite.attrCache.GetAttr(options) suite.assert.Equal(err, os.ErrNotExist) suite.assert.EqualValues(&internal.ObjAttr{}, result) - suite.assert.NotContains(suite.attrCache.cacheMap, truncatedPath) + assertNotInCache(suite.assert, suite.attrCache, truncatedPath) }) } } @@ -1398,11 +1426,12 @@ func (suite *attrCacheTestSuite) TestGetAttrEnoentError() { result, err := suite.attrCache.GetAttr(options) suite.assert.Equal(err, syscall.ENOENT) suite.assert.EqualValues(&internal.ObjAttr{}, result) - suite.assert.Contains(suite.attrCache.cacheMap, truncatedPath) - suite.assert.EqualValues(&internal.ObjAttr{}, suite.attrCache.cacheMap[truncatedPath].attr) - suite.assert.True(suite.attrCache.cacheMap[truncatedPath].valid()) - suite.assert.False(suite.attrCache.cacheMap[truncatedPath].exists()) - suite.assert.NotNil(suite.attrCache.cacheMap[truncatedPath].cachedAt) + checkItem, err := suite.attrCache.cacheMap.get(truncatedPath) + suite.assert.Nil(err) + suite.assert.EqualValues(&internal.ObjAttr{Path: "a"}, checkItem.attr) + suite.assert.True(checkItem.valid()) + suite.assert.False(checkItem.exists()) + suite.assert.NotNil(checkItem.cachedAt) }) } } @@ -1421,7 +1450,7 @@ func (suite *attrCacheTestSuite) TestCacheTimeout() { // attributes should not be accessible so call the mock suite.mock.EXPECT().GetAttr(options).Return(getPathAttr(path, defaultSize, fs.FileMode(defaultMode), true), nil) - 
suite.assert.Empty(suite.attrCache.cacheMap) // cacheMap should be empty before call + suite.assert.Empty(suite.attrCache.cacheMap.children) // cacheMap should be empty before call _, err := suite.attrCache.GetAttr(options) suite.assert.Nil(err) assertUntouched(suite, path) // item added to cache after @@ -1452,8 +1481,7 @@ func (suite *attrCacheTestSuite) TestCreateLink() { err := suite.attrCache.CreateLink(options) suite.assert.NotNil(err) - suite.assert.NotContains(suite.attrCache.cacheMap, link) - suite.assert.NotContains(suite.attrCache.cacheMap, path) + assertNotInCache(suite.assert, suite.attrCache, link) // Success // Entry Does Not Already Exist @@ -1461,8 +1489,7 @@ func (suite *attrCacheTestSuite) TestCreateLink() { err = suite.attrCache.CreateLink(options) suite.assert.Nil(err) - suite.assert.NotContains(suite.attrCache.cacheMap, link) - suite.assert.NotContains(suite.attrCache.cacheMap, path) + assertNotInCache(suite.assert, suite.attrCache, link) // Entry Already Exists addPathToCache(suite.assert, suite.attrCache, link, false) @@ -1494,7 +1521,7 @@ func (suite *attrCacheTestSuite) TestChmod() { err := suite.attrCache.Chmod(options) suite.assert.NotNil(err) - suite.assert.NotContains(suite.attrCache.cacheMap, truncatedPath) + assertNotInCache(suite.assert, suite.attrCache, truncatedPath) // Success // Entry Does Not Already Exist @@ -1502,7 +1529,7 @@ func (suite *attrCacheTestSuite) TestChmod() { err = suite.attrCache.Chmod(options) suite.assert.Nil(err) - suite.assert.NotContains(suite.attrCache.cacheMap, truncatedPath) + assertNotInCache(suite.assert, suite.attrCache, truncatedPath) // Entry Already Exists addPathToCache(suite.assert, suite.attrCache, path, false) @@ -1510,12 +1537,15 @@ func (suite *attrCacheTestSuite) TestChmod() { err = suite.attrCache.Chmod(options) suite.assert.Nil(err) - suite.assert.Contains(suite.attrCache.cacheMap, truncatedPath) - suite.assert.NotEqualValues(suite.attrCache.cacheMap[truncatedPath].attr, &internal.ObjAttr{}) - suite.assert.EqualValues(defaultSize, suite.attrCache.cacheMap[truncatedPath].attr.Size) - suite.assert.EqualValues(mode, suite.attrCache.cacheMap[truncatedPath].attr.Mode) // new mode should be set - suite.assert.True(suite.attrCache.cacheMap[truncatedPath].valid()) - suite.assert.True(suite.attrCache.cacheMap[truncatedPath].exists()) + + checkItem, err := suite.attrCache.cacheMap.get(truncatedPath) + suite.assert.Nil(err) + + suite.assert.NotEqualValues(checkItem.attr, &internal.ObjAttr{}) + suite.assert.EqualValues(defaultSize, checkItem.attr.Size) + suite.assert.EqualValues(mode, checkItem.attr.Mode) // new mode should be set + suite.assert.True(checkItem.valid()) + suite.assert.True(checkItem.exists()) }) } } @@ -1541,7 +1571,7 @@ func (suite *attrCacheTestSuite) TestChown() { err := suite.attrCache.Chown(options) suite.assert.NotNil(err) - suite.assert.NotContains(suite.attrCache.cacheMap, truncatedPath) + assertNotInCache(suite.assert, suite.attrCache, truncatedPath) // Success // Entry Does Not Already Exist @@ -1549,7 +1579,7 @@ func (suite *attrCacheTestSuite) TestChown() { err = suite.attrCache.Chown(options) suite.assert.Nil(err) - suite.assert.NotContains(suite.attrCache.cacheMap, truncatedPath) + assertNotInCache(suite.assert, suite.attrCache, truncatedPath) // Entry Already Exists addPathToCache(suite.assert, suite.attrCache, path, false) diff --git a/component/attr_cache/cacheMap.go b/component/attr_cache/cacheMap.go index 824bd8df9..68ac6a34f 100644 --- a/component/attr_cache/cacheMap.go +++ 
diff --git a/component/attr_cache/cacheMap.go b/component/attr_cache/cacheMap.go
index 824bd8df9..68ac6a34f 100644
--- a/component/attr_cache/cacheMap.go
+++ b/component/attr_cache/cacheMap.go
@@ -26,7 +26,9 @@
 package attr_cache
 
 import (
+	"fmt"
 	"os"
+	"strings"
 	"time"
 
 	"github.com/Seagate/cloudfuse/common"
@@ -47,6 +49,7 @@ type attrCacheItem struct {
 	attr     *internal.ObjAttr
 	cachedAt time.Time
 	attrFlag common.BitMap16
+	children map[string]*attrCacheItem
 }
 
 func newAttrCacheItem(attr *internal.ObjAttr, exists bool, cachedAt time.Time) *attrCacheItem {
@@ -55,23 +58,76 @@ func newAttrCacheItem(attr *internal.ObjAttr, exists bool, cachedAt time.Time) *attrCacheItem {
 		attrFlag: 0,
 		cachedAt: cachedAt,
 	}
-
 	item.attrFlag.Set(AttrFlagValid)
 	if exists {
 		item.attrFlag.Set(AttrFlagExists)
 	}
-
 	return item
 }
 
+func (value *attrCacheItem) insert(attr *internal.ObjAttr, exists bool, cachedAt time.Time) *attrCacheItem {
+	if attr == nil {
+		return nil
+	}
+	path := internal.TruncateDirName(attr.Path)
+	//start recursion
+	cachedItem := value.insertHelper(attr, exists, cachedAt, path, "")
+	return cachedItem
+}
+
+// TODO: write unit tests for this
+func (value *attrCacheItem) insertHelper(attr *internal.ObjAttr, exists bool, cachedAt time.Time, path string, itemPath string) *attrCacheItem {
+	var cachedItem *attrCacheItem
+	paths := strings.SplitN(path, "/", 2) // e.g. for "home/user/file", paths[0] is "home" and paths[1] is "user/file"
+	if value.children == nil {
+		value.children = make(map[string]*attrCacheItem)
+	}
+	if len(paths) < 2 {
+		// this is a leaf
+		cachedItem = newAttrCacheItem(attr, exists, cachedAt)
+		value.children[paths[0]] = cachedItem
+	} else {
+		itemPath += paths[0] + "/"
+		//see if the directory exists. if not, create it.
+		_, ok := value.children[paths[0]]
+		if !ok {
+			value.children[paths[0]] = newAttrCacheItem(internal.CreateObjAttrDir(itemPath), exists, cachedAt)
+		}
+		cachedItem = value.children[paths[0]].insertHelper(attr, exists, cachedAt, paths[1], itemPath)
+	}
+	return cachedItem
+}
+
+// get returns the *attrCacheItem from the cacheMap based on the provided path string
+func (value *attrCacheItem) get(path string) (*attrCacheItem, error) {
+	path = internal.TruncateDirName(path)
+	paths := strings.Split(path, "/")
+	currentItem := value
+	for _, pathElement := range paths {
+		// an empty path refers to the root item itself
+		if path == "" {
+			break
+		}
+		var ok bool
+		currentItem, ok = currentItem.children[pathElement]
+		// check whether this path element exists in the tree
+		if !ok {
+			return nil, fmt.Errorf("Cache entry for path %s not found", path)
+		}
+		//TODO: side note: cacheLocks. channel, sync, semaphore.
+	}
+	return currentItem, nil
+}
+
 func (value *attrCacheItem) valid() bool {
 	return value.attrFlag.IsSet(AttrFlagValid)
 }
 
 func (value *attrCacheItem) exists() bool {
-	return value.attrFlag.IsSet(AttrFlagExists)
+	return value.valid() && value.attrFlag.IsSet(AttrFlagExists)
 }
 
+// TODO: don't return true for deleted files.
 func (value *attrCacheItem) isInCloud() bool {
 	isObject := !value.attr.IsDir()
 	isDirInCloud := value.attr.IsDir() && !value.attrFlag.IsSet(AttrFlagNotInCloud)
@@ -79,15 +135,25 @@ func (value *attrCacheItem) isInCloud() bool {
 }
 
 func (value *attrCacheItem) markDeleted(deletedTime time.Time) {
-	value.attrFlag.Clear(AttrFlagExists)
-	value.attrFlag.Set(AttrFlagValid)
-	value.cachedAt = deletedTime
-	value.attr = &internal.ObjAttr{}
+	if !value.isDeleted() {
+		value.attrFlag.Clear(AttrFlagExists)
+		value.attrFlag.Set(AttrFlagValid)
+		value.cachedAt = deletedTime
+		value.attr = &internal.ObjAttr{}
+		for _, val := range value.children {
+			val.markDeleted(deletedTime)
+		}
+	}
 }
 
 func (value *attrCacheItem) invalidate() {
-	value.attrFlag.Clear(AttrFlagValid)
-	value.attr = &internal.ObjAttr{}
+	if value.valid() {
+		value.attrFlag.Clear(AttrFlagValid)
+		value.attr = &internal.ObjAttr{}
+		for _, val := range value.children {
+			val.invalidate()
+		}
+	}
 }
 
 func (value *attrCacheItem) markInCloud(inCloud bool) {
diff --git a/component/attr_cache/cacheMap_test.go b/component/attr_cache/cacheMap_test.go
new file mode 100644
index 000000000..ff471ba4f
--- /dev/null
+++ b/component/attr_cache/cacheMap_test.go
@@ -0,0 +1,335 @@
+/*
+   Licensed under the MIT License.
+
+   Copyright © 2023 Seagate Technology LLC and/or its Affiliates
+   Copyright © 2020-2023 Microsoft Corporation. All rights reserved.
+   Author :
+
+   Permission is hereby granted, free of charge, to any person obtaining a copy
+   of this software and associated documentation files (the "Software"), to deal
+   in the Software without restriction, including without limitation the rights
+   to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+   copies of the Software, and to permit persons to whom the Software is
+   furnished to do so, subject to the following conditions:
+
+   The above copyright notice and this permission notice shall be included in all
+   copies or substantial portions of the Software.
+
+   THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+   IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+   FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+   AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+   LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+   OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+   SOFTWARE
+*/
+
+package attr_cache
+
+import (
+	"container/list"
+	"testing"
+	"time"
+
+	"github.com/Seagate/cloudfuse/internal"
+
+	"github.com/stretchr/testify/assert"
+	"github.com/stretchr/testify/suite"
+)
+
+type cacheMapTestSuite struct {
+	suite.Suite
+	assert            *assert.Assertions
+	rootAttrCacheItem attrCacheItem
+}
+
+// SetupTest builds the attrCacheItem tree that every test in this suite starts from
+func (suite *cacheMapTestSuite) SetupTest() {
+	suite.assert = assert.New(suite.T())
+
+	rootAttr := internal.CreateObjAttrDir("")
+	suite.rootAttrCacheItem = *newAttrCacheItem(rootAttr, true, time.Now())
+
+	//set up nested Dir tree
+	nestedDir, nestedFiles := generateFSTree("a")
+
+	for dir := nestedDir.Front(); dir != nil; dir = dir.Next() {
+		attr := internal.CreateObjAttrDir(dir.Value.(string))
+		suite.rootAttrCacheItem.insert(attr, true, time.Now())
+	}
+
+	for file := nestedFiles.Front(); file != nil; file = file.Next() {
+		attr := internal.CreateObjAttr(file.Value.(string), 1024, time.Now())
+		suite.rootAttrCacheItem.insert(attr, true, time.Now())
+	}
+
+}
+
+func (suite *cacheMapTestSuite) TestInsertFile() {
+	//create path string in form of test/dir/file
+	path := "a/c1/TestFile.txt"
+	startTime := time.Now()
+	attr := internal.CreateObjAttr(path, 1024, startTime)
+
+	//insert path into suite.rootAttrCacheItem
+	suite.rootAttrCacheItem.insert(attr, true, startTime)
+
+	//verify correct values are in cacheMapTree
+	cachedItem, err := suite.rootAttrCacheItem.get(path)
+	suite.assert.Nil(err)
+	suite.assert.NotNil(cachedItem)
+	suite.assert.EqualValues(path, cachedItem.attr.Path)
+	suite.assert.EqualValues(1024, cachedItem.attr.Size)
+	suite.assert.EqualValues(false, cachedItem.attr.IsDir())
+}
+
+func (suite *cacheMapTestSuite) TestInsertFolder() {
+	//create path string in form of test/dir/file
+	path := "a/c1/TestFolder"
+	startTime := time.Now()
+	attr := internal.CreateObjAttrDir(path)
+
+	//insert path into suite.rootAttrCacheItem
+
+	suite.rootAttrCacheItem.insert(attr, true, startTime)
+
+	//verify correct values are in cacheMapTree
+	cachedItem, err := suite.rootAttrCacheItem.get(path)
+	suite.assert.Nil(err)
+	suite.assert.NotNil(cachedItem)
+	suite.assert.EqualValues(path, cachedItem.attr.Path)
+	suite.assert.EqualValues(4096, cachedItem.attr.Size)
+	suite.assert.EqualValues(true, cachedItem.attr.IsDir())
+}
+
+func (suite *cacheMapTestSuite) TestInsertDirsAndFiles() {
+	path := "a/c1/c12/c3p0/newfile.txt"
+	timestamp := time.Now()
+	attr := internal.CreateObjAttr(path, 1024, timestamp)
+
+	cachedItem := suite.rootAttrCacheItem.insert(attr, true, timestamp)
+
+	suite.assert.NotNil(cachedItem)
+	suite.assert.Equal(path, cachedItem.attr.Path)
+	suite.assert.Equal(int64(1024), cachedItem.attr.Size)
+	suite.assert.False(cachedItem.attr.IsDir())
+}
+
+func (suite *cacheMapTestSuite) TestMarkDeleted() {
+	deleteTime := time.Now()
+
+	//insert an item
+	path := "a/c1/TempFile.txt"
+	startTime := time.Now()
+	attr := internal.CreateObjAttr(path, 1024, startTime)
+
+	//insert path into suite.rootAttrCacheItem
+	cachedItem := suite.rootAttrCacheItem.insert(attr, true, startTime)
+
+	//validate it is there
+	suite.assert.NotNil(cachedItem)
+	suite.assert.EqualValues(path, cachedItem.attr.Path)
+	suite.assert.EqualValues(1024, cachedItem.attr.Size)
+	suite.assert.EqualValues(startTime, cachedItem.attr.Mtime)
+	suite.assert.EqualValues(false, cachedItem.attr.IsDir())
+	suite.assert.EqualValues("TempFile.txt", cachedItem.attr.Name)
+	suite.assert.EqualValues(true, cachedItem.attrFlag.IsSet(AttrFlagValid))
+
+	// mark it deleted
+	cachedItem.markDeleted(deleteTime)
+
+	//verify it is marked deleted
+	suite.confirmMarkedDeleted(cachedItem)
+}
+
+func (suite *cacheMapTestSuite) TestInvalidate() {
+	//insert an item
+	path := "a/c1/TempFile.txt"
+	startTime := time.Now()
+	attr := internal.CreateObjAttr(path, 1024, startTime)
+
+	//insert path into suite.rootAttrCacheItem
+	cachedItem := suite.rootAttrCacheItem.insert(attr, true, startTime)
+
+	//validate it is there
+	suite.assert.NotNil(cachedItem)
+	suite.assert.EqualValues(path, cachedItem.attr.Path)
+	suite.assert.EqualValues(1024, cachedItem.attr.Size)
+	suite.assert.EqualValues("TempFile.txt", cachedItem.attr.Name)
+	suite.assert.EqualValues(startTime, cachedItem.attr.Mtime)
+	suite.assert.EqualValues(false, cachedItem.attr.IsDir())
+	suite.assert.EqualValues(true, cachedItem.attrFlag.IsSet(AttrFlagValid))
+	suite.assert.EqualValues(true, cachedItem.attrFlag.IsSet(AttrFlagExists))
+
+	//invalidate
+	cachedItem.invalidate()
+
+	//verify it is invalid
+	suite.confirmInvalidated(cachedItem)
+}
+
+func (suite *cacheMapTestSuite) TestDeleteFolder() {
+	deleteTime := time.Now()
+
+	//insert an item
+	path := "a/c1/f/TempFile.txt"
+	parentPath := "a/c1"
+	startTime := time.Now()
+	attr := internal.CreateObjAttr(path, 1024, startTime)
+
+	//insert path into suite.rootAttrCacheItem
+	cachedItem := suite.rootAttrCacheItem.insert(attr, true, startTime)
+
+	//validate file is there
+	suite.assert.NotNil(cachedItem)
+	suite.assert.EqualValues(path, cachedItem.attr.Path)
+	suite.assert.EqualValues(1024, cachedItem.attr.Size)
+	suite.assert.EqualValues(startTime, cachedItem.attr.Mtime)
+	suite.assert.EqualValues(false, cachedItem.attr.IsDir())
+	suite.assert.EqualValues("TempFile.txt", cachedItem.attr.Name)
+	suite.assert.EqualValues(path, cachedItem.attr.Path)
+	suite.assert.EqualValues(true, cachedItem.attrFlag.IsSet(AttrFlagValid))
+
+	//validate folder "c1"
+	cachedItem, err := suite.rootAttrCacheItem.get(parentPath)
+	suite.assert.Nil(err)
+	suite.assert.NotNil(cachedItem)
+	suite.assert.EqualValues(parentPath, cachedItem.attr.Path)
+	suite.assert.EqualValues(4096, cachedItem.attr.Size)
+	suite.assert.EqualValues(true, cachedItem.attr.IsDir())
+	suite.assert.EqualValues("c1", cachedItem.attr.Name)
+	suite.assert.EqualValues(parentPath, cachedItem.attr.Path)
+	suite.assert.EqualValues(true, cachedItem.attrFlag.IsSet(AttrFlagValid))
+
+	//mark "c1" folder deleted
+	cachedItem.markDeleted(deleteTime)
+
+	//verify "c1" folder is marked deleted
+	suite.confirmMarkedDeleted(cachedItem)
+}
+
+func (suite *cacheMapTestSuite) TestInvalidateFolder() {
+	//insert an item
+	path := "a/c1/f/TempFile.txt"
+	parentPath := "a/c1"
+	startTime := time.Now()
+	attr := internal.CreateObjAttr(path, 1024, startTime)
+
+	//insert path into suite.rootAttrCacheItem
+	suite.rootAttrCacheItem.insert(attr, true, startTime)
+
+	//validate file is there
+	cachedItem, err := suite.rootAttrCacheItem.get(path)
+	suite.assert.Nil(err)
+	suite.assert.NotNil(cachedItem)
+	suite.assert.EqualValues(path, cachedItem.attr.Path)
+	suite.assert.EqualValues(1024, cachedItem.attr.Size)
+	suite.assert.EqualValues(startTime, cachedItem.attr.Mtime)
+	suite.assert.EqualValues(false, cachedItem.attr.IsDir())
+	suite.assert.EqualValues("TempFile.txt", cachedItem.attr.Name)
+	suite.assert.EqualValues(path, cachedItem.attr.Path)
+	suite.assert.EqualValues(true, cachedItem.attrFlag.IsSet(AttrFlagValid))
+
+	//validate folder "c1"
+	cachedItem, err = suite.rootAttrCacheItem.get(parentPath)
+	suite.assert.Nil(err)
+	suite.assert.NotNil(cachedItem)
+	suite.assert.EqualValues(parentPath, cachedItem.attr.Path)
+	suite.assert.EqualValues(4096, cachedItem.attr.Size)
+	suite.assert.EqualValues(true, cachedItem.attr.IsDir())
+	suite.assert.EqualValues("c1", cachedItem.attr.Name)
+	suite.assert.EqualValues(parentPath, cachedItem.attr.Path)
+	suite.assert.EqualValues(true, cachedItem.attrFlag.IsSet(AttrFlagValid))
+
+	//mark "c1" folder as invalid
+	cachedItem.invalidate()
+
+	//verify "c1" folder is invalid
+	cachedItem, err = suite.rootAttrCacheItem.get(parentPath)
+	suite.assert.Nil(err)
+	suite.assert.NotNil(cachedItem)
+	suite.assert.EqualValues(false, cachedItem.attrFlag.IsSet(AttrFlagValid))
+	suite.assert.EqualValues(cachedItem.attr, &internal.ObjAttr{})
+
+	// verify subtree is invalid
+	suite.assert.NotNil(cachedItem.children)
+	suite.confirmInvalidated(cachedItem)
+}
+
+func (suite *cacheMapTestSuite) TestGetRoot() {
+	path := ""
+	item, err := suite.rootAttrCacheItem.get(path)
+	suite.assert.Nil(err)
+	suite.assert.NotNil(item)
+	attrStr := item.attr.Path
+	suite.assert.EqualValues(path, attrStr)
+}
+
+func (suite *cacheMapTestSuite) TestGet() {
+	path := "a/c1/gc1"
+	item, err := suite.rootAttrCacheItem.get(path)
+	suite.assert.Nil(err)
+	suite.assert.NotNil(item)
+	attrStr := item.attr.Path
+	suite.assert.EqualValues(path, attrStr)
+}
+
+func TestCacheMapTestSuite(t *testing.T) {
+	suite.Run(t, new(cacheMapTestSuite))
+}
+
+// Directory structure
+// a/
+//
+// a/c1/
+// a/c1/gc1
+// a/c2
+//
+// ab/
+//
+// ab/c1
+//
+// ac
+func generateFSTree(path string) (*list.List, *list.List) {
+	path = internal.TruncateDirName(path)
+
+	dirPaths := list.New()
+	dirPaths.PushBack(path + "/")
+	dirPaths.PushBack(path + "/c1" + "/")
+	dirPaths.PushBack(path + "b" + "/")
+
+	filePaths := list.New()
+	filePaths.PushBack(path + "/c2")
+	filePaths.PushBack(path + "/c1" + "/gc1")
+	filePaths.PushBack(path + "b" + "/c1")
+	filePaths.PushBack(path + "c")
+
+	return dirPaths, filePaths
+}
+
+func (suite *cacheMapTestSuite) confirmMarkedDeleted(item *attrCacheItem) {
+
+	suite.assert.NotNil(item)
+	suite.assert.EqualValues(true, item.isDeleted())
+	suite.assert.EqualValues(false, item.exists())
+	suite.assert.EqualValues(item.attr, &internal.ObjAttr{})
+
+	if item.children != nil {
+		for _, val := range item.children {
+			suite.confirmMarkedDeleted(val)
+		}
+	}
+}
+
+func (suite *cacheMapTestSuite) confirmInvalidated(item *attrCacheItem) {
+
+	suite.assert.NotNil(item)
+	suite.assert.EqualValues(false, item.attrFlag.IsSet(AttrFlagValid))
+	suite.assert.EqualValues(item.attr, &internal.ObjAttr{})
+
+	if item.children != nil {
+		for _, val := range item.children {
+			suite.confirmInvalidated(val)
+		}
+	}
+}
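For readers new to the tree-shaped cache, the following package-internal sketch (not part of the change above; the test name and flow are illustrative only) shows how insert and get are expected to interact: insert creates any missing intermediate directory nodes on the way down, and get walks the children maps one path element at a time.

package attr_cache

import (
	"testing"
	"time"

	"github.com/Seagate/cloudfuse/internal"
)

// Illustrative only: exercises the insert/get flow defined in cacheMap.go.
func TestCacheMapUsageSketch(t *testing.T) {
	// start from an empty root directory item
	root := newAttrCacheItem(internal.CreateObjAttrDir(""), true, time.Now())

	// inserting a deep path creates the "a" and "a/b" directory nodes before
	// storing the leaf item for the file itself
	root.insert(internal.CreateObjAttr("a/b/file.txt", 1024, time.Now()), true, time.Now())

	// get walks root.children["a"].children["b"] and returns the directory node
	dir, err := root.get("a/b")
	if err != nil || !dir.attr.IsDir() {
		t.Fatalf("expected a/b to be cached as a directory, err=%v", err)
	}

	// the leaf is reachable the same way and keeps its file attributes
	file, err := root.get("a/b/file.txt")
	if err != nil || file.attr.IsDir() {
		t.Fatalf("expected a/b/file.txt to be cached as a file, err=%v", err)
	}
}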