Unexport some externally irrelevant symbols from uploadstore (sourcegraph#63647)

These symbols aren't used outside of the package, so they are unexported
for a cleaner API surface.
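
For context, Go derives visibility from an identifier's first letter: an upper-case initial exports the symbol to importing packages, while a lower-case initial keeps it private to its package. A minimal illustration with hypothetical names (not part of this commit):

package uploadstore

// Exported: other packages can call uploadstore.PublicHelper.
func PublicHelper() {}

// Unexported: only code inside package uploadstore can call this.
func privateHelper() {}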

Test plan:

The Go compiler doesn't complain.
eseliger authored Jul 10, 2024
1 parent 9435fde commit 2e392e0
Showing 4 changed files with 11 additions and 11 deletions.
4 changes: 2 additions & 2 deletions internal/uploadstore/gcs_client.go
@@ -123,7 +123,7 @@ func (s *gcsStore) Get(ctx context.Context, key string) (_ io.ReadCloser, err er
return nil, errors.Wrap(err, "failed to get object")
}

- return NewExtraCloser(rc, done), nil
+ return newExtraCloser(rc, done), nil
}

func (s *gcsStore) Upload(ctx context.Context, key string, r io.Reader) (_ int64, err error) {
@@ -229,7 +229,7 @@ func (s *gcsStore) create(ctx context.Context, bucket gcsBucketHandle) error {
}

func (s *gcsStore) deleteSources(ctx context.Context, bucket gcsBucketHandle, sources []string) error {
- return ForEachString(sources, func(index int, source string) error {
+ return forEachString(sources, func(index int, source string) error {
if err := bucket.Object(source).Delete(ctx); err != nil {
return errors.Wrap(err, "failed to delete source object")
}
4 changes: 2 additions & 2 deletions internal/uploadstore/pool.go
@@ -6,12 +6,12 @@ import (
"github.com/sourcegraph/conc/pool"
)

- // ForEachString invokes the given callback once for each of the
+ // forEachString invokes the given callback once for each of the
// given string values. The callback function will receive the index as well
// as the string value as parameters. Callbacks will be invoked in a number
// of concurrent routines proportional to the maximum number of CPUs that
// can be executing simultaneously.
- func ForEachString(values []string, f func(index int, value string) error) error {
+ func forEachString(values []string, f func(index int, value string) error) error {
p := pool.New().
WithErrors().
WithMaxGoroutines(runtime.GOMAXPROCS(0))
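
The rest of the function body is cut off in the hunk above; below is a minimal sketch of how forEachString plausibly continues, assuming the github.com/sourcegraph/conc/pool API shown in the import block. The loop and error handling are an illustration, not the verbatim Sourcegraph implementation:

func forEachString(values []string, f func(index int, value string) error) error {
	// Bounded pool: at most GOMAXPROCS callbacks execute concurrently,
	// and errors returned by callbacks are collected.
	p := pool.New().
		WithErrors().
		WithMaxGoroutines(runtime.GOMAXPROCS(0))

	for i, v := range values {
		i, v := i, v // capture loop variables for the closure
		p.Go(func() error {
			return f(i, v)
		})
	}

	// Wait blocks until every callback has returned and joins their errors.
	return p.Wait()
}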
4 changes: 2 additions & 2 deletions internal/uploadstore/reader.go
@@ -13,8 +13,8 @@ func (c *closeWrapper) Close() error {
return nil
}

- // NewExtraCloser wraps a ReadCloser with an extra close function
+ // newExtraCloser wraps a ReadCloser with an extra close function
// that will be called after the underlying ReadCloser has been closed.
- func NewExtraCloser(rc io.ReadCloser, close func()) io.ReadCloser {
+ func newExtraCloser(rc io.ReadCloser, close func()) io.ReadCloser {
return &closeWrapper{ReadCloser: rc, close: close}
}
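
The closeWrapper struct itself sits above the lines shown in this hunk; a plausible sketch, assuming the field layout implied by the constructor:

// closeWrapper embeds a ReadCloser and carries an extra callback that
// newExtraCloser arranges to run after the underlying Close (e.g. to end
// an observation, as in the Get methods in this commit).
type closeWrapper struct {
	io.ReadCloser
	close func()
}

With this layout, newExtraCloser(rc, done) simply populates both fields, and Close runs the callback before hitting the return nil visible above.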
10 changes: 5 additions & 5 deletions internal/uploadstore/s3_client.go
@@ -128,7 +128,7 @@ func (s *s3Store) List(ctx context.Context, prefix string) (_ *iterator.Iterator
}

func (s *s3Store) Get(ctx context.Context, key string) (_ io.ReadCloser, err error) {
- ctx, _, endObservation := s.operations.Get.With(ctx, &err, observation.Args{Attrs: []attribute.KeyValue{
+ ctx, traceLogger, endObservation := s.operations.Get.With(ctx, &err, observation.Args{Attrs: []attribute.KeyValue{
attribute.String("key", key),
}})
done := func() { endObservation(1, observation.Args{}) }
@@ -144,7 +144,7 @@ func (s *s3Store) Get(ctx context.Context, key string) (_ io.ReadCloser, err err
}

byteOffset += n
- s.operations.Get.Logger.Warn("Transient error while reading payload", log.String("key", key), log.Error(err))
+ traceLogger.Warn("Transient error while reading payload", log.String("key", key), log.Error(err))

if n == 0 {
zeroReads++
@@ -158,7 +158,7 @@ func (s *s3Store) Get(ctx context.Context, key string) (_ io.ReadCloser, err err
}
})

- return NewExtraCloser(io.NopCloser(reader), done), nil
+ return newExtraCloser(io.NopCloser(reader), done), nil
}

// ioCopyHook is a pointer to io.Copy. This function is replaced in unit tests so that we can
@@ -243,7 +243,7 @@ func (s *s3Store) Compose(ctx context.Context, destination string, sources ...st
var m sync.Mutex
etags := map[int]*string{}

- if err := ForEachString(sources, func(index int, source string) error {
+ if err := forEachString(sources, func(index int, source string) error {
partNumber := index + 1

copyResult, err := s.client.UploadPartCopy(ctx, &s3.UploadPartCopyInput{
@@ -374,7 +374,7 @@ func (s *s3Store) create(ctx context.Context) error {
}

func (s *s3Store) deleteSources(ctx context.Context, bucket string, sources []string) error {
- return ForEachString(sources, func(index int, source string) error {
+ return forEachString(sources, func(index int, source string) error {
if _, err := s.client.DeleteObject(ctx, &s3.DeleteObjectInput{
Bucket: aws.String(bucket),
Key: aws.String(source),
