Skip to content

Commit

Permalink
(maint) add a little bit of verbosity to deploy_signed_repos_to_s3
Browse files Browse the repository at this point in the history
- Make deploy_signed_repos_to_s3 print a message that it is doing the s3sync
- Refactor messy code around it
- Make rspec work
  • Loading branch information
e-gris committed Oct 23, 2024
1 parent e03e517 commit 396115e
Show file tree
Hide file tree
Showing 4 changed files with 39 additions and 19 deletions.
2 changes: 2 additions & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,8 @@
This changelog adheres to [Keep a CHANGELOG](http://keepachangelog.com/).

## [Unreleased]
### Changed
- (maint) Make deploy_signed_repos_to_s3 just a bit more verbose

## [0.122.2] - 2024-10-07
### Changed
Expand Down
17 changes: 11 additions & 6 deletions lib/packaging/util/net.rb
Original file line number Diff line number Diff line change
Expand Up @@ -216,15 +216,20 @@ def rsync_from(source, origin_host, dest, opts = {})
)
end

# Sync a local directory to an S3 bucket using s3cmd.
#
# @param source [String] local path to sync from
# @param target_bucket [String] destination S3 bucket name
# @param target_directory [String] key prefix inside the bucket (default: bucket root)
# @param flags [Array<String>] extra s3cmd options, e.g. ['--acl-public']
# @return [String] stdout captured from the s3cmd invocation
# @raise [RuntimeError] if ~/.s3cfg is missing (s3cmd cannot authenticate without it)
def s3sync_to(source, target_bucket, target_directory = '', flags = [])
  s3cmd = Pkg::Util::Tool.check_tool('s3cmd')
  config_file = File.join(ENV['HOME'], '.s3cfg')

  # s3cmd reads credentials from ~/.s3cfg; bail out early when it is absent.
  unless File.exist?(config_file)
    fail "#{config_file} does not exist. It is required to ship files using s3cmd."
  end

  destination = "s3://#{target_bucket}/#{target_directory}/"
  command = "#{s3cmd} sync #{flags.join(' ')} '#{source}' #{destination}"

  output, = Pkg::Util::Execution.capture3(command)
  output
end

# This is fairly absurd. We're implementing curl by shelling out. What do I
Expand Down
12 changes: 6 additions & 6 deletions spec/lib/packaging/util/net_spec.rb
Original file line number Diff line number Diff line change
Expand Up @@ -182,8 +182,8 @@

it 'should fail if ~/.s3cfg is not present' do
expect(Pkg::Util::Tool).to receive(:check_tool).with("s3cmd").and_return(s3cmd)
expect(Pkg::Util::File)
.to receive(:file_exists?)
expect(File)
.to receive(:exist?)
.with(File.join(ENV['HOME'], '.s3cfg'))
.and_return(false)
expect { Pkg::Util::Net.s3sync_to('foo', 'bar', 'boo') }
Expand All @@ -192,8 +192,8 @@

it "should s3 sync 'thing' to 's3://foo@bar/home/foo/' with no flags" do
expect(Pkg::Util::Tool).to receive(:check_tool).with("s3cmd").and_return(s3cmd)
expect(Pkg::Util::File)
.to receive(:file_exists?)
expect(File)
.to receive(:exist?)
.with(File.join(ENV['HOME'], '.s3cfg'))
.and_return(true)
expect(Pkg::Util::Execution)
Expand All @@ -204,8 +204,8 @@

it "should s3sync 'thing' to 's3://foo@bar/home/foo/' with --delete-removed and --acl-public" do
expect(Pkg::Util::Tool).to receive(:check_tool).with("s3cmd").and_return(s3cmd)
expect(Pkg::Util::File)
.to receive(:file_exists?)
expect(File)
.to receive(:exist?)
.with(File.join(ENV['HOME'], '.s3cfg'))
.and_return(true)
expect(Pkg::Util::Execution)
Expand Down
27 changes: 20 additions & 7 deletions tasks/nightly_repos.rake
Original file line number Diff line number Diff line change
Expand Up @@ -181,15 +181,28 @@ namespace :pl do
end

task :deploy_signed_repos_to_s3, [:target_bucket] => "pl:fetch" do |t, args|
  fail ":target_bucket is a required argument to #{t}" unless args.target_bucket
  target_bucket = args.target_bucket

  # Ship it to the target for consumption.

  # First we ship the latest and clean up any repo-configs that
  # are no longer valid with --delete-removed and --acl-public
  source = "pkg/#{Pkg::Config.project}-latest/"
  # NOTE: no trailing comma here — a trailing comma would make Ruby parse the
  # following statement as part of a multiple assignment, turning
  # target_directory into an Array and corrupting the S3 destination path.
  target_directory = "#{Pkg::Config.project}-latest"
  puts("S3 sync from '#{Dir.pwd}/#{source}' to 's3://#{target_bucket}/#{target_directory}'")
  Pkg::Util::Net.s3sync_to(source, target_bucket, target_directory,
                           ['--acl-public', '--delete-removed', '--follow-symlinks'])

  # Then we ship the sha version with just --acl-public
  source = "pkg/#{Pkg::Config.project}/"
  target_directory = Pkg::Config.project
  puts("S3 sync from '#{Dir.pwd}/#{source}' to 's3://#{target_bucket}/#{target_directory}'")
  Pkg::Util::Net.s3sync_to(source, target_bucket, target_directory,
                           ['--acl-public', '--follow-symlinks'])

  # Trailing space before the continuation keeps a separator between
  # "uploaded to" and the quoted bucket URL in the final message.
  puts "'#{Pkg::Config.ref}' of '#{Pkg::Config.project}' has been uploaded to " \
       "'s3://#{target_bucket}'"
end

task :generate_signed_repo_configs, [:target_prefix] => "pl:fetch" do |t, args|
Expand Down

0 comments on commit 396115e

Please sign in to comment.