diff --git a/CHANGELOG.md b/CHANGELOG.md
index b91d6bd9..db219b09 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -3,6 +3,8 @@
 This changelog adheres to [Keep a CHANGELOG](http://keepachangelog.com/).
 
 ## [Unreleased]
+### Changed
+- (maint) Make deploy_signed_repos_to_s3 just a bit more verbose
 
 ## [0.122.2] - 2024-10-07
 ### Changed
diff --git a/lib/packaging/util/net.rb b/lib/packaging/util/net.rb
index 3eac2453..7357a0ce 100644
--- a/lib/packaging/util/net.rb
+++ b/lib/packaging/util/net.rb
@@ -216,15 +216,20 @@ def rsync_from(source, origin_host, dest, opts = {})
       )
     end
 
-    def s3sync_to(source, target_bucket, target_directory = "", flags = [])
+    def s3sync_to(source, target_bucket, target_directory = '', flags = [])
       s3cmd = Pkg::Util::Tool.check_tool('s3cmd')
+      s3cfg_path = File.join(ENV['HOME'], '.s3cfg')
 
-      if Pkg::Util::File.file_exists?(File.join(ENV['HOME'], '.s3cfg'))
-        stdout, = Pkg::Util::Execution.capture3("#{s3cmd} sync #{flags.join(' ')} '#{source}' s3://#{target_bucket}/#{target_directory}/")
-        stdout
-      else
-        fail "#{File.join(ENV['HOME'], '.s3cfg')} does not exist. It is required to ship files using s3cmd."
+      unless File.exist?(s3cfg_path)
+        fail "#{s3cfg_path} does not exist. It is required to ship files using s3cmd."
       end
+
+      sync_command = "#{s3cmd} sync #{flags.join(' ')} '#{source}' " \
+                     "s3://#{target_bucket}/#{target_directory}/"
+
+      stdout, = Pkg::Util::Execution.capture3(sync_command)
+
+      stdout
     end
 
     # This is fairly absurd. We're implementing curl by shelling out. What do I
diff --git a/spec/lib/packaging/util/net_spec.rb b/spec/lib/packaging/util/net_spec.rb
index b9b3c5ff..5a12410d 100644
--- a/spec/lib/packaging/util/net_spec.rb
+++ b/spec/lib/packaging/util/net_spec.rb
@@ -182,8 +182,8 @@
 
     it 'should fail if ~/.s3cfg is not present' do
       expect(Pkg::Util::Tool).to receive(:check_tool).with("s3cmd").and_return(s3cmd)
-      expect(Pkg::Util::File)
-        .to receive(:file_exists?)
+      expect(File)
+        .to receive(:exist?)
         .with(File.join(ENV['HOME'], '.s3cfg'))
         .and_return(false)
       expect { Pkg::Util::Net.s3sync_to('foo', 'bar', 'boo') }
@@ -192,8 +192,8 @@
 
     it "should s3 sync 'thing' to 's3://foo@bar/home/foo/' with no flags" do
       expect(Pkg::Util::Tool).to receive(:check_tool).with("s3cmd").and_return(s3cmd)
-      expect(Pkg::Util::File)
-        .to receive(:file_exists?)
+      expect(File)
+        .to receive(:exist?)
         .with(File.join(ENV['HOME'], '.s3cfg'))
         .and_return(true)
       expect(Pkg::Util::Execution)
@@ -204,8 +204,8 @@
 
     it "should s3sync 'thing' to 's3://foo@bar/home/foo/' with --delete-removed and --acl-public" do
       expect(Pkg::Util::Tool).to receive(:check_tool).with("s3cmd").and_return(s3cmd)
-      expect(Pkg::Util::File)
-        .to receive(:file_exists?)
+      expect(File)
+        .to receive(:exist?)
         .with(File.join(ENV['HOME'], '.s3cfg'))
         .and_return(true)
       expect(Pkg::Util::Execution)
diff --git a/tasks/nightly_repos.rake b/tasks/nightly_repos.rake
index 399720bb..9a7f2e15 100644
--- a/tasks/nightly_repos.rake
+++ b/tasks/nightly_repos.rake
@@ -181,15 +181,28 @@ namespace :pl do
   end
 
   task :deploy_signed_repos_to_s3, [:target_bucket] => "pl:fetch" do |t, args|
-    target_bucket = args.target_bucket or fail ":target_bucket is a required argument to #{t}"
+    fail ":target_bucket is a required argument to #{t}" unless args.target_bucket
+    target_bucket = args.target_bucket
 
-    # Ship it to the target for consumption
-    # First we ship the latest and clean up any repo-configs that are no longer valid with --delete-removed and --acl-public
-    Pkg::Util::Net.s3sync_to("pkg/#{Pkg::Config.project}-latest/", target_bucket, "#{Pkg::Config.project}-latest", ["--acl-public", "--delete-removed", "--follow-symlinks"])
-    # Then we ship the sha version with just --acl-public
-    Pkg::Util::Net.s3sync_to("pkg/#{Pkg::Config.project}/", target_bucket, Pkg::Config.project, ["--acl-public", "--follow-symlinks"])
+    # Ship it to the target for consumption.
 
-    puts "'#{Pkg::Config.ref}' of '#{Pkg::Config.project}' has been shipped via s3 to '#{target_bucket}'"
+    # First we ship the latest and clean up any repo-configs that
+    # are no longer valid with --delete-removed and --acl-public
+    source = "pkg/#{Pkg::Config.project}-latest/"
+    target_directory = "#{Pkg::Config.project}-latest"
+    puts("S3 sync from '#{Dir.pwd}/#{source}' to 's3://#{target_bucket}/#{target_directory}'")
+    Pkg::Util::Net.s3sync_to(source, target_bucket, target_directory,
+                             ['--acl-public', '--delete-removed', '--follow-symlinks'])
+
+    # Then we ship the sha version with just --acl-public
+    source = "pkg/#{Pkg::Config.project}/"
+    target_directory = Pkg::Config.project
+    puts("S3 sync from '#{Dir.pwd}/#{source}' to 's3://#{target_bucket}/#{target_directory}'")
+    Pkg::Util::Net.s3sync_to(source, target_bucket, target_directory,
+                             ['--acl-public', '--follow-symlinks'])
+
+    puts "'#{Pkg::Config.ref}' of '#{Pkg::Config.project}' has been uploaded to " \
+         "'s3://#{target_bucket}'"
   end
 
   task :generate_signed_repo_configs, [:target_prefix] => "pl:fetch" do |t, args|
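
Usage sketch (not part of the diff; the bucket name below is illustrative and the bundler prefix is an assumption about how this repo's rake tasks are normally run):

    bundle exec rake pl:deploy_signed_repos_to_s3[example-bucket]

With this change, each Pkg::Util::Net.s3sync_to call in the task is preceded by a puts that prints the local source directory and the s3://<bucket>/<directory> destination, so the two sync targets show up in the build output before s3cmd runs.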