From 02618f39cef8d447306cd21e007dbdfd5a5ba875 Mon Sep 17 00:00:00 2001 From: Eric Griswold Date: Wed, 23 Oct 2024 16:29:27 -0700 Subject: [PATCH] (maint) add a little bit of verbosity to deploy_signed_repos_to_s3 - Make deploy_signed_repos_to_s3 print a message that it is doing the s3sync - Refactor messy code around it - Make rspec work - Update rubocop complaint --- CHANGELOG.md | 2 ++ lib/packaging/util/net.rb | 17 ++++++++++------- spec/lib/packaging/util/net_spec.rb | 18 ++++++++++-------- tasks/nightly_repos.rake | 27 ++++++++++++++++++++------- 4 files changed, 42 insertions(+), 22 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index b91d6bd90..db219b099 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -3,6 +3,8 @@ This changelog adheres to [Keep a CHANGELOG](http://keepachangelog.com/). ## [Unreleased] +### Changed +- (maint) Make deploy_signed_repos_to_s3 just a bit more verbose ## [0.122.2] - 2024-10-07 ### Changed diff --git a/lib/packaging/util/net.rb b/lib/packaging/util/net.rb index 3eac24534..c69d3fd9f 100644 --- a/lib/packaging/util/net.rb +++ b/lib/packaging/util/net.rb @@ -184,7 +184,7 @@ def rsync_exec(source, opts = {}) target_host: nil, extra_flags: nil, dryrun: ENV['DRYRUN'] -}.merge(opts.merge(opts.delete_if { |_, value| value.nil? })) # rubocop:disable Style/CollectionCompact +}.merge(opts.merge(opts.delete_if { |_, value| value.nil? 
})) stdout, = Pkg::Util::Execution.capture3(rsync_cmd(source, options), true) stdout @@ -216,15 +216,18 @@ def rsync_from(source, origin_host, dest, opts = {}) ) end - def s3sync_to(source, target_bucket, target_directory = "", flags = []) + def s3sync_to(source, target_bucket, target_directory = '', flags = []) s3cmd = Pkg::Util::Tool.check_tool('s3cmd') + s3cfg_path = File.join(ENV['HOME'], '.s3cfg') - if Pkg::Util::File.file_exists?(File.join(ENV['HOME'], '.s3cfg')) - stdout, = Pkg::Util::Execution.capture3("#{s3cmd} sync #{flags.join(' ')} '#{source}' s3://#{target_bucket}/#{target_directory}/") - stdout - else - fail "#{File.join(ENV['HOME'], '.s3cfg')} does not exist. It is required to ship files using s3cmd." + unless File.exist?(s3cfg_path) + fail "#{s3cfg_path} does not exist. It is required to ship files using s3cmd." end + + sync_command = "#{s3cmd} sync #{flags.join(' ')} '#{source}' " \ + "s3://#{target_bucket}/#{target_directory}/" + + Pkg::Util::Execution.capture3(sync_command, true) end # This is fairly absurd. We're implementing curl by shelling out. What do I diff --git a/spec/lib/packaging/util/net_spec.rb b/spec/lib/packaging/util/net_spec.rb index b9b3c5ffe..a53ca544d 100644 --- a/spec/lib/packaging/util/net_spec.rb +++ b/spec/lib/packaging/util/net_spec.rb @@ -182,8 +182,8 @@ it 'should fail if ~/.s3cfg is not present' do expect(Pkg::Util::Tool).to receive(:check_tool).with("s3cmd").and_return(s3cmd) - expect(Pkg::Util::File) - .to receive(:file_exists?) + expect(File) + .to receive(:exist?) .with(File.join(ENV['HOME'], '.s3cfg')) .and_return(false) expect { Pkg::Util::Net.s3sync_to('foo', 'bar', 'boo') } @@ -192,25 +192,27 @@ it "should s3 sync 'thing' to 's3://foo@bar/home/foo/' with no flags" do expect(Pkg::Util::Tool).to receive(:check_tool).with("s3cmd").and_return(s3cmd) - expect(Pkg::Util::File) - .to receive(:file_exists?) + expect(File) + .to receive(:exist?) 
.with(File.join(ENV['HOME'], '.s3cfg')) .and_return(true) expect(Pkg::Util::Execution) .to receive(:capture3) - .with("#{s3cmd} sync 'thing' s3://foo@bar/home/foo/") + .with("#{s3cmd} sync 'thing' s3://foo@bar/home/foo/", anything) Pkg::Util::Net.s3sync_to("thing", "foo@bar", "home/foo") end it "should s3sync 'thing' to 's3://foo@bar/home/foo/' with --delete-removed and --acl-public" do expect(Pkg::Util::Tool).to receive(:check_tool).with("s3cmd").and_return(s3cmd) - expect(Pkg::Util::File) - .to receive(:file_exists?) + expect(File) + .to receive(:exist?) .with(File.join(ENV['HOME'], '.s3cfg')) .and_return(true) expect(Pkg::Util::Execution) .to receive(:capture3) - .with("#{s3cmd} sync --delete-removed --acl-public 'thing' s3://foo@bar/home/foo/") + .with( + "#{s3cmd} sync --delete-removed --acl-public 'thing' s3://foo@bar/home/foo/", anything + ) Pkg::Util::Net.s3sync_to("thing", "foo@bar", "home/foo", ["--delete-removed", "--acl-public"]) end end diff --git a/tasks/nightly_repos.rake b/tasks/nightly_repos.rake index 399720bbd..acf0c0039 100644 --- a/tasks/nightly_repos.rake +++ b/tasks/nightly_repos.rake @@ -181,15 +181,28 @@ namespace :pl do end task :deploy_signed_repos_to_s3, [:target_bucket] => "pl:fetch" do |t, args| - target_bucket = args.target_bucket or fail ":target_bucket is a required argument to #{t}" + fail ":target_bucket is a required argument to #{t}" unless args.target_bucket + target_bucket = args.target_bucket - # Ship it to the target for consumption - # First we ship the latest and clean up any repo-configs that are no longer valid with --delete-removed and --acl-public - Pkg::Util::Net.s3sync_to("pkg/#{Pkg::Config.project}-latest/", target_bucket, "#{Pkg::Config.project}-latest", ["--acl-public", "--delete-removed", "--follow-symlinks"]) - # Then we ship the sha version with just --acl-public - Pkg::Util::Net.s3sync_to("pkg/#{Pkg::Config.project}/", target_bucket, Pkg::Config.project, ["--acl-public", "--follow-symlinks"]) + # Ship it to 
the target for consumption. - puts "'#{Pkg::Config.ref}' of '#{Pkg::Config.project}' has been shipped via s3 to '#{target_bucket}'" + # First we ship the latest and clean up any repo-configs that + # are no longer valid with --delete-removed and --acl-public + source = "pkg/#{Pkg::Config.project}-latest/" + target_directory = "#{Pkg::Config.project}-latest" + puts("S3 sync from '#{Dir.pwd}/#{source}' to 's3://#{target_bucket}/#{target_directory}'") + Pkg::Util::Net.s3sync_to(source, target_bucket, target_directory, + ['--acl-public', '--delete-removed', '--follow-symlinks']) + + # Then we ship the sha version with just --acl-public + source = "pkg/#{Pkg::Config.project}/" + target_directory = Pkg::Config.project + puts("S3 sync from '#{Dir.pwd}/#{source}' to 's3://#{target_bucket}/#{target_directory}'") + Pkg::Util::Net.s3sync_to(source, target_bucket, target_directory, + ['--acl-public', '--follow-symlinks']) + + puts "'#{Pkg::Config.ref}' of '#{Pkg::Config.project}' has been uploaded to " \ + "'s3://#{target_bucket}'" end task :generate_signed_repo_configs, [:target_prefix] => "pl:fetch" do |t, args|