Commit 2900351

renamed variables for readability/clarity and updated gems for security
genschmitt committed Jun 5, 2024
1 parent 02cb59b commit 2900351
Showing 2 changed files with 29 additions and 64 deletions.
Gemfile.lock: 69 changes (17 additions, 52 deletions)
@@ -2,86 +2,51 @@ GEM
   remote: https://rubygems.org/
   specs:
     aws-eventstream (1.3.0)
-    aws-partitions (1.862.0)
-    aws-sdk-core (3.190.0)
+    aws-partitions (1.940.0)
+    aws-sdk-core (3.197.0)
       aws-eventstream (~> 1, >= 1.3.0)
       aws-partitions (~> 1, >= 1.651.0)
       aws-sigv4 (~> 1.8)
       jmespath (~> 1, >= 1.6.1)
-    aws-sdk-kms (1.74.0)
-      aws-sdk-core (~> 3, >= 3.188.0)
+    aws-sdk-kms (1.83.0)
+      aws-sdk-core (~> 3, >= 3.197.0)
       aws-sigv4 (~> 1.1)
-    aws-sdk-s3 (1.141.0)
-      aws-sdk-core (~> 3, >= 3.189.0)
+    aws-sdk-s3 (1.151.0)
+      aws-sdk-core (~> 3, >= 3.194.0)
       aws-sdk-kms (~> 1)
       aws-sigv4 (~> 1.8)
-    aws-sdk-sqs (1.69.0)
-      aws-sdk-core (~> 3, >= 3.188.0)
+    aws-sdk-sqs (1.74.0)
+      aws-sdk-core (~> 3, >= 3.193.0)
       aws-sigv4 (~> 1.1)
     aws-sigv4 (1.8.0)
       aws-eventstream (~> 1, >= 1.0.2)
-    concurrent-ruby (1.2.2)
-    config (5.0.0)
+    config (5.5.1)
       deep_merge (~> 1.2, >= 1.2.1)
-      dry-validation (~> 1.0, >= 1.0.0)
     deep_merge (1.2.2)
     docile (1.4.0)
-    dry-configurable (1.1.0)
-      dry-core (~> 1.0, < 2)
-      zeitwerk (~> 2.6)
-    dry-core (1.0.1)
-      concurrent-ruby (~> 1.0)
-      zeitwerk (~> 2.6)
-    dry-inflector (1.0.0)
-    dry-initializer (3.1.1)
-    dry-logic (1.5.0)
-      concurrent-ruby (~> 1.0)
-      dry-core (~> 1.0, < 2)
-      zeitwerk (~> 2.6)
-    dry-schema (1.13.3)
-      concurrent-ruby (~> 1.0)
-      dry-configurable (~> 1.0, >= 1.0.1)
-      dry-core (~> 1.0, < 2)
-      dry-initializer (~> 3.0)
-      dry-logic (>= 1.4, < 2)
-      dry-types (>= 1.7, < 2)
-      zeitwerk (~> 2.6)
-    dry-types (1.7.1)
-      concurrent-ruby (~> 1.0)
-      dry-core (~> 1.0)
-      dry-inflector (~> 1.0)
-      dry-logic (~> 1.4)
-      zeitwerk (~> 2.6)
-    dry-validation (1.10.0)
-      concurrent-ruby (~> 1.0)
-      dry-core (~> 1.0, < 2)
-      dry-initializer (~> 3.0)
-      dry-schema (>= 1.12, < 2)
-      zeitwerk (~> 2.6)
-    ffi (1.16.3)
-    ffi-libarchive (1.1.13)
+    ffi (1.17.0-x86_64-darwin)
+    ffi-libarchive (1.1.14)
       ffi (~> 1.0)
     jmespath (1.6.2)
-    mime-types (3.5.1)
+    mime-types (3.5.2)
       mime-types-data (~> 3.2015)
-    mime-types-data (3.2023.1003)
+    mime-types-data (3.2024.0507)
     mimemagic (0.3.10)
       nokogiri (~> 1)
       rake
-    minitest (5.20.0)
-    nokogiri (1.15.5-x86_64-darwin)
+    minitest (5.23.1)
+    nokogiri (1.16.5-x86_64-darwin)
       racc (~> 1.4)
     os (1.1.4)
-    racc (1.7.3)
-    rake (13.1.0)
+    racc (1.8.0)
+    rake (13.2.1)
     rubyzip (2.3.2)
     simplecov (0.22.0)
       docile (~> 1.1)
       simplecov-html (~> 0.11)
       simplecov_json_formatter (~> 0.1)
     simplecov-html (0.12.3)
     simplecov_json_formatter (0.1.4)
-    zeitwerk (2.6.12)
 
 PLATFORMS
   x86_64-darwin-21
lib/archive_extractor.rb: 24 changes (12 additions, 12 deletions)
@@ -45,14 +45,14 @@ def extract
 get_object(local_path, error)
 
 extraction = Extraction.new(@binary_name, local_path, @web_id, @mime_type)
-return_value = perform_extraction(extraction, error)
+extraction_return_value = perform_extraction(extraction, error)
 s3_path = "messages/#{@web_id}.json"
-s3_put_status, s3_put_error = put_json_response(return_value, s3_path)
+s3_put_status, s3_put_error = put_json_response(extraction_return_value, s3_path)
 
 s3_put_errors = s3_put_error.map {|o| Hash[o.each_pair.to_a]}
 
-return_value = {"bucket_name" => @bucket_name, "object_key" => s3_path, "s3_status" => s3_put_status, "error" => s3_put_errors}
-send_sqs_message(return_value)
+s3_message = {"bucket_name" => @bucket_name, "object_key" => s3_path, "s3_status" => s3_put_status, "error" => s3_put_errors}
+send_sqs_message(s3_message)
 
 ensure
 FileUtils.rm_rf(dirname, :secure => true)
@@ -96,16 +96,16 @@ def perform_extraction(extraction, error)
 error.concat(extraction.error)
 items = extraction.nested_items.map { |o| Hash[o.each_pair.to_a] }
 errors = error.map {|o| Hash[o.each_pair.to_a]}
-return_value = {"web_id" => @web_id, "status" => status, "error" => errors, "peek_type" => extraction.peek_type, "peek_text" => extraction.peek_text, "nested_items" => items}
+extraction_return_value = {"web_id" => @web_id, "status" => status, "error" => errors, "peek_type" => extraction.peek_type, "peek_text" => extraction.peek_text, "nested_items" => items}
 rescue StandardError => e
 error.push({"task_id" => @web_id, "extraction_process_report" => "Error extracting #{@object_key} with ID #{@web_id}: #{e.message}"})
 errors = error.map {|o| Hash[o.each_pair.to_a]}
-return_value = {"web_id" => @web_id, "status" => ExtractionStatus::ERROR, "error" => errors, "peek_type" => PeekType::NONE, "peek_text" => nil, "nested_items" => []}
+extraction_return_value = {"web_id" => @web_id, "status" => ExtractionStatus::ERROR, "error" => errors, "peek_type" => PeekType::NONE, "peek_text" => nil, "nested_items" => []}
 end
-return return_value
+return extraction_return_value
 end
 
-def send_sqs_message(return_value)
+def send_sqs_message(s3_message)
 # Send a message to a queue.
 queue_name = Settings.aws.sqs.queue_name
 queue_url = Settings.aws.sqs.queue_url
@@ -114,7 +114,7 @@ def send_sqs_message(return_value)
 # Create and send a message.
 @sqs.send_message({
 queue_url: queue_url,
-message_body: return_value.to_json,
+message_body: s3_message.to_json,
 message_attributes: {}
 })
 LOGGER.info("Sending message in queue #{queue_name} for object #{@object_key} with ID #{@web_id}")
@@ … @@
 end
 end
 
-def put_json_response(return_value, s3_path)
+def put_json_response(extraction_return_value, s3_path)
 s3_put_error = []
 json_bucket = Settings.aws.s3.json_bucket
 begin
 @s3.put_object({
-body: return_value.to_json,
+body: extraction_return_value.to_json,
 bucket: json_bucket,
 key: s3_path,
 })
-LOGGER.info(return_value.to_json)
+LOGGER.info(extraction_return_value.to_json)
 LOGGER.info("Putting json response for object #{@object_key} with ID #{@web_id} in S3 bucket #{json_bucket} with key #{s3_path}")
 s3_put_status = ExtractionStatus::SUCCESS
 rescue StandardError => e
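For reference, a minimal standalone sketch of the flow these renames describe: the extraction result (extraction_return_value) is written to S3 as JSON, and a small pointer message (s3_message) describing that object is then sent to SQS. Only the aws-sdk-s3 / aws-sdk-sqs calls mirror the diff above; the bucket name, queue URL, and payload values below are placeholders, not the repository's Settings values.

# Hypothetical sketch; assumes AWS credentials/region are already configured.
require "json"
require "aws-sdk-s3"
require "aws-sdk-sqs"

s3  = Aws::S3::Client.new
sqs = Aws::SQS::Client.new

web_id = "example-web-id"     # placeholder identifier
extraction_return_value = {   # stands in for the result of perform_extraction
  "web_id"       => web_id,
  "status"       => "success",
  "error"        => [],
  "peek_type"    => "text",   # placeholder peek values
  "peek_text"    => nil,
  "nested_items" => []
}

# Write the extraction result to the JSON bucket (as put_json_response does).
s3_path = "messages/#{web_id}.json"
s3.put_object(
  bucket: "example-json-bucket",   # placeholder for Settings.aws.s3.json_bucket
  key:    s3_path,
  body:   extraction_return_value.to_json
)

# Tell the queue where the JSON landed (as send_sqs_message does).
s3_message = {
  "bucket_name" => "example-json-bucket",
  "object_key"  => s3_path,
  "s3_status"   => "success",
  "error"       => []
}
sqs.send_message(
  queue_url: "https://sqs.us-east-1.amazonaws.com/123456789012/example-queue", # placeholder URL
  message_body: s3_message.to_json,
  message_attributes: {}
)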
