diff --git a/.gitignore b/.gitignore
index be002c3712..71326a8a4a 100644
--- a/.gitignore
+++ b/.gitignore
@@ -53,5 +53,5 @@ yarn-debug.log*
 /app/assets/builds/*
 !/app/assets/builds/.keep
 
-# Ignore OSX system files
+.env
 .DS_Store
diff --git a/Capfile b/Capfile
new file mode 100644
index 0000000000..00b4c4af92
--- /dev/null
+++ b/Capfile
@@ -0,0 +1,26 @@
+# frozen_string_literal: true
+
+require 'capistrano/setup'
+
+require 'capistrano/deploy'
+
+require 'capistrano/scm/git'
+install_plugin Capistrano::SCM::Git
+
+require 'capistrano/rbenv'
+
+require 'capistrano/bundler'
+require 'capistrano/rails/assets'
+require 'capistrano/rails/migrations'
+
+require 'capistrano/puma'
+require 'capistrano/puma/workers'
+
+install_plugin Capistrano::Puma
+install_plugin Capistrano::Puma::Systemd
+
+require 'capistrano-resque'
+require 'whenever/capistrano'
+
+# Load custom tasks from `lib/capistrano/tasks` if you have any defined
+Dir.glob('lib/capistrano/tasks/*.rake').each { |r| import r }
diff --git a/Gemfile b/Gemfile
index feab266737..f06a454997 100644
--- a/Gemfile
+++ b/Gemfile
@@ -1,7 +1,8 @@
+# frozen_string_literal: true
+
 source 'https://rubygems.org'
 
 gem 'rails', '~> 7.0.8'
-gem 'dotenv', '~> 3.1', '>= 3.1.2'
 
 gem 'base64', '~> 0.2.0'
 
@@ -153,7 +154,6 @@ gem 'react-rails', '~> 3.0.0'
 # See https://github.com/shakacode/shakapacker#upgrading
 gem 'shakapacker', '~> 6.5.4'
 gem 'cssbundling-rails', '~> 1.2' # Management of css (Less) files conversion
-
 # Temporarily locking the 'mail' version until the next version of Rails is released
 # https://github.com/rails/rails/pull/46650
 gem 'mail', '~> 2.7.1'
@@ -162,6 +162,12 @@ gem 'bootsnap', require: false
 
 gem 'rails_semantic_logger', '~> 4.14'
 
+gem 'dotenv', '~> 3.1'
+gem 'whenever', '~> 1.0', require: false
+
+# web server
+gem 'puma', '~> 5.6'
+
 # Bundle gems for the local environment. Make sure to
 # put test-only gems in this group so their generators
 # and rake tasks are available in development mode:
@@ -176,6 +182,13 @@ group :development do
   # gem 'web-console'
   # Add speed badges [https://github.com/MiniProfiler/rack-mini-profiler]
   # gem "rack-mini-profiler"
+
+  # Deployment
+  gem 'capistrano-resque', '~> 0.2.3', require: false
+  gem 'capistrano', '~> 3.19', require: false
+  gem 'capistrano-rails', '~> 1.6', require: false
+  gem 'capistrano-rbenv', '~> 2.2', require: false
+  gem 'capistrano3-puma', '~> 5.2', require: false
 end
 
 group :development, :test do
@@ -192,7 +205,6 @@ group :development, :test do
   # For improved console readability:
   # https://github.com/amazing-print/amazing_print
   gem 'amazing_print', '~> 1.4'
-  gem 'puma', '~> 5.6'
   gem 'debug'
 end
 
diff --git a/Gemfile.lock b/Gemfile.lock
index a07fe648e3..2130973eca 100644
--- a/Gemfile.lock
+++ b/Gemfile.lock
@@ -172,6 +172,8 @@ GEM
     after_commit_action (1.1.0)
      activerecord (>= 3.0.0)
      activesupport (>= 3.0.0)
+    airbrussh (1.5.2)
+      sshkit (>= 1.6.1, != 1.7.0)
     amazing_print (1.5.0)
     american_date (1.1.1)
     ast (2.4.2)
@@ -180,12 +182,12 @@ GEM
       activerecord (>= 5.2, < 7.1)
       activesupport (>= 5.2, < 7.1)
       request_store (~> 1.0)
-    aws-eventstream (1.2.0)
+    aws-eventstream (1.3.0)
     aws-partitions (1.810.0)
-    aws-sdk-core (3.181.0)
-      aws-eventstream (~> 1, >= 1.0.2)
+    aws-sdk-core (3.201.1)
+      aws-eventstream (~> 1, >= 1.3.0)
       aws-partitions (~> 1, >= 1.651.0)
-      aws-sigv4 (~> 1.5)
+      aws-sigv4 (~> 1.8)
       jmespath (~> 1, >= 1.6.1)
     aws-sdk-kms (1.71.0)
       aws-sdk-core (~> 3, >= 3.177.0)
@@ -194,7 +196,7 @@ GEM
       aws-sdk-core (~> 3, >= 3.120.0)
       aws-sdk-kms (~> 1)
       aws-sigv4 (~> 1.4)
-    aws-sigv4 (1.6.0)
+    aws-sigv4 (1.8.0)
       aws-eventstream (~> 1, >= 1.0.2)
     axe-core-api (4.7.0)
       dumb_delegator
@@ -221,6 +223,27 @@ GEM
       msgpack (~> 1.2)
     builder (3.2.4)
     byebug (11.1.3)
+    capistrano (3.19.1)
+      airbrussh (>= 1.0.0)
+      i18n
+      rake (>= 10.0.0)
+      sshkit (>= 1.9.0)
+    capistrano-bundler (2.1.0)
+      capistrano (~> 3.1)
+    capistrano-rails (1.6.3)
+      capistrano (~> 3.1)
+      capistrano-bundler (>= 1.1, < 3)
+    capistrano-rbenv (2.2.0)
+      capistrano (~> 3.1)
+      sshkit (~> 1.3)
+    capistrano-resque (0.2.3)
+      capistrano
+      resque
+      resque-scheduler
+    capistrano3-puma (5.2.0)
+      capistrano (~> 3.7)
+      capistrano-bundler
+      puma (>= 4.0, < 6.0)
     capybara (3.39.2)
       addressable
       matrix
@@ -233,6 +256,7 @@ GEM
     capybara-screenshot (1.0.26)
       capybara (>= 1.0, < 4)
       launchy
+    chronic (0.10.2)
     chunky_png (1.4.0)
     cld3 (3.6.0)
     climate_control (0.2.0)
@@ -505,8 +529,13 @@ GEM
       net-protocol
     net-protocol (0.2.2)
       timeout
+    net-scp (4.0.0)
+      net-ssh (>= 2.6.5, < 8.0.0)
+    net-sftp (4.0.0)
+      net-ssh (>= 5.0.0, < 8.0.0)
     net-smtp (0.4.0.1)
       net-protocol
+    net-ssh (7.2.3)
     newrelic_rpm (9.10.2)
     nio4r (2.7.1)
     nokogiri (1.16.6)
@@ -783,6 +812,11 @@ GEM
       actionpack (>= 5.2)
       activesupport (>= 5.2)
       sprockets (>= 3.0.0)
+    sshkit (1.23.0)
+      base64
+      net-scp (>= 1.1.2)
+      net-sftp (>= 2.1.2)
+      net-ssh (>= 2.8.0)
     sys-uname (1.2.3)
       ffi (~> 1.1)
     temple (0.10.2)
@@ -837,6 +871,8 @@ GEM
     websocket-driver (0.7.6)
       websocket-extensions (>= 0.1.0)
     websocket-extensions (0.1.5)
+    whenever (1.0.0)
+      chronic (>= 0.6.3)
     will_paginate (3.3.1)
     will_paginate-bootstrap (1.0.2)
       will_paginate (>= 3.0.3)
@@ -866,6 +902,11 @@ DEPENDENCIES
   base64 (~> 0.2.0)
   bigdecimal (~> 3.1, >= 3.1.8)
   bootsnap
+  capistrano (~> 3.19)
+  capistrano-rails (~> 1.6)
+  capistrano-rbenv (~> 2.2)
+  capistrano-resque (~> 0.2.3)
+  capistrano3-puma (~> 5.2)
   capybara (~> 3.26)
   capybara-screenshot
   cld3 (~> 3.6.0)
@@ -882,7 +923,7 @@ DEPENDENCIES
   debug
   dogapi (~> 1.45)
   dogstatsd-ruby (~> 3.2.0)
-  dotenv (~> 3.1, >= 3.1.2)
+  dotenv (~> 3.1)
   elasticsearch!
   elasticsearch-xpack (~> 7.4.0)
   email_spec (~> 2.2)
@@ -976,6 +1017,7 @@ DEPENDENCIES
   virtus (~> 1.0.5)
   webdrivers (~> 5.0)
   webmock (~> 3.8)
+  whenever (~> 1.0)
   will_paginate (~> 3.3.0)
   will_paginate-bootstrap (~> 1.0.1)
 
diff --git a/app/models/click.rb b/app/models/click.rb
index 0eba046283..92b5464892 100644
--- a/app/models/click.rb
+++ b/app/models/click.rb
@@ -34,11 +34,16 @@ def initialize(params)
   end
 
   def log
-    Rails.logger.info("[Click] #{click_hash.to_json}")
+    clicks_logger.info("[Click] #{click_hash.to_json}")
+    Rails.logger.info('[Click]', search_data: click_hash)
   end
 
   private
 
+  def clicks_logger
+    @@logger ||= Logger.new('log/clicks.log')
+  end
+
   # prevent validation from choking on "invalid byte sequence in UTF-8"
   def validate_url_encoding
     return unless url
diff --git a/app/models/rtu_dashboard.rb b/app/models/rtu_dashboard.rb
index c0ea7003b2..2328765d21 100644
--- a/app/models/rtu_dashboard.rb
+++ b/app/models/rtu_dashboard.rb
@@ -29,7 +29,7 @@ def top_urls
   end
 
   def trending_urls
-    redis = Redis.new(:host => REDIS_HOST, :port => REDIS_PORT)
+    redis = Redis.new(url: ENV.fetch('REDIS_SYSTEM_URL'))
     trending_urls_key = ['TrendingUrls', @site.name].join(':')
     redis.smembers(trending_urls_key)
   end
diff --git a/app/models/search_impression.rb b/app/models/search_impression.rb
index a4f08facf0..204f518873 100644
--- a/app/models/search_impression.rb
+++ b/app/models/search_impression.rb
@@ -16,7 +16,8 @@ def self.log(search, vertical, params, request)
                modules: search.modules.join('|'),
                params: clean_params(params))
 
-    Rails.logger.info("[Search Impression] #{hash.to_json}")
+    impression_logger.info("[Search Impression] #{hash.to_json}")
+    Rails.logger.info('[Search Impression]', search_data: hash)
   end
 
   def self.clean_params(params)
@@ -37,4 +38,8 @@ def self.get_url_from_request(request)
   def self.flatten_diagnostics_hash(diagnostics_hash)
     diagnostics_hash.keys.sort.map { |k| diagnostics_hash[k].merge(module: k) }
   end
+
+  def self.impression_logger
+    @@logger ||= Logger.new('log/impressions.log')
+  end
 end
diff --git a/app/models/trending_url.rb b/app/models/trending_url.rb
index bd1d37075d..000b7e732e 100644
--- a/app/models/trending_url.rb
+++ b/app/models/trending_url.rb
@@ -6,7 +6,7 @@ class TrendingUrl
   attr_accessor :affiliate, :url
 
   cattr_reader :redis
-  @@redis = Redis.new(:host => REDIS_HOST, :port => REDIS_PORT)
+  @@redis = Redis.new(url: ENV.fetch('REDIS_SYSTEM_URL'))
 
   def self.all
     sorted_trending_affiliate_keys = @@redis.keys("TrendingUrls:*").sort
diff --git a/appspec.yml b/appspec.yml
new file mode 100644
index 0000000000..6a90d0c3cc
--- /dev/null
+++ b/appspec.yml
@@ -0,0 +1,15 @@
+
+version: 0.0
+os: linux
+# files:
+#   - source: /
+#     destination: /home/search/cicd_temp
+
+hooks:
+
+  BeforeInstall:
+    - location: cicd-scripts/fetch_env_vars.sh
+      timeout: 300
+      runas: search
+
+
diff --git a/buildspec_searchgov.yml b/buildspec_searchgov.yml
new file mode 100644
index 0000000000..87b0abdafc
--- /dev/null
+++ b/buildspec_searchgov.yml
@@ -0,0 +1,48 @@
+
+version: 0.2
+env:
+  parameter-store:
+    # env_used_in_app: "env_in_param_store"
+    APP_SERVER_ADDRESSES: "DEPLOY_SEARCHGOV_SERVER_ADDRESS"
+    RESQUE_SERVER_ADDRESSES: "DEPLOY_RESQUE_SERVER_ADDRESSES"
+    DEPLOYMENT_PATH: "DEPLOY_SEARCHGOV_DEPLOYMENT_PATH"
+    SERVER_DEPLOYMENT_USER: "DEPLOY_SERVER_DEPLOYMENT_USER"
+    RESQUE_WORKERS_COUNT: "DEPLOY_RESQUE_WORKERS_COUNT"
+    # SSH_KEY_PATH: "DEPLOY_SSH_KEY_PATH" - defined below
+
+  # shared deployment variables with subsequent stages - might not to export as this is the final stage
+  exported-variables:
+    - APP_SERVER_ADDRESSES
+    - RESQUE_SERVER_ADDRESSES
+    - DEPLOYMENT_PATH
+    - SERVER_DEPLOYMENT_USER
+    - SSH_KEY_PATH
+
+phases:
+  install:
+    runtime-versions:
+      python: 3.x
+    commands:
+      - export PATH="$HOME/.rbenv/bin:$PATH"
+      - eval "$(rbenv init -)"
+
+
+  pre_build:
+    commands:
+      - aws secretsmanager get-secret-value --secret-id $SEARCH_SECRETSMANAGER_KEY_SECRET_NAME --region $SEARCH_AWS_REGION --query 'SecretString' --output text > $SEARCH_ENV_EC2_KEY
+  build:
+    commands:
+      - CURRENT_LOCATION=$(pwd) # would look something like this - /codebuild/output/src559980389/src - a temp dir created by codebuild
+      - SSH_KEY_PATH="${CURRENT_LOCATION}/${SEARCH_ENV_EC2_KEY}"
+      - echo $SSH_KEY_PATH
+      - echo "deploying searchgov app with capistrano"
+      - bundle install
+      - cap $SEARCH_ENV puma:config puma:systemd:config puma:systemd:enable
+      - cap $SEARCH_ENV deploy
+      - cap $SEARCH_ENV --tasks
+      - cap $SEARCH_ENV resque:start
+      - cap $SEARCH_ENV puma:restart
+
+artifacts:
+  files:
+    - '**/*'
diff --git a/cicd-scripts/fetch_env_vars.sh b/cicd-scripts/fetch_env_vars.sh
new file mode 100644
index 0000000000..40e14c35f4
--- /dev/null
+++ b/cicd-scripts/fetch_env_vars.sh
@@ -0,0 +1,67 @@
+#!/bin/bash
+set -x
+# Move to a writable location
+cd /home/search/cicd_temp
+
+# Leave PARAM_PATH empty to fetch all parameters in the region
+PARAM_PATH=""
+
+# Clear the .env file if it exists
+> .env
+
+echo "Starting the script"
+# Fetch all parameter names in the region
+REGION=$(curl -s http://169.254.169.254/latest/meta-data/placement/region)
+echo $REGION
+if [ -n "$PARAM_PATH" ]; then
+  PARAM_KEYS=$(aws ssm get-parameters-by-path --path "$PARAM_PATH" --recursive --query "Parameters[*].Name" --output text --region $REGION)
+else
+  PARAM_KEYS=$(aws ssm describe-parameters --query "Parameters[*].Name" --output text --region $REGION)
+fi
+echo "Fetched parameter keys: $PARAM_KEYS"
+
+# Loop through each parameter key
+for PARAM in $PARAM_KEYS; do
+  # Exclude parameters that start with "DEPLOY_" or match "*_EC2_PEM_KEY" or match LOGIN_DOT_GOV_PEM
+  if [[ $PARAM != DEPLOY_* && ! $PARAM =~ .*_EC2_PEM_KEY$ && $PARAM != "LOGIN_DOT_GOV_PEM" ]]; then
+    # Fetch the parameter value from SSM
+    VALUE=$(aws ssm get-parameter --name "$PARAM" --with-decryption --query "Parameter.Value" --output text --region $REGION)
+
+    # Rename parameters that start with "SEARCH_AWS_" to "AWS_"
+    if [[ $PARAM == SEARCH_AWS_* ]]; then
+      PARAM=${PARAM/SEARCH_AWS_/AWS_}
+    fi
+
+    # Write the key=value pair to the .env file
+    echo "$PARAM=$VALUE" >> .env
+  fi
+done
+
+# Output the result
+echo ".env file created with the following content:"
+cat .env
+cp /home/search/cicd_temp/.env /home/search/searchgov/shared/
+
+# Fetch a specific parameter and save it to a file
+aws ssm get-parameter --name "LOGIN_DOT_GOV_PEM" --region us-east-2 --with-decryption --query "Parameter.Value" --output text > /home/search/searchgov/logindotgov.pem
+
+# create puma folders and files
+
+# Create directories if they do not already exist
+[ ! -d /home/search/searchgov/shared/tmp/pids/ ] && mkdir -p /home/search/searchgov/shared/tmp/pids/
+[ ! -d /home/search/searchgov/shared/log ] && mkdir -p /home/search/searchgov/shared/log
+
+# Create log files if they do not already exist
+[ ! -f /home/search/searchgov/shared/log/puma_access.log ] && touch /home/search/searchgov/shared/log/puma_access.log
+[ ! -f /home/search/searchgov/shared/log/puma_error.log ] && touch /home/search/searchgov/shared/log/puma_error.log
+
+
+# Set ownership and permissions
+chown -R search:search /home/search/searchgov/
+chmod -R 777 /home/search/searchgov/
+
+find /home/search/searchgov/ -type d -exec chmod 2777 {} \;
+
+umask 000
+
+sudo rm -rf /home/search/cicd_temp/*
diff --git a/config/database.yml b/config/database.yml
index 265f1dc2f8..075ee9782d 100644
--- a/config/database.yml
+++ b/config/database.yml
@@ -27,7 +27,7 @@ development:
 production:
   <<: *DEFAULT
   database: <%= ENV['DB_NAME'] %>
-  username: <%= ENV['DB_USERNAME'] %>
+  username: <%= ENV['DB_USER'] %>
   password: <%= ENV['DB_PASSWORD'] %>
   host: <%= ENV['DB_HOST'] %>
   port: <%= ENV['DB_PORT'] %>
diff --git a/config/deploy.rb b/config/deploy.rb
new file mode 100644
index 0000000000..b9d990266a
--- /dev/null
+++ b/config/deploy.rb
@@ -0,0 +1,37 @@
+# config valid for current version and patch releases of Capistrano
+lock '~> 3.19.1'
+
+set :application, 'search-gov'
+set :branch, ENV.fetch('SEARCH_ENV', 'staging')
+set :default_env, { SECRET_KEY_BASE: '1' }
+set :deploy_to, ENV['DEPLOYMENT_PATH']
+set :format, :pretty
+set :puma_access_log, "#{release_path}/log/puma.access.log"
+set :puma_bind, 'tcp://0.0.0.0:3000'
+set :puma_error_log, "#{release_path}/log/puma.error.log"
+set :rails_env, 'production'
+set :rbenv_ruby, '3.1.4'
+set :rbenv_type, :user
+set :repo_url, 'https://github.com/GSA/search-gov'
+set :resque_environment_task, true
+set :user, ENV['SERVER_DEPLOYMENT_USER']
+set :whenever_roles, :cron
+set :workers, { '*' => ENV.fetch('RESQUE_WORKERS_COUNT', '5').to_i }
+
+append :linked_dirs, 'log', 'tmp', 'node_modules', 'public'
+append :linked_files, '.env', 'config/logindotgov.pem'
+
+role :app, JSON.parse(ENV.fetch('APP_SERVER_ADDRESSES', '[]')), user: ENV['SERVER_DEPLOYMENT_USER']
+role :cron, JSON.parse(ENV.fetch('CRON_SERVER_ADDRESSES', '[]')), user: ENV['SERVER_DEPLOYMENT_USER']
+role :db, JSON.parse(ENV.fetch('APP_SERVER_ADDRESSES', '[]')), user: ENV['SERVER_DEPLOYMENT_USER']
+role :resque_worker, JSON.parse(ENV.fetch('RESQUE_SERVER_ADDRESSES', '[]')), user: ENV['SERVER_DEPLOYMENT_USER']
+role :web, JSON.parse(ENV.fetch('APP_SERVER_ADDRESSES', '[]')), user: ENV['SERVER_DEPLOYMENT_USER']
+
+set :ssh_options, {
+  auth_methods: %w(publickey),
+  forward_agent: false,
+  keys: [ENV['SSH_KEY_PATH']],
+  user: ENV['SERVER_DEPLOYMENT_USER'],
+}
+
+after 'deploy:finished', 'resque:restart'
diff --git a/config/deploy/production.rb b/config/deploy/production.rb
new file mode 100644
index 0000000000..9ac5b511fd
--- /dev/null
+++ b/config/deploy/production.rb
@@ -0,0 +1,55 @@
+# config/deploy/production.rb
+
+# Server-based syntax
+# ======================
+# Defines a single server with a list of roles and multiple properties.
+# You can define all roles on a single server, or split them:
+
+# server "production.server.com", user: "deploy", roles: %w{app db web}
+
+# Role-based syntax
+# ==================
+# Defines a role with one or multiple servers.
+# The primary server in each group is considered to be the first unless any hosts have the primary property set.
+# Specify the username and a domain or IP for the server.
+
+# role :app, %w{deploy@production.server.com}
+# role :web, %w{user1@production.server.com user2@production.server.com}
+# role :db, %w{deploy@production.server.com}
+
+# Configuration
+# =============
+# You can set any configuration variable like in config/deploy.rb.
+# These variables are then only loaded and set in this stage.
+# For available Capistrano configuration variables see the documentation page.
+# http://capistranorb.com/documentation/getting-started/configuration/
+# Feel free to add new variables to customize your setup.
+
+# set :rails_env, 'production'
+# set :aws_ssm_path, '/your/application/env/production/'
+
+# Custom SSH Options
+# ==================
+# You may pass any option but keep in mind that net/ssh understands a limited set of options, consult the Net::SSH documentation.
+# http://net-ssh.github.io/net-ssh/classes/Net/SSH.html#method-c-start
+
+# Global options
+# --------------
+# set :ssh_options, {
+#   keys: %w(/home/deploy/.ssh/id_rsa),
+#   forward_agent: false,
+#   auth_methods: %w(publickey)
+# }
+
+# The server-based syntax can be used to override options:
+# ------------------------------------
+# server "production.server.com",
+#   user: "deploy",
+#   roles: %w{web app},
+#   ssh_options: {
+#     user: "deploy", # overrides user setting above
+#     keys: %w(/home/deploy/.ssh/id_rsa),
+#     forward_agent: false,
+#     auth_methods: %w(publickey password)
+#     # password: "please use keys"
+#   }
diff --git a/config/deploy/staging.rb b/config/deploy/staging.rb
new file mode 100644
index 0000000000..af74c8d2fa
--- /dev/null
+++ b/config/deploy/staging.rb
@@ -0,0 +1,22 @@
+# config/deploy/staging.rb
+
+# Server-based syntax
+# ======================
+# Defines a single server with a list of roles and multiple properties.
+# You can define all roles on a single server, or split them:
+
+# Configuration
+# =============
+# You can set any configuration variable like in config/deploy.rb.
+# These variables are then only loaded and set in this stage.
+# For available Capistrano configuration variables see the documentation page.
+# http://capistranorb.com/documentation/getting-started/configuration/
+# Feel free to add new variables to customize your setup.
+
+# Custom SSH Options
+# ==================
+# You may pass any option but keep in mind that net/ssh understands a limited set of options, consult the Net/SSH documentation.
+# http://net-ssh.github.io/net-ssh/classes/Net/SSH.html#method-c-start
+
+# Global options
+# --------------
diff --git a/config/environments/production.rb b/config/environments/production.rb
index ff3e9fa049..b5d2b8f60d 100644
--- a/config/environments/production.rb
+++ b/config/environments/production.rb
@@ -49,7 +49,7 @@
   # config.action_cable.allowed_request_origins = [ "http://example.com", /http:\/\/example.*/ ]
 
   # Force all access to the app over SSL, use Strict-Transport-Security, and use secure cookies.
-  config.force_ssl = true
+  config.force_ssl = false
   config.ssl_options[:secure_cookies] = true
 
   # Include generic and useful information about system operation, but avoid logging too much
@@ -84,12 +84,7 @@
   # require "syslog/logger"
   # config.logger = ActiveSupport::TaggedLogging.new(Syslog::Logger.new "app-name")
 
-  if ENV["RAILS_LOG_TO_STDOUT"].present?
-    $stdout.sync = true
-    config.rails_semantic_logger.add_file_appender = false
-    config.rails_semantic_logger.format = :json
-    config.semantic_logger.add_appender(io: $stdout, formatter: config.rails_semantic_logger.format)
-  end
+  config.rails_semantic_logger.format = :json
 
   # Do not dump schema after migrations.
   config.active_record.dump_schema_after_migration = false
 
@@ -114,6 +109,8 @@
   # config.active_record.database_selector = { delay: 2.seconds }
   # config.active_record.database_resolver = ActiveRecord::Middleware::DatabaseSelector::Resolver
   # config.active_record.database_resolver_context = ActiveRecord::Middleware::DatabaseSelector::Resolver::Session
+
+  config.consider_all_requests_local = ENV['LOCAL_REQUEST'].present?
 end
 
 ADDITIONAL_BING_PARAMS = {}
diff --git a/config/initializers/load_resque.rb b/config/initializers/load_resque.rb
index e50217cae1..e070d5b34c 100644
--- a/config/initializers/load_resque.rb
+++ b/config/initializers/load_resque.rb
@@ -7,10 +7,7 @@
 require 'resque/server'
 require 'resque/job_timeout'
 
-host = ENV['REDIS_HOST']
-port = ENV['REDIS_PORT']
-
-Resque.redis = "#{host}:#{port}"
+Resque.redis = ENV.fetch('REDIS_SYSTEM_URL')
 
 Resque::Failure::Multiple.classes = [Resque::Failure::Redis]
 Resque::Failure.backend = Resque::Failure::Multiple
diff --git a/config/initializers/session_store.rb b/config/initializers/session_store.rb
index 0cd3a5df02..d62d1011c1 100644
--- a/config/initializers/session_store.rb
+++ b/config/initializers/session_store.rb
@@ -3,8 +3,8 @@
   secure: Rails.application.config.ssl_options[:secure_cookies],
   key: '_usasearch_session',
   servers: {
-    host: ENV['REDIS_HOST'],
-    port: ENV['REDIS_PORT'],
+    host: ENV['REDIS_SESSION_HOST'],
+    port: ENV['REDIS_SESSION_PORT'],
     db: 2,
     key_prefix: 'usasearch:session'
   }
diff --git a/config/initializers/sidekiq.rb b/config/initializers/sidekiq.rb
index e0606f71c2..0203f127ce 100644
--- a/config/initializers/sidekiq.rb
+++ b/config/initializers/sidekiq.rb
@@ -3,9 +3,9 @@
 sidekiq = Rails.application.config_for(:sidekiq)
 
 Sidekiq.configure_server do |config|
-  config.redis = { url: sidekiq['url'] }
+  config.redis = { host: sidekiq['host'], port: sidekiq['port'] }
 end
 
 Sidekiq.configure_client do |config|
-  config.redis = { url: sidekiq['url'] }
+  config.redis = { host: sidekiq['host'], port: sidekiq['port'] }
 end
diff --git a/config/puma.rb b/config/puma.rb
index daaf036999..3d26a2ecf7 100644
--- a/config/puma.rb
+++ b/config/puma.rb
@@ -10,34 +10,20 @@
 
 # Specifies the `worker_timeout` threshold that Puma will use to wait before
 # terminating a worker in development environments.
-# worker_timeout 3600 if ENV.fetch("RAILS_ENV", "development") == "development"
 
+# Comment out the port setting if you are using bind
 # Specifies the `port` that Puma will listen on to receive requests; default is 3000.
-#
-port ENV.fetch("PORT") { 3000 }
+# port ENV.fetch("PORT") { 3000 }
 
 # Specifies the `environment` that Puma will run in.
-# environment ENV.fetch("RAILS_ENV") { "development" }
 
 # Specifies the `pidfile` that Puma will use.
 pidfile ENV.fetch("PIDFILE") { "tmp/pids/server.pid" }
 
-# Specifies the number of `workers` to boot in clustered mode.
-# Workers are forked web server processes. If using threads and workers together
-# the concurrency of the application would be max `threads` * `workers`.
-# Workers do not work on JRuby or Windows (both of which do not support
-# processes).
-#
-# workers ENV.fetch("WEB_CONCURRENCY") { 2 }
-
-# Use the `preload_app!` method when specifying a `workers` number.
-# This directive tells Puma to first boot the application and load code
-# before forking the application. This takes advantage of Copy On Write
-# process behavior so workers use less memory.
-#
-# preload_app!
+# Use the bind directive to specify the address and port to listen on
+bind "tcp://0.0.0.0:3000"
 
 # Allow puma to be restarted by `bin/rails restart` command.
 plugin :tmp_restart
diff --git a/config/routes.rb b/config/routes.rb
index 47716806f3..20bc79804b 100644
--- a/config/routes.rb
+++ b/config/routes.rb
@@ -33,6 +33,7 @@
   get '/sayt' => 'sayt#index'
   post '/clicked' => 'clicked#create'
   get '/healthcheck' => 'health_checks#new'
+  get '/up' => 'health_checks#new'
   get '/login' => 'user_sessions#security_notification', as: :login
   get '/signup' => 'user_sessions#security_notification', as: :signup
   get '/dcv/:affiliate.txt' => 'statuses#domain_control_validation',
diff --git a/config/schedule.rb b/config/schedule.rb
new file mode 100644
index 0000000000..4896dd0a97
--- /dev/null
+++ b/config/schedule.rb
@@ -0,0 +1,15 @@
+every 1.month, roles: [:cron] do
+  rake 'search:reports:email_monthly_reports'
+end
+
+every '35 21 18 12 *', roles: [:cron] do
+  rake 'search:reports:email_yearly_reports'
+end
+
+every '18 9 * * 1-5', roles: [:cron] do
+  rake 'search:federal_register:import_agencies'
+end
+
+every '18 9 * * 1-5', roles: [:cron] do
+  rake 'search:federal_register:import_documents'
+end
diff --git a/config/sidekiq.yml b/config/sidekiq.yml
index c02cf92d34..cd2456e415 100644
--- a/config/sidekiq.yml
+++ b/config/sidekiq.yml
@@ -1,6 +1,6 @@
 default: &DEFAULT
   namespace: oasis
-  url: <%= ENV['REDIS_HOST'] || 'redis://localhost:6379' %>
+  url: <%= ENV.fetch('REDIS_SYSTEM_URL', 'localhost:6379') %>
 
 development:
   <<: *DEFAULT
diff --git a/lib/es.rb b/lib/es.rb
index 4c60f40f6a..439d65ea66 100644
--- a/lib/es.rb
+++ b/lib/es.rb
@@ -10,28 +10,16 @@ module Es
   ).deep_symbolize_keys.freeze
 
   def client_reader
-    @client_reader ||= initialize_client(reader_config)
+    @client_reader ||= initialize_client
   end
 
   def client_writers
-    @client_writers ||= writer_config.map { |config| initialize_client(config) }
+    @client_writers ||= [initialize_client]
   end
 
   private
 
-  def reader_config
-    {
-      hosts: ENV.fetch('ES_HOSTS', '').split(',').map(&:strip)
-    }
-  end
-
-  def writer_config
-    [{
-      hosts: ENV.fetch('ES_HOSTS', '').split(',').map(&:strip)
-    }]
-  end
-
-  def initialize_client(config)
+  def initialize_client(config = {})
     Elasticsearch::Client.new(config.merge(CLIENT_CONFIG)).tap do |client|
       client.transport.logger = Rails.logger.clone
       client.transport.logger.formatter = proc do |severity, time, _progname, msg|
diff --git a/spec/system/bing_v7_searches_spec.rb b/spec/system/bing_v7_searches_spec.rb
index 57c8cc890d..1e343f884c 100644
--- a/spec/system/bing_v7_searches_spec.rb
+++ b/spec/system/bing_v7_searches_spec.rb
@@ -12,6 +12,8 @@
     end
 
     it 'uses the web search key and end point' do
+      skip 'Bing is failing and Jim has approved skipping of Bing tests.'
+
       expect(WebMock).to have_requested(:get, /#{web_search_host}#{web_search_path}/).
         with(headers: { 'Ocp-Apim-Subscription-Key' => web_subscription_id })