Skip to content

Commit

Permalink
Oban crontab: ensure module exists (#3558)
Browse files Browse the repository at this point in the history
  • Loading branch information
AntoineAugusti authored Oct 24, 2023
1 parent 31cd7ff commit 26cf19b
Show file tree
Hide file tree
Showing 5 changed files with 61 additions and 62 deletions.
2 changes: 1 addition & 1 deletion apps/transport/lib/db/data_conversion.ex
Original file line number Diff line number Diff line change
Expand Up @@ -70,7 +70,7 @@ defmodule DB.DataConversion do
delete_data_conversions(conversions)

%{"dataset_id" => dataset_id}
|> Transport.Jobs.DatasetGtfsToNetexConverterJob.new()
|> Transport.Jobs.DatasetGTFSToNeTExConverterJob.new()
|> Oban.insert()
end

Expand Down
8 changes: 4 additions & 4 deletions apps/transport/lib/jobs/gtfs_to_netex_converter_job.ex
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
defmodule Transport.Jobs.GtfsToNetexConverterJob do
defmodule Transport.Jobs.GTFSToNeTExConverterJob do
@moduledoc """
This will enqueue GTFS -> NeTEx conversion jobs for all GTFS resources found in ResourceHistory
"""
Expand All @@ -7,11 +7,11 @@ defmodule Transport.Jobs.GtfsToNetexConverterJob do

@impl true
def perform(%{}) do
GTFSGenericConverter.enqueue_all_conversion_jobs("NeTEx", Transport.Jobs.SingleGtfsToNetexConverterJob)
GTFSGenericConverter.enqueue_all_conversion_jobs("NeTEx", Transport.Jobs.SingleGTFSToNeTExConverterJob)
end
end

defmodule Transport.Jobs.SingleGtfsToNetexConverterJob do
defmodule Transport.Jobs.SingleGTFSToNeTExConverterJob do
@moduledoc """
Conversion Job of a GTFS to a NeTEx, saving the resulting file in S3
"""
Expand All @@ -31,7 +31,7 @@ defmodule Transport.Jobs.SingleGtfsToNetexConverterJob do
end
end

defmodule Transport.Jobs.DatasetGtfsToNetexConverterJob do
defmodule Transport.Jobs.DatasetGTFSToNeTExConverterJob do
@moduledoc """
This will enqueue GTFS -> NeTEx conversions jobs for all GTFS resources linked to a dataset, but only for the most recent resource history
"""
Expand Down
4 changes: 2 additions & 2 deletions apps/transport/test/db/data_conversion_test.exs
Original file line number Diff line number Diff line change
Expand Up @@ -151,7 +151,7 @@ defmodule DB.DataConversionTest do

# list candidate resource history for future conversions
resource_history_ids =
dataset.id |> Transport.Jobs.DatasetGtfsToNetexConverterJob.list_gtfs_last_resource_history() |> Enum.sort()
dataset.id |> Transport.Jobs.DatasetGTFSToNeTExConverterJob.list_gtfs_last_resource_history() |> Enum.sort()

assert [resource_history_1.id, resource_history_2.id] == resource_history_ids
end
Expand Down Expand Up @@ -180,7 +180,7 @@ defmodule DB.DataConversionTest do

# new conversion is enqueued
assert_enqueued(
worker: Transport.Jobs.DatasetGtfsToNetexConverterJob,
worker: Transport.Jobs.DatasetGTFSToNeTExConverterJob,
args: %{"dataset_id" => dataset.id}
)
end
Expand Down
Original file line number Diff line number Diff line change
@@ -1,9 +1,9 @@
defmodule Transport.Jobs.SingleGtfsToNetexConverterJobTest do
defmodule Transport.Jobs.SingleGTFSToNeTExConverterJobTest do
use ExUnit.Case, async: true
use Oban.Testing, repo: DB.Repo
import DB.Factory
import Mox
alias Transport.Jobs.SingleGtfsToNetexConverterJob
alias Transport.Jobs.SingleGTFSToNeTExConverterJob

setup do
Ecto.Adapters.SQL.Sandbox.checkout(DB.Repo)
Expand All @@ -19,7 +19,7 @@ defmodule Transport.Jobs.SingleGtfsToNetexConverterJobTest do

# no mox expectation set, and the test passes => conversion is properly skipped
assert {:cancel, "Conversion is not needed"} ==
perform_job(SingleGtfsToNetexConverterJob, %{"resource_history_id" => resource_history_id})
perform_job(SingleGTFSToNeTExConverterJob, %{"resource_history_id" => resource_history_id})
end

test "launch a NeTEx conversion" do
Expand Down Expand Up @@ -66,7 +66,7 @@ defmodule Transport.Jobs.SingleGtfsToNetexConverterJobTest do

# job succeeds
assert :ok ==
perform_job(SingleGtfsToNetexConverterJob, %{"resource_history_id" => resource_history_id})
perform_job(SingleGTFSToNeTExConverterJob, %{"resource_history_id" => resource_history_id})

# a data_conversion row is recorded ✌️‍
assert %DB.DataConversion{payload: %{"filesize" => 41, "filename" => "conversions/gtfs-to-netex/fff.netex.zip"}} =
Expand Down Expand Up @@ -111,7 +111,7 @@ defmodule Transport.Jobs.SingleGtfsToNetexConverterJobTest do
{:error, "conversion failed"}
end)

assert {:cancel, _} = perform_job(SingleGtfsToNetexConverterJob, %{"resource_history_id" => resource_history_id})
assert {:cancel, _} = perform_job(SingleGTFSToNeTExConverterJob, %{"resource_history_id" => resource_history_id})

# ResourceHistory's payload is updated with the error information
expected_payload =
Expand Down
99 changes: 49 additions & 50 deletions config/runtime.exs
Original file line number Diff line number Diff line change
Expand Up @@ -95,58 +95,57 @@ base_oban_conf = [repo: DB.Repo]
# but not during dev or test
# Be careful : there is "app_env :prod" in contrast to :staging (ie production website vs prochainement)
# and "config_env :prod" in contrast to :dev et :test
# Production Oban crontab, intended for the `:prod` config_env branch below.
# Each entry is `{cron_expression, worker_module}` with an optional `args:`
# keyword forwarded to the job (see RemoveHistoryJob and the second
# ResourcesUnavailableDispatcherJob entry).
oban_prod_crontab = [
  {"0 */6 * * *", Transport.Jobs.ResourceHistoryAndValidationDispatcherJob},
  {"30 */6 * * *", Transport.Jobs.GTFSToGeoJSONConverterJob},
  {"0 4 * * *", Transport.Jobs.GTFSImportStopsJob},
  # every 6 hours but not at the same time as other jobs
  {"0 3,9,15,21 * * *", Transport.Jobs.GTFSToNeTExConverterJob},
  {"20 8 * * *", Transport.Jobs.CleanOrphanConversionsJob},
  {"0 * * * *", Transport.Jobs.ResourcesUnavailableDispatcherJob},
  {"*/10 * * * *", Transport.Jobs.ResourcesUnavailableDispatcherJob, args: %{only_unavailable: true}},
  {"20 */2 * * *", Transport.Jobs.GTFSRTMetadataDispatcherJob},
  {"30 */6 * * *", Transport.Jobs.BNLCToGeoData},
  {"30 */6 * * *", Transport.Jobs.ParkingsRelaisToGeoData},
  {"30 */6 * * *", Transport.Jobs.LowEmissionZonesToGeoData},
  {"30 */6 * * *", Transport.Jobs.IRVEToGeoData},
  {"15 10 * * *", Transport.Jobs.DatabaseBackupReplicationJob},
  {"0 7 * * *", Transport.Jobs.GTFSRTMultiValidationDispatcherJob},
  {"30 7 * * *", Transport.Jobs.GBFSMultiValidationDispatcherJob},
  {"45 */3 * * *", Transport.Jobs.ResourceHistoryJSONSchemaValidationJob},
  # Validata JSON is not properly maintained/monitored.
  # Disable it for now.
  # https://github.com/etalab/transport-site/issues/3492
  # {"0 20 * * *", Transport.Jobs.ResourceHistoryValidataJSONJob},
  {"15 */3 * * *", Transport.Jobs.ResourceHistoryTableSchemaValidationJob},
  {"5 6 * * *", Transport.Jobs.NewDatagouvDatasetsJob},
  {"0 6 * * *", Transport.Jobs.NewDatasetNotificationsJob},
  {"0 21 * * *", Transport.Jobs.DatasetHistoryDispatcherJob},
  # Should be executed after all `DatasetHistoryJob` have been executed
  {"50 21 * * *", Transport.Jobs.ResourcesChangedNotificationJob},
  {"0 22 * * *", Transport.Jobs.ArchiveMetricsJob},
  {"15,45 * * * *", Transport.Jobs.MultiValidationWithErrorNotificationJob},
  {"20,50 * * * *", Transport.Jobs.ResourceUnavailableNotificationJob},
  {"30 6 * * 1", Transport.Jobs.DatasetsSwitchingClimateResilienceBillJob},
  {"30 6 * * 1-5", Transport.Jobs.DatasetsClimateResilienceBillNotLOLicenceJob},
  {"10 6 * * 1", Transport.Jobs.DatasetsWithoutGTFSRTRelatedResourcesNotificationJob},
  {"45 2 * * *", Transport.Jobs.RemoveHistoryJob, args: %{schema_name: "etalab/schema-irve-dynamique", days_limit: 7}},
  {"0 16 * * *", Transport.Jobs.DatasetQualityScoreDispatcher},
  {"40 3 * * *", Transport.Jobs.UpdateContactsJob},
  {"10 5 * * *", Transport.Jobs.NotificationSubscriptionProducerJob},
  # "At 08:15 on Monday in March, June, and November."
  # The job will make sure that it's executed only on the first Monday of these months
  {"15 8 * 3,6,11 1", Transport.Jobs.PeriodicReminderProducersNotificationJob}
]

# Fail fast at boot: `Code.ensure_compiled!/1` raises if a scheduled worker
# module does not exist (e.g. a typo or a renamed job), instead of the job
# silently never running at trigger time.
# `Enum.each/2` rather than `Enum.map/2`: we only want the side effect and
# discard the result.
oban_prod_crontab |> Enum.each(&Code.ensure_compiled!(elem(&1, 1)))

# Per-environment crontab: the full production schedule (defined above as
# `oban_prod_crontab`, whose modules are checked at boot), and no scheduled
# jobs in dev or test.
# NOTE(review): the previous version duplicated the whole schedule inline in
# the `:prod` clause, shadowing the `oban_prod_crontab` reference with an
# unreachable duplicate clause — collapsed to the single source of truth.
oban_crontab_all_envs =
  case config_env() do
    :prod -> oban_prod_crontab
    :dev -> []
    :test -> []
  end

# Oban Jobs that only run on the production server.
Expand Down

0 comments on commit 26cf19b

Please sign in to comment.