diff --git a/apps/transport/lib/db/data_conversion.ex b/apps/transport/lib/db/data_conversion.ex
index 56368b3c71..ea76145cec 100644
--- a/apps/transport/lib/db/data_conversion.ex
+++ b/apps/transport/lib/db/data_conversion.ex
@@ -70,7 +70,7 @@ defmodule DB.DataConversion do
       delete_data_conversions(conversions)
 
       %{"dataset_id" => dataset_id}
-      |> Transport.Jobs.DatasetGtfsToNetexConverterJob.new()
+      |> Transport.Jobs.DatasetGTFSToNeTExConverterJob.new()
       |> Oban.insert()
     end
diff --git a/apps/transport/lib/jobs/gtfs_to_netex_converter_job.ex b/apps/transport/lib/jobs/gtfs_to_netex_converter_job.ex
index b1d0911663..d92ff0ed91 100644
--- a/apps/transport/lib/jobs/gtfs_to_netex_converter_job.ex
+++ b/apps/transport/lib/jobs/gtfs_to_netex_converter_job.ex
@@ -1,4 +1,4 @@
-defmodule Transport.Jobs.GtfsToNetexConverterJob do
+defmodule Transport.Jobs.GTFSToNeTExConverterJob do
   @moduledoc """
   This will enqueue GTFS -> NeTEx conversion jobs for all GTFS resources found in ResourceHistory
   """
@@ -7,11 +7,11 @@ defmodule Transport.Jobs.GtfsToNetexConverterJob do
 
   @impl true
   def perform(%{}) do
-    GTFSGenericConverter.enqueue_all_conversion_jobs("NeTEx", Transport.Jobs.SingleGtfsToNetexConverterJob)
+    GTFSGenericConverter.enqueue_all_conversion_jobs("NeTEx", Transport.Jobs.SingleGTFSToNeTExConverterJob)
   end
 end
 
-defmodule Transport.Jobs.SingleGtfsToNetexConverterJob do
+defmodule Transport.Jobs.SingleGTFSToNeTExConverterJob do
   @moduledoc """
   Conversion Job of a GTFS to a NeTEx, saving the resulting file in S3
   """
@@ -31,7 +31,7 @@ defmodule Transport.Jobs.SingleGtfsToNetexConverterJob do
   end
 end
 
-defmodule Transport.Jobs.DatasetGtfsToNetexConverterJob do
+defmodule Transport.Jobs.DatasetGTFSToNeTExConverterJob do
   @moduledoc """
   This will enqueue GTFS -> NeTEx conversion jobs for all GTFS resources linked to a dataset, but only for the most recent resource history
   """
diff --git a/apps/transport/test/db/data_conversion_test.exs b/apps/transport/test/db/data_conversion_test.exs
index f13392126c..f144837715 100644
--- a/apps/transport/test/db/data_conversion_test.exs
+++ b/apps/transport/test/db/data_conversion_test.exs
@@ -151,7 +151,7 @@ defmodule DB.DataConversionTest do
     # list candidate resource history for future conversions
     resource_history_ids =
-      dataset.id |> Transport.Jobs.DatasetGtfsToNetexConverterJob.list_gtfs_last_resource_history() |> Enum.sort()
+      dataset.id |> Transport.Jobs.DatasetGTFSToNeTExConverterJob.list_gtfs_last_resource_history() |> Enum.sort()
 
     assert [resource_history_1.id, resource_history_2.id] == resource_history_ids
   end
@@ -180,7 +180,7 @@ defmodule DB.DataConversionTest do
     # new conversion is enqueued
     assert_enqueued(
-      worker: Transport.Jobs.DatasetGtfsToNetexConverterJob,
+      worker: Transport.Jobs.DatasetGTFSToNeTExConverterJob,
       args: %{"dataset_id" => dataset.id}
     )
   end
diff --git a/apps/transport/test/transport/jobs/single_gtfs_to_netex_converter_job_test.exs b/apps/transport/test/transport/jobs/single_gtfs_to_netex_converter_job_test.exs
index 58de0c5e74..1271504084 100644
--- a/apps/transport/test/transport/jobs/single_gtfs_to_netex_converter_job_test.exs
+++ b/apps/transport/test/transport/jobs/single_gtfs_to_netex_converter_job_test.exs
@@ -1,9 +1,9 @@
-defmodule Transport.Jobs.SingleGtfsToNetexConverterJobTest do
+defmodule Transport.Jobs.SingleGTFSToNeTExConverterJobTest do
   use ExUnit.Case, async: true
   use Oban.Testing, repo: DB.Repo
   import DB.Factory
   import Mox
-  alias Transport.Jobs.SingleGtfsToNetexConverterJob
+  alias Transport.Jobs.SingleGTFSToNeTExConverterJob
 
   setup do
     Ecto.Adapters.SQL.Sandbox.checkout(DB.Repo)
@@ -19,7 +19,7 @@ defmodule Transport.Jobs.SingleGtfsToNetexConverterJobTest do
 
     # no mox expectation set, and the test passes => conversion is properly skipped
     assert {:cancel, "Conversion is not needed"} ==
-             perform_job(SingleGtfsToNetexConverterJob, %{"resource_history_id" => resource_history_id})
+             perform_job(SingleGTFSToNeTExConverterJob, %{"resource_history_id" => resource_history_id})
   end
 
   test "launch a NeTEx conversion" do
@@ -66,7 +66,7 @@ defmodule Transport.Jobs.SingleGtfsToNetexConverterJobTest do
 
     # job succeeds
     assert :ok ==
-             perform_job(SingleGtfsToNetexConverterJob, %{"resource_history_id" => resource_history_id})
+             perform_job(SingleGTFSToNeTExConverterJob, %{"resource_history_id" => resource_history_id})
 
     # a data_conversion row is recorded ✌️
     assert %DB.DataConversion{payload: %{"filesize" => 41, "filename" => "conversions/gtfs-to-netex/fff.netex.zip"}} =
@@ -111,7 +111,7 @@ defmodule Transport.Jobs.SingleGtfsToNetexConverterJobTest do
       {:error, "conversion failed"}
     end)
 
-    assert {:cancel, _} = perform_job(SingleGtfsToNetexConverterJob, %{"resource_history_id" => resource_history_id})
+    assert {:cancel, _} = perform_job(SingleGTFSToNeTExConverterJob, %{"resource_history_id" => resource_history_id})
 
     # ResourceHistory's payload is updated with the error information
     expected_payload =
diff --git a/config/runtime.exs b/config/runtime.exs
index 1a767fa39a..7bbfb09f2f 100644
--- a/config/runtime.exs
+++ b/config/runtime.exs
@@ -95,58 +95,57 @@ base_oban_conf = [repo: DB.Repo]
 # but not during dev or test
 # Be careful: there is "app_env :prod" in contrast to :staging (i.e. production website vs prochainement)
 # and "config_env :prod" in contrast to :dev and :test
+oban_prod_crontab = [
+  {"0 */6 * * *", Transport.Jobs.ResourceHistoryAndValidationDispatcherJob},
+  {"30 */6 * * *", Transport.Jobs.GTFSToGeoJSONConverterJob},
+  {"0 4 * * *", Transport.Jobs.GTFSImportStopsJob},
+  # every 6 hours but not at the same time as other jobs
+  {"0 3,9,15,21 * * *", Transport.Jobs.GTFSToNeTExConverterJob},
+  {"20 8 * * *", Transport.Jobs.CleanOrphanConversionsJob},
+  {"0 * * * *", Transport.Jobs.ResourcesUnavailableDispatcherJob},
+  {"*/10 * * * *", Transport.Jobs.ResourcesUnavailableDispatcherJob, args: %{only_unavailable: true}},
+  {"20 */2 * * *", Transport.Jobs.GTFSRTMetadataDispatcherJob},
+  {"30 */6 * * *", Transport.Jobs.BNLCToGeoData},
+  {"30 */6 * * *", Transport.Jobs.ParkingsRelaisToGeoData},
+  {"30 */6 * * *", Transport.Jobs.LowEmissionZonesToGeoData},
+  {"30 */6 * * *", Transport.Jobs.IRVEToGeoData},
+  {"15 10 * * *", Transport.Jobs.DatabaseBackupReplicationJob},
+  {"0 7 * * *", Transport.Jobs.GTFSRTMultiValidationDispatcherJob},
+  {"30 7 * * *", Transport.Jobs.GBFSMultiValidationDispatcherJob},
+  {"45 */3 * * *", Transport.Jobs.ResourceHistoryJSONSchemaValidationJob},
+  # Validata JSON is not properly maintained/monitored.
+  # Disable it for now.
+  # https://github.com/etalab/transport-site/issues/3492
+  # {"0 20 * * *", Transport.Jobs.ResourceHistoryValidataJSONJob},
+  {"15 */3 * * *", Transport.Jobs.ResourceHistoryTableSchemaValidationJob},
+  {"5 6 * * *", Transport.Jobs.NewDatagouvDatasetsJob},
+  {"0 6 * * *", Transport.Jobs.NewDatasetNotificationsJob},
+  {"0 21 * * *", Transport.Jobs.DatasetHistoryDispatcherJob},
+  # Should be executed after all `DatasetHistoryJob` have been executed
+  {"50 21 * * *", Transport.Jobs.ResourcesChangedNotificationJob},
+  {"0 22 * * *", Transport.Jobs.ArchiveMetricsJob},
+  {"15,45 * * * *", Transport.Jobs.MultiValidationWithErrorNotificationJob},
+  {"20,50 * * * *", Transport.Jobs.ResourceUnavailableNotificationJob},
+  {"30 6 * * 1", Transport.Jobs.DatasetsSwitchingClimateResilienceBillJob},
+  {"30 6 * * 1-5", Transport.Jobs.DatasetsClimateResilienceBillNotLOLicenceJob},
+  {"10 6 * * 1", Transport.Jobs.DatasetsWithoutGTFSRTRelatedResourcesNotificationJob},
+  {"45 2 * * *", Transport.Jobs.RemoveHistoryJob, args: %{schema_name: "etalab/schema-irve-dynamique", days_limit: 7}},
+  {"0 16 * * *", Transport.Jobs.DatasetQualityScoreDispatcher},
+  {"40 3 * * *", Transport.Jobs.UpdateContactsJob},
+  {"10 5 * * *", Transport.Jobs.NotificationSubscriptionProducerJob},
+  # "At 08:15 on Monday in March, June, and November."
+  # The job will make sure that it's executed only on the first Monday of these months
+  {"15 8 * 3,6,11 1", Transport.Jobs.PeriodicReminderProducersNotificationJob}
+]
+
+# Make sure that all modules exist
+oban_prod_crontab |> Enum.map(&Code.ensure_compiled!(elem(&1, 1)))
+
 oban_crontab_all_envs =
   case config_env() do
-    :prod ->
-      [
-        {"0 */6 * * *", Transport.Jobs.ResourceHistoryAndValidationDispatcherJob},
-        {"30 */6 * * *", Transport.Jobs.GtfsToGeojsonConverterJob},
-        {"0 4 * * *", Transport.Jobs.GTFSImportStopsJob},
-        # every 6 hours but not at the same time as other jobs
-        {"0 3,9,15,21 * * *", Transport.Jobs.GtfsToNetexConverterJob},
-        {"20 8 * * *", Transport.Jobs.CleanOrphanConversionsJob},
-        {"0 * * * *", Transport.Jobs.ResourcesUnavailableDispatcherJob},
-        {"*/10 * * * *", Transport.Jobs.ResourcesUnavailableDispatcherJob, args: %{only_unavailable: true}},
-        {"20 */2 * * *", Transport.Jobs.GTFSRTMetadataDispatcherJob},
-        {"30 */6 * * *", Transport.Jobs.BNLCToGeoData},
-        {"30 */6 * * *", Transport.Jobs.ParkingsRelaisToGeoData},
-        {"30 */6 * * *", Transport.Jobs.LowEmissionZonesToGeoData},
-        {"30 */6 * * *", Transport.Jobs.IRVEToGeoData},
-        {"15 10 * * *", Transport.Jobs.DatabaseBackupReplicationJob},
-        {"0 7 * * *", Transport.Jobs.GTFSRTMultiValidationDispatcherJob},
-        {"30 7 * * *", Transport.Jobs.GBFSMultiValidationDispatcherJob},
-        {"45 */3 * * *", Transport.Jobs.ResourceHistoryJSONSchemaValidationJob},
-        # Validata JSON is not properly maintained/monitored.
-        # Disable it for now.
-        # https://github.com/etalab/transport-site/issues/3492
-        # {"0 20 * * *", Transport.Jobs.ResourceHistoryValidataJSONJob},
-        {"15 */3 * * *", Transport.Jobs.ResourceHistoryTableSchemaValidationJob},
-        {"5 6 * * *", Transport.Jobs.NewDatagouvDatasetsJob},
-        {"0 6 * * *", Transport.Jobs.NewDatasetNotificationsJob},
-        {"0 21 * * *", Transport.Jobs.DatasetHistoryDispatcherJob},
-        # Should be executed after all `DatasetHistoryJob` have been executed
-        {"50 21 * * *", Transport.Jobs.ResourcesChangedNotificationJob},
-        {"0 22 * * *", Transport.Jobs.ArchiveMetricsJob},
-        {"15,45 * * * *", Transport.Jobs.MultiValidationWithErrorNotificationJob},
-        {"20,50 * * * *", Transport.Jobs.ResourceUnavailableNotificationJob},
-        {"30 6 * * 1", Transport.Jobs.DatasetsSwitchingClimateResilienceBillJob},
-        {"30 6 * * 1-5", Transport.Jobs.DatasetsClimateResilienceBillNotLOLicenceJob},
-        {"10 6 * * 1", Transport.Jobs.DatasetsWithoutGTFSRTRelatedResourcesNotificationJob},
-        {"45 2 * * *", Transport.Jobs.RemoveHistoryJob,
-         args: %{schema_name: "etalab/schema-irve-dynamique", days_limit: 7}},
-        {"0 16 * * *", Transport.Jobs.DatasetQualityScoreDispatcher},
-        {"40 3 * * *", Transport.Jobs.UpdateContactsJob},
-        {"10 5 * * *", Transport.Jobs.NotificationSubscriptionProducerJob},
-        # "At 08:15 on Monday in March, June, and November.""
-        # The job will make sure that it's executed only on the first Monday of these months
-        {"15 8 * 3,6,11 1", Transport.Jobs.PeriodicReminderProducersNotificationJob}
-      ]
-
-    :dev ->
-      []
-
-    :test ->
-      []
+    :prod -> oban_prod_crontab
+    :dev -> []
+    :test -> []
   end
 
 # Oban Jobs that only run on the production server.
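
Two side notes on the changes above (not part of the diff itself). First, the `Code.ensure_compiled!/1` guard added to `config/runtime.exs` is what turns a stale module name in the crontab into a failure at config evaluation time, instead of a cron entry that silently never runs. A minimal standalone sketch of the same pattern — the module names below are placeholders, not the real workers:

```elixir
# Each crontab entry is {cron_expression, module} or {cron_expression, module, opts};
# elem(&1, 1) extracts the module in both shapes.
crontab = [
  {"0 3,9,15,21 * * *", Enum},
  {"*/10 * * * *", String, args: %{only_unavailable: true}}
]

# Code.ensure_compiled!/1 (Elixir >= 1.12) returns the module or raises
# ArgumentError, so a renamed-but-not-updated worker fails fast at boot:
Enum.map(crontab, &Code.ensure_compiled!(elem(&1, 1)))

# With the old name this would raise, e.g.:
# Code.ensure_compiled!(Transport.Jobs.GtfsToNetexConverterJob)
# ** (ArgumentError) could not load module Transport.Jobs.GtfsToNetexConverterJob due to reason :nofile
```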
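Second, Oban persists each job's worker as a plain string in the `oban_jobs` table, so jobs already enqueued under the old `GtfsToNetex` names will not resolve to the renamed modules after deploy. A hypothetical cleanup sketch (not part of this PR), assuming direct Ecto access through `DB.Repo` and that only not-yet-executed jobs need repointing:

```elixir
import Ecto.Query

# Hypothetical helper: repoint jobs still enqueued under the old worker name.
# Oban.Job is an Ecto schema over oban_jobs, and `worker` is a string,
# so a module rename alone leaves existing rows untouched.
old_worker = "Transport.Jobs.DatasetGtfsToNetexConverterJob"
new_worker = "Transport.Jobs.DatasetGTFSToNeTExConverterJob"

from(j in Oban.Job,
  where: j.worker == ^old_worker,
  where: j.state in ["available", "scheduled", "retryable"]
)
|> DB.Repo.update_all(set: [worker: new_worker])
```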